Upgrade opencensus-java to baa68680b19040ff7739e314a2422a25cf41e27a

This project was upgraded with external_updater.
Usage: tools/external_updater/updater.sh update external/opencensus-java
For more info, check https://cs.android.com/android/platform/superproject/+/main:tools/external_updater/README.md

Test: TreeHugger
Change-Id: I1355b9f764a3d7937df68ca457602bcb7968bf76
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000..4e2fdab
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,5 @@
+# Code owners file.
+# This file controls who is tagged for review for any given pull request.
+
+# For anything not explicitly taken by someone else:
+*               @census-instrumentation/global-owners @dinooliva @rghetia @songy23
diff --git a/.github/ISSUE_TEMPLATE b/.github/ISSUE_TEMPLATE/bug_report.md
similarity index 65%
rename from .github/ISSUE_TEMPLATE
rename to .github/ISSUE_TEMPLATE/bug_report.md
index 7d484c6..a6a4722 100644
--- a/.github/ISSUE_TEMPLATE
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -1,3 +1,9 @@
+---
+name: Bug report
+about: Create a report to help us improve.
+labels: bug
+---
+
 Please answer these questions before submitting a bug report.
 
 ### What version of OpenCensus are you using?
@@ -13,4 +19,8 @@
 ### What did you expect to see?
 
 
-### What did you see instead?
\ No newline at end of file
+### What did you see instead?
+
+
+### Additional context
+Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 0000000..b670f55
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,22 @@
+---
+name: Feature request
+about: Suggest an idea for this project.
+labels: feature-request
+---
+
+**NB:** Before opening a feature request against this repo, consider whether the feature should be available across all languages in the OpenCensus libraries. If so, please [open an issue on opencensus-specs](https://github.com/census-instrumentation/opencensus-specs/issues/new) first.
+
+### Is your feature request related to a problem? If so, please describe it.
+A clear and concise description of what the problem is, e.g. I need to be able to ...
+
+
+### Describe the solution you'd like.
+A clear and concise description of what you want to happen.
+
+
+### Describe alternatives you've considered.
+A clear and concise description of any alternative solutions or features you've considered.
+
+
+### Additional context.
+Add any other context or screenshots about the feature request here.
diff --git a/.github/workflows/build-checker.yml b/.github/workflows/build-checker.yml
new file mode 100644
index 0000000..8b1ad5c
--- /dev/null
+++ b/.github/workflows/build-checker.yml
@@ -0,0 +1,26 @@
+name: Build Checker
+
+on: 
+  pull_request:
+      branches:
+      - master
+      - v0.*
+
+jobs:
+  build:
+    name: Build Checker
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2.3.4
+        with:
+          fetch-depth: 0
+      - id: setup-java
+        name: Setup Java 8
+        uses: actions/setup-java@v2
+        with:
+          distribution: adopt
+          java-version: 8
+      - uses: burrunan/gradle-cache-action@v1.10
+        with:
+          remote-build-cache-proxy-enabled: false
+          arguments: clean assemble -PcheckerFramework=true
\ No newline at end of file
diff --git a/.github/workflows/build-gradle-examples.yml b/.github/workflows/build-gradle-examples.yml
new file mode 100644
index 0000000..d84bc47
--- /dev/null
+++ b/.github/workflows/build-gradle-examples.yml
@@ -0,0 +1,31 @@
+name: Build Gradle Examples
+
+on: 
+  pull_request:
+      branches:
+      - master
+      - v0.*
+
+jobs:
+  build:
+    name: Build Gradle Examples Ubuntu
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2.3.4
+        with:
+          fetch-depth: 0
+      - id: setup-java
+        name: Setup Java 8
+        uses: actions/setup-java@v2
+        with:
+          distribution: adopt
+          java-version: 8
+      - uses: burrunan/gradle-cache-action@v1.10
+        with:
+          remote-build-cache-proxy-enabled: false
+      - name: Build Examples
+        working-directory: ./examples
+        run: ./gradlew clean assemble --stacktrace && ./gradlew check && ./gradlew verGJF 
+      - name: Build Spring Servlet example
+        working-directory: ./examples/spring/servlet
+        run: ./gradlew clean assemble --stacktrace && ./gradlew check && ./gradlew verGJF 
\ No newline at end of file
diff --git a/.github/workflows/build-maven-examples.yml b/.github/workflows/build-maven-examples.yml
new file mode 100644
index 0000000..33dc65f
--- /dev/null
+++ b/.github/workflows/build-maven-examples.yml
@@ -0,0 +1,28 @@
+name: Build Maven Examples
+
+on: 
+  pull_request:
+      branches:
+      - master
+      - v0.*
+
+jobs:
+  build:
+    name: Build Maven Examples Ubuntu
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2.3.4
+        with:
+          fetch-depth: 0
+      - id: setup-java
+        name: Setup Java 8
+        uses: actions/setup-java@v2
+        with:
+          distribution: adopt
+          java-version: 8
+      - name: Build Examples
+        working-directory: ./examples
+        run: mvn --batch-mode --update-snapshots clean package appassembler:assemble -e
+      - name: Build Spring Servlet example
+        working-directory: ./examples/spring/servlet
+        run: mvn --batch-mode --update-snapshots clean package appassembler:assemble -e
\ No newline at end of file
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..da2af11
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,37 @@
+name: Build
+
+on: 
+  pull_request:
+      branches:
+      - master
+      - v0.*
+
+jobs:
+  build:
+    name: Build
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os:
+          - macos-latest
+          - ubuntu-latest
+        java:
+          - 8
+          # TODO: Java 11 build doesn't work due to Java7 target.
+        include:
+          - os: ubuntu-latest
+            java: 8
+            coverage: true
+    steps:
+      - uses: actions/checkout@v2.3.4
+        with:
+          fetch-depth: 0
+      - id: setup-java
+        name: Setup Java ${{ matrix.java }}
+        uses: actions/setup-java@v2
+        with:
+          distribution: adopt
+          java-version: ${{ matrix.java }}
+      - run: ./gradlew clean assemble check --stacktrace
+      # TODO: Run jacocoTestReport
+      # TODO: Run verGJF
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
index 996d4c0..75e75b4 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -5,25 +5,24 @@
 matrix:
   fast_finish: true
   include:
-  - jdk: openjdk7
+  - jdk: openjdk8
     env: TASK=BUILD
     os: linux
 
-  - jdk: oraclejdk8
+  - jdk: oraclejdk9
     env: TASK=BUILD
     os: linux
-    addons:
-      apt:
-        packages:
-          # Install the JREs that are used for integration tests in
-          # contrib/agent, but are not installed by default.
-          - openjdk-6-jdk
 
-  # - jdk: oraclejdk9
-  #   env: TASK=BUILD
-  #   os: linux
+  - jdk: openjdk10
+    env: TASK=BUILD
+    os: linux
 
-  - jdk: oraclejdk8
+# TODO(songy23): fix JDK 11 build
+#  - jdk: openjdk11
+#    env: TASK=BUILD
+#    os: linux
+
+  - jdk: openjdk8
     env: TASK=CHECKER_FRAMEWORK
     os: linux
 
@@ -31,28 +30,18 @@
     os: linux
 
   # Build example projects last, since they are affected by fewer pull requests.
-  - jdk: oraclejdk8
-    env: TASK=CHECK_EXAMPLES_LICENSE
-    os: linux
-
-  - jdk: oraclejdk8
+  - jdk: openjdk8
     env: TASK=BUILD_EXAMPLES_GRADLE
     os: linux
 
-  - jdk: oraclejdk8
+  - jdk: openjdk8
     env: TASK=BUILD_EXAMPLES_MAVEN
     os: linux
 
-  - jdk: oraclejdk8
-    env: TASK=BUILD_EXAMPLES_BAZEL
-    os: linux
-
-  - jdk: oraclejdk8
-    env: TASK=CHECK_EXAMPLES_FORMAT
-    os: linux
-
   # Work around https://github.com/travis-ci/travis-ci/issues/2317
   - env: TASK=BUILD
+    # Set the java version to 1.8 until we fix the build for java10.
+    osx_image: xcode9.3
     os: osx
 
   allow_failures:
@@ -62,22 +51,16 @@
 
 before_install:
   - git log --oneline --decorate --graph -30
-  - if \[ "$TASK" == "BUILD_EXAMPLES_BAZEL" \]; then
-      echo "deb [arch=amd64] http://storage.googleapis.com/bazel-apt stable jdk1.8" | sudo tee /etc/apt/sources.list.d/bazel.list ;
-      curl https://bazel.build/bazel-release.pub.gpg | sudo apt-key add - ;
-      sudo apt-get update ;
-      sudo apt-get install bazel ;
-    fi
 
 # Skip Travis' default Gradle install step. See http://stackoverflow.com/a/26575080.
 install: true
 
 script:
-  - scripts/travis_script
+  - buildscripts/travis_script
 
 after_success:
-  - if \[ "$TASK" == "BUILD" \] && \[ "$TRAVIS_JDK_VERSION" == "oraclejdk8" \] && \[ "$TRAVIS_OS_NAME" = linux \]; then
-      bash <(curl -s https://codecov.io/bash) ;
+  - if \[ "$TASK" == "BUILD" \] && \[ "$TRAVIS_JDK_VERSION" == "openjdk8" \] && \[ "$TRAVIS_OS_NAME" = linux \]; then
+    bash <(curl -s https://codecov.io/bash) ;
     fi
 
 before_cache:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 352c241..d87ac88 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,115 @@
 ## Unreleased
+
+- fix: Shutdown `MetricServiceClient` properly on `StackdriverStatsExporter.unregister()` (#2007)
+
+## 0.28.3 - 2021-01-12
+
+- fix: Restore public access to the unsafe `ContextUtils` API. Removes the binary-compatibility issue from 0.27.1. (#2072)
+- fix: When time rewinds, avoid throwing exceptions and crashing the disruptor thread. (#2071)
+
+## 0.28.2 - 2020-10-22
+
+- feat: Make TracerImpl public for OpenTelemetry migration. (#2064) 
+
+Note: no binary available for 0.28.2
+
+## 0.28.1 - 2020-10-21
+
+- feat: Add support for OpenCensus to OpenTelemetry migration (#2059)
+
+Breaking change: ContextUtils is no longer public
+
+## 0.28.0 - 2020-10-21
+
+- Remove finalize from RecordEventsSpanImpl (#2043)
+
+## 0.27.0 - 2020-08-14
+- deps: update protobuf (#2029)
+- Update release versions for all readme and build files. (#2028)
+- deps: update Guava to 29.0 (#2032)
+- Add more InstanaExporterHandler tests (#2014)
+- feat: Allow users to specify a metric display name prefix, separately from the metric name prefix (#2050)
+
+## 0.26.0 - 2020-03-19
+- feat: Allow users to register the same Meter multiple times without exception (#2017)
+- update gRPC (#2024): Since gRPC v1.27.0, the census dependency has been removed from grpc-core. A grpc-census
+artifact now exists to provide the interceptor/stream tracer factory that applies census stats/tracing features.
+Users not using gRPC's integration of census should be unaffected. Users who want census integration
+need to add the grpc-census artifact to the runtime classpath.
+- deps: update GSON (#2025)
+- deps: update auth libraries (#2023)
+- update guava (#2018)
+
+## 0.25.0 - 2020-02-12
+- Add an experimental artifact `opencensus-contrib-observability-ready-util` that allows users to
+use OpenCensus easily.
+
+## 0.24.0 - 2019-08-27
+- Remove `CONTEXT_SPAN_KEY` and `TAG_CONTEXT_KEY` from API. This will be a breaking change to those who
+depend on these two keys, but nothing except gRPC should be using them directly anyway.
+
+## 0.23.0 - 2019-06-12
+- Make `StackdriverStatsExporter.unregister()` a public API.
+- Add Spring plugin support for HTTP servlets and AsyncRestTemplate.
+
+## 0.22.1 - 2019-05-21
+- Increase the buffer size for the trace export batch to 2500 (previously it was 32).
+
+## 0.22.0 - 2019-05-14
+- Disable RunningSpanStore by default unless the z-pages contrib is linked and initialized. This may
+break behaviors for applications that rely on this to be always enabled.
+- Provide a `Deadline` option to Stackdriver Trace exporter. Default value is 10 seconds if it's not set.
+- Provide a `Deadline` option to Stackdriver Stats exporter. Default value is 10 seconds.
+Also provide a `MetricServiceStub` option so that advanced users can use a custom Stackdriver
+Monitoring client to make RPCs.
+- Use `Configuration` builder pattern for creating `JaegerTraceExporter`, `ZipkinTraceExporter` and
+`InstanaTraceExporter`. Provide a `Deadline` option with default value 10 seconds.
+- Provide a `Deadline` option to Datadog, Elasticsearch and OcAgent exporter. Default value is 10 seconds.
+- Extract the common timeout logic of Trace exporters to `opencensus-exporter-trace-util`.
+
+## 0.21.0 - 2019-04-30
+- Add HTTP text format serializer to Tag propagation component.
+- Support constant labels in Gauge APIs.
+- Add an option to allow users to override the default "opencensus_task" metric label in Stackdriver Stats Exporter.
+- Allow setting custom namespace in Prometheus exporter.
+- Add Cumulative (`DoubleCumulative`, `LongCumulative`, `DerivedDoubleCumulative`, `DerivedLongCumulative`) APIs.
+- Add convenience APIs `TagContextBuilder.putLocal()` that adds non-propagating tags,
+and `TagContextBuilder.putPropagating()` that adds unlimited propagating tags.
+- Deprecate context keys for tags and spans. Provide helper methods for interacting with context.
+
+## 0.20.0 - 2019-03-28
+- Add OpenCensus Java OC-Agent Trace Exporter.
+- Add OpenCensus Java OC-Agent Metrics Exporter.
+- Add config option for Http-Servlet.
+- Add config option for Jetty Http Client.
+- Modified default value to false for publicEndpoint property in Http-Servlet.
+- Add a generic `AttachmentValue` class to support `Exemplar`.
+- Add Elasticsearch Trace Exporter.
+- Add `metrics.data` package to hold common classes shared between stats and metrics.
+- Refactor `Exemplar` and `AttachmentValue` to be under `metrics.data`. Note that this is a breaking change
+if you're using the `Exemplar` classes or APIs in the previous releases.
+- Add `TagMetadata` that defines the properties associated with a `Tag`.
+- Add `QueueMetricProducer` that supports pushing and buffering `Metric`s.
+
+## 0.19.0 - 2019-01-28
+- Add an artifact `opencensus-contrib-http-jetty-client` for instrumenting the Jetty HTTP client. Add an extractor for the Jetty client.
+- Add an artifact `opencensus-contrib-http-servlet` for instrumenting HTTP servlets. Add an extractor for HTTP servlets.
+- Add support for a generic HTTP server handler.
+- Add support for a generic HTTP client handler.
+- Add the ability to filter metrics collected from the Dropwizard registry.
+- Add a util artifact `opencensus-contrib-dropwizard5` to translate Dropwizard metrics5 to OpenCensus.
+- Add a metrics util package to be shared by all metrics exporters.
+- Add Datadog Trace Exporter.
+
+## 0.18.0 - 2018-11-27
+- Set the
+  [`trace_sampled` field](https://github.com/googleapis/googleapis/blob/8027f17420d5a323c7dfef1ae0e57d82f3b97430/google/logging/v2/log_entry.proto#L143-L149) in the Stackdriver `LogEntry` protocol buffer in `opencensus-contrib-log-correlation-stackdriver`.
+- Add support for w3c/distributed-tracing propagation format.
+- Add gRPC measures and views for real-time metrics in streaming RPCs.
+- Add Summary Metric support for Stackdriver exporter.
+- Reduce CPU usage for low qps applications.
+
+## 0.17.0 - 2018-11-02
 - Add `AttributeValueDouble` to `AttributeValue`.
 - Add `createWithSender` to `JaegerTraceExporter` to allow use of `HttpSender`
   with extra configurations.
@@ -6,7 +117,17 @@
 - Migrate to new Stackdriver Kubernetes monitored resource. This could be a breaking change
   if you are using `gke_container` resources. For more info,
   https://cloud.google.com/monitoring/kubernetes-engine/migration#incompatible
-- Add OpenCensus Java OC-Agent Trace Exporter.
+- Add a util artifact `opencensus-contrib-dropwizard` to translate Dropwizard metrics to
+  OpenCensus.
+- Add Gauges (`DoubleGauge`, `LongGauge`, `DerivedDoubleGauge`, `DerivedLongGauge`) APIs.
+- Update `opencensus-contrib-log-correlation-log4j2` and
+  `opencensus-contrib-log-correlation-stackdriver` to match the
+  [OpenCensus log correlation spec](https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/LogCorrelation.md)
+  and remove all `ExperimentalApi` annotations.
+- The histogram bucket boundaries (`BucketBoundaries`) and values (`Count` and `Sum`) are no longer
+  supported for negative values. The Record API drops negative `value`s and logs a warning.
+  This could be a breaking change if you are recording negative values for any `measure`.
+- Remove support for min/max in the stats Distribution to make it compatible with Metrics.
 
 ## 0.16.1 - 2018-09-18
 - Fix ClassCastException in Log4j log correlation
@@ -19,10 +140,10 @@
 - Add an API MeasureMap.putAttachment() for recording exemplars.
 - Add Exemplar class and an API to get Exemplar list to DistributionData.
 - Improve the styling of Rpcz, Statsz, Tracez, and Traceconfigz pages.
-- Add an artifact `opencensus-contrib-exemplar-util` that has helper utilities 
+- Add an artifact `opencensus-contrib-exemplar-util` that has helper utilities
   on recording exemplars.
 - Reduce the default limit on `Link`s per `Span` to 32 (was 128 before).
-- Add Spring support for `@Traced` annotation and java.sql.PreparedStatements 
+- Add Spring support for `@Traced` annotation and java.sql.PreparedStatements
   tracing.
 - Allow custom prefix for Stackdriver metrics in `StackdriverStatsConfiguration`.
 - Add support to handle the Tracestate in the SpanContext.
@@ -85,7 +206,7 @@
 - Add `Duration.toMillis()`.
 - Make monitored resource utils a separate artifact `opencensus-contrib-monitored-resource-util`,
   so that it can be reused across exporters.
-- Add `LastValue`, `LastValueDouble` and `LastValueLong`. Also support them in 
+- Add `LastValue`, `LastValueDouble` and `LastValueLong`. Also support them in
   stats exporters and zpages. Please note that there is an API breaking change
   in methods `Aggregation.match()` and `AggregationData.match()`.
 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 91279cc..b7d1059 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -93,6 +93,18 @@
 
 `gradlew.bat clean assemble -PcheckerFramework`
 
+Run the following command to install a local snapshot of OpenCensus. This 
+can be a very useful tool for testing changes locally against another 
+project that uses OpenCensus.
+
+### OS X or Linux
+
+`./gradlew install`
+
+### Windows
+
+`gradlew.bat install`
+
 ### Checker Framework null analysis
 
 OpenCensus uses the [Checker Framework](https://checkerframework.org/) to
diff --git a/METADATA b/METADATA
index e4f2323..08b0a5d 100644
--- a/METADATA
+++ b/METADATA
@@ -1,16 +1,20 @@
+# This project was upgraded with external_updater.
+# Usage: tools/external_updater/updater.sh update external/opencensus-java
+# For more info, check https://cs.android.com/android/platform/superproject/+/main:tools/external_updater/README.md
+
 name: "opencensus-java"
 description: "A stats collection and distributed tracing framework"
 third_party {
   license_type: NOTICE
   last_upgrade_date {
-    year: 2018
-    month: 10
-    day: 19
+    year: 2024
+    month: 5
+    day: 20
   }
   homepage: "https://opencensus.io"
   identifier {
     type: "Git"
     value: "https://github.com/census-instrumentation/opencensus-java"
-    version: "633fde4378905bffb967b30857257427cced4228"
+    version: "baa68680b19040ff7739e314a2422a25cf41e27a"
   }
 }
diff --git a/README.md b/README.md
index 0859e7d..e9276e5 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,11 @@
+> **Warning**
+>
+> OpenCensus and OpenTracing have merged to form [OpenTelemetry](https://opentelemetry.io), which serves as the next major version of OpenCensus and OpenTracing.
+>
+> OpenTelemetry has now reached feature parity with OpenCensus, with tracing and metrics SDKs available in .NET, Golang, Java, NodeJS, and Python. **All OpenCensus Github repositories, except [census-instrumentation/opencensus-python](https://github.com/census-instrumentation/opencensus-python), will be archived on July 31st, 2023**. We encourage users to migrate to OpenTelemetry by this date.
+>
+> To help you gradually migrate your instrumentation to OpenTelemetry, bridges are available in Java, Go, Python, and JS. [**Read the full blog post to learn more**](https://opentelemetry.io/blog/2023/sunsetting-opencensus/).
+
 # OpenCensus - A stats collection and distributed tracing framework
 [![Gitter chat][gitter-image]][gitter-url]
 [![Maven Central][maven-image]][maven-url]
@@ -6,22 +14,35 @@
 [![Windows Build Status][appveyor-image]][appveyor-url]
 [![Coverage Status][codecov-image]][codecov-url]
 
+> :exclamation: The [opencensus-contrib-log-correlation-log4j2](https://github.com/census-instrumentation/opencensus-java/tree/master/contrib/log_correlation/stackdriver)
+> Java client library is part of the OpenCensus project.
+> [CVE-2021-44228](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-44228)
+> and [CVE-2021-45046](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-45046) disclosed
+> security vulnerabilities in Apache Log4j 2 versions 2.15 and below. The recent version
+> v0.28.3 depends on Log4j 2.11.1. A number of previous versions also depend on vulnerable
+> Log4j versions.
+> 
+> :exclamation: We merged several fixes and published a release that depends on a safe version of 
+> Log4j (2.16). **We strongly encourage customers who depend on the 
+> opencensus-contrib-log-correlation-log4j2 library to upgrade to the latest 
+> release [(v0.30.0)](https://repo1.maven.org/maven2/io/opencensus/opencensus-contrib-log-correlation-log4j2/0.30.0/).**
 
 OpenCensus is a toolkit for collecting application performance and behavior data. It currently
 includes 3 APIs: stats, tracing and tags.
 
-The library is in [Beta](#versioning) stage and APIs are expected to be mostly stable. The 
-library is expected to move to [GA](#versioning) stage after v1.0.0 major release.
+The library is in [Beta](#versioning) stage and APIs are expected to be mostly stable.
 
 Please join [gitter](https://gitter.im/census-instrumentation/Lobby) for help or feedback on this
 project.
 
+**OpenCensus and OpenTracing have merged to form OpenTelemetry, which serves as the next major version of OpenCensus and OpenTracing. OpenTelemetry will offer backwards compatibility with existing OpenCensus integrations, and we will continue to make security patches to existing OpenCensus libraries for two years. Read more about the merger [here](https://medium.com/opentracing/a-roadmap-to-convergence-b074e5815289).**
+
 ## OpenCensus Quickstart for Libraries
 
 Integrating OpenCensus with a new library means recording stats or traces and propagating context.
 For application integration please see [Quickstart for Applications](https://github.com/census-instrumentation/opencensus-java#quickstart-for-applications).
 
-The full quick start example can also be found on the [OpenCensus website](https://opencensus.io/java/index.html).
+The full quick start example can also be found on the [OpenCensus website](https://opencensus.io/quickstart/java/).
 
 ### Add the dependencies to your project
 
@@ -31,33 +52,16 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-api</artifactId>
-    <version>0.16.1</version>
+    <version>0.30.0</version>
   </dependency>
 </dependencies>
 ```
 
 For Gradle add to your dependencies:
-```gradle
-compile 'io.opencensus:opencensus-api:0.16.1'
+```groovy
+compile 'io.opencensus:opencensus-api:0.30.0'
 ```
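As a quick sanity check after adding the dependency, here is a minimal tracing sketch (the class and span names are illustrative, not from this repository; with only `opencensus-api` on the classpath the returned tracer is a no-op):

```java
import io.opencensus.common.Scope;
import io.opencensus.trace.Tracer;
import io.opencensus.trace.Tracing;

public final class HelloTracing {
  // The global tracer; this is a no-op implementation unless opencensus-impl is also present.
  private static final Tracer tracer = Tracing.getTracer();

  public static void main(String[] args) {
    // The scoped span is ended automatically when the try-with-resources Scope closes.
    try (Scope scope = tracer.spanBuilder("MyWork").startScopedSpan()) {
      tracer.getCurrentSpan().addAnnotation("doing some work");
    }
  }
}
```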
 
-For Bazel add the following lines to the WORKSPACE file:
-```
-maven_jar(
-    name = "io_opencensus_opencensus_api",
-    artifact = "io.opencensus:opencensus-api:0.15.0",
-    sha1 = "9a098392b287d7924660837f4eba0ce252013683",
-)
-```
-Then targets can specify `@io_opencensus_opencensus_api//jar` as a dependency to depend on this jar:
-```bazel
-deps = [
-    "@io_opencensus_opencensus_api//jar",
-]
-```
-You may also need to import the transitive dependencies. See [generate external dependencies from 
-Maven projects](https://docs.bazel.build/versions/master/generate-workspace.html).
-
 ### Hello "OpenCensus" trace events
 
 Here's an example of creating a Span and recording some trace annotations. Notice that recording the
@@ -198,56 +202,23 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-api</artifactId>
-    <version>0.16.1</version>
+    <version>0.30.0</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-impl</artifactId>
-    <version>0.16.1</version>
+    <version>0.30.0</version>
     <scope>runtime</scope>
   </dependency>
 </dependencies>
 ```
 
 For Gradle add to your dependencies:
-```gradle
-compile 'io.opencensus:opencensus-api:0.16.1'
-runtime 'io.opencensus:opencensus-impl:0.16.1'
+```groovy
+compile 'io.opencensus:opencensus-api:0.30.0'
+runtime 'io.opencensus:opencensus-impl:0.30.0'
 ```
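For completeness, a minimal stats-recording sketch under the same setup (the measure name, unit, and value are illustrative; with `opencensus-impl` on the runtime classpath the recorded values are aggregated by whatever views are registered):

```java
import io.opencensus.stats.Measure.MeasureLong;
import io.opencensus.stats.Stats;
import io.opencensus.stats.StatsRecorder;

public final class HelloStats {
  // A measure describes what gets recorded; registered views decide how it is aggregated.
  private static final MeasureLong M_LATENCY_MS =
      MeasureLong.create("my.org/measure/latency", "Request latency", "ms");
  private static final StatsRecorder statsRecorder = Stats.getStatsRecorder();

  public static void main(String[] args) {
    // Record a single latency measurement of 17 ms against the measure above.
    statsRecorder.newMeasureMap().put(M_LATENCY_MS, 17).record();
  }
}
```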
 
-For Bazel add the following lines to the WORKSPACE file:
-```
-maven_jar(
-    name = "io_opencensus_opencensus_api",
-    artifact = "io.opencensus:opencensus-api:0.15.0",
-    sha1 = "9a098392b287d7924660837f4eba0ce252013683",
-)
-
-maven_jar(
-    name = "io_opencensus_opencensus_impl_core",
-    artifact = "io.opencensus:opencensus-impl-core:0.15.0",
-    sha1 = "36c775926ba1e54af7c37d0503cfb99d986f6229",
-)
-
-maven_jar(
-    name = "io_opencensus_opencensus_impl",
-    artifact = "io.opencensus:opencensus-impl:0.15.0",
-    sha1 = "d7bf0d7ee5a0594f840271c11c9f8d6f754f35d6",
-)
-```
-Then add the following lines to BUILD.bazel file:
-```bazel
-deps = [
-    "@io_opencensus_opencensus_api//jar",
-]
-runtime_deps = [
-    "@io_opencensus_opencensus_impl_core//jar",
-    "@io_opencensus_opencensus_impl//jar",
-]
-```
-Again you may need to import the transitive dependencies. See [generate external dependencies from 
-Maven projects](https://docs.bazel.build/versions/master/generate-workspace.html).
-
 ### How to setup exporters?
 
 #### Trace exporters
diff --git a/RELEASING.md b/RELEASING.md
index 649ac81..0c3f51c 100644
--- a/RELEASING.md
+++ b/RELEASING.md
@@ -65,7 +65,10 @@
       examples/build.gradle
       examples/pom.xml
       api/src/main/java/io/opencensus/common/OpenCensusLibraryInformation.java
+      exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentNodeUtils.java
       exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentNodeUtils.java
+      examples/spring/servlet/build.gradle
+      examples/spring/servlet/pom.xml
       )
     $ git checkout -b v$MAJOR.$MINOR.x master
     $ git push upstream v$MAJOR.$MINOR.x
@@ -121,7 +124,8 @@
     ```
 
     -   Go through PR review and push the release tag and updated release branch
-        to GitHub:
+        to GitHub (note: do not squash the commits when you merge, otherwise you
+        will lose the release tag):
 
     ```bash
     $ git checkout v$MAJOR.$MINOR.x
@@ -145,7 +149,7 @@
 $ git checkout -b v$MAJOR.$MINOR.$PATCH tags/v$MAJOR.$MINOR.$PATCH
 ```
 
-### Initial Deployment
+### Building and Deploying
 
 The following command will build the whole project and upload it to Maven
 Central. Parallel building [is not safe during
@@ -203,18 +207,24 @@
 $ README_FILES=(
   README.md
   contrib/appengine_standard_util/README.md
+  contrib/dropwizard/README.md
+  contrib/dropwizard5/README.md
   contrib/exemplar_util/README.md
   contrib/grpc_util/README.md
+  contrib/http_jaxrs/README.md
+  contrib/http_jetty_client/README.md
+  contrib/http_servlet/README.md
   contrib/http_util/README.md
   contrib/log_correlation/log4j2/README.md
   contrib/log_correlation/stackdriver/README.md
-  contrib/monitored_resource_util/README.md
   contrib/spring/README.md
   contrib/spring_sleuth_v1x/README.md
   contrib/zpages/README.md
   exporters/stats/prometheus/README.md
   exporters/stats/signalfx/README.md
   exporters/stats/stackdriver/README.md
+  exporters/trace/datadog/README.md
+  exporters/trace/elasticsearch/README.md
   exporters/trace/instana/README.md
   exporters/trace/logging/README.md
   exporters/trace/jaeger/README.md
@@ -231,56 +241,23 @@
 # Substitute versions in maven pom examples in README.md
 $ sed -i 's/\(<version>\)[0-9]\+\.[0-9]\+\.[0-9]\+/\1'$MAJOR.$MINOR.$PATCH'/' \
  "${README_FILES[@]}"
+$ git commit -a -m "Update release versions for all readme and build files." 
 ```
 
-2. Update bazel dependencies for subproject `examples`:
+2. Go through PR review and merge it to GitHub master branch.
 
-    - Follow the instructions on [this
-    page](https://docs.bazel.build/versions/master/generate-workspace.html) to
-    install bazel migration tool. You may also need to manually apply
-    this [patch](
-    https://github.com/nevillelyh/migration-tooling/commit/f10e14fd18ad3885c7ec8aa305e4eba266a07ebf)
-    if you encounter `Unable to find a version for ... due to Invalid Range Result` error when
-    using it.
-
-    - Use the following command to generate new dependencies file:
-
-    ```bash
-    $ bazel run //generate_workspace -- \
-    --artifact=com.google.guava:guava-jdk5:23.0
-    --artifact=com.google.guava:guava:23.0 \
-    --artifact=io.grpc:grpc-all:1.9.0 \
-    --artifact=io.opencensus:opencensus-api:$MAJOR.$MINOR.$PATCH \
-    --artifact=io.opencensus:opencensus-contrib-grpc-metrics:$MAJOR.$MINOR.$PATCH \
-    --artifact=io.opencensus:opencensus-contrib-zpages:$MAJOR.$MINOR.$PATCH \
-    --artifact=io.opencensus:opencensus-exporter-stats-prometheus:$MAJOR.$MINOR.$PATCH \
-    --artifact=io.opencensus:opencensus-exporter-stats-stackdriver:$MAJOR.$MINOR.$PATCH \
-    --artifact=io.opencensus:opencensus-exporter-trace-logging:$MAJOR.$MINOR.$PATCH \
-    --artifact=io.opencensus:opencensus-exporter-trace-stackdriver:$MAJOR.$MINOR.$PATCH \
-    --artifact=io.opencensus:opencensus-impl:$MAJOR.$MINOR.$PATCH \
-    --artifact=io.prometheus:simpleclient_httpserver:0.3.0 \
-    --repositories=http://repo.maven.apache.org/maven2
-    Wrote
-    /usr/local/.../generate_workspace.runfiles/__main__/generate_workspace.bzl
-    ```
-
-    - Copy this file to overwrite `examples/opencensus_workspace.bzl`.
-
-    - Use the following command to rename the generated rules and commit the
-      changes above:
-
-    ```bash
-    $ sed -i 's/def generated_/def opencensus_/' examples/opencensus_workspace.bzl
-    $ git commit -a -m "Update release versions for all readme and build files."
-    ```
-
-3. Go through PR review and merge it to GitHub master branch.
-
-4. In addition, create a PR to mark the new release in 
+3. In addition, create a PR to mark the new release in 
 [CHANGELOG.md](https://github.com/census-instrumentation/opencensus-java/blob/master/CHANGELOG.md)
 on master branch. Once that PR is merged, cherry-pick the commit and create another PR to the 
 release branch (branch v$MAJOR.$MINOR.x).
 
+## Patch Release
+All patch releases should include only bug fixes, and must avoid adding or modifying public APIs.
+To cherry-pick one commit use the following command:
+```bash
+$ COMMIT=1224f0a # Set the right commit hash.
+$ git cherry-pick -x $COMMIT
+```
 
 ## Known Issues
 
diff --git a/all/build.gradle b/all/build.gradle
index d46f2f6..83bd97b 100644
--- a/all/build.gradle
+++ b/all/build.gradle
@@ -9,20 +9,30 @@
         project(':opencensus-contrib-agent'),
         project(':opencensus-contrib-appengine-standard-util'),
         project(':opencensus-contrib-dropwizard'),
+        project(':opencensus-contrib-dropwizard5'),
         project(':opencensus-contrib-exemplar-util'),
-        project(':opencensus-contrib-grpc-util'),
         project(':opencensus-contrib-grpc-metrics'),
+        project(':opencensus-contrib-grpc-util'),
+        project(':opencensus-contrib-http-jaxrs'),
+        project(':opencensus-contrib-http-jetty-client'),
+        project(':opencensus-contrib-http-servlet'),
         project(':opencensus-contrib-http-util'),
         project(':opencensus-contrib-log-correlation-stackdriver'),
-        project(':opencensus-contrib-monitored-resource-util'),
+        project(':opencensus-contrib-observability-ready-util'),
+        project(':opencensus-contrib-resource-util'),
         project(':opencensus-contrib-spring'),
         project(':opencensus-contrib-spring-sleuth-v1x'),
+        project(':opencensus-contrib-spring-starter'),
         project(':opencensus-contrib-zpages'),
+        project(':opencensus-exporter-metrics-ocagent'),
+        project(':opencensus-exporter-metrics-util'),
         project(':opencensus-exporter-trace-logging'),
+        project(':opencensus-exporter-trace-elasticsearch'),
         project(':opencensus-exporter-trace-ocagent'),
         project(':opencensus-exporter-trace-stackdriver'),
         project(':opencensus-exporter-trace-zipkin'),
         project(':opencensus-exporter-trace-jaeger'),
+        project(':opencensus-exporter-trace-util'),
         project(':opencensus-exporter-stats-signalfx'),
         project(':opencensus-exporter-stats-stackdriver'),
         project(':opencensus-exporter-stats-prometheus'),
@@ -36,19 +46,28 @@
         project(':opencensus-contrib-appengine-standard-util'),
         project(':opencensus-contrib-dropwizard'),
         project(':opencensus-contrib-exemplar-util'),
-        project(':opencensus-contrib-grpc-util'),
         project(':opencensus-contrib-grpc-metrics'),
+        project(':opencensus-contrib-grpc-util'),
+        project(':opencensus-contrib-http-jaxrs'),
+        project(':opencensus-contrib-http-jetty-client'),
+        project(':opencensus-contrib-http-servlet'),
         project(':opencensus-contrib-http-util'),
         project(':opencensus-contrib-log-correlation-stackdriver'),
-        project(':opencensus-contrib-monitored-resource-util'),
+        project(':opencensus-contrib-observability-ready-util'),
+        project(':opencensus-contrib-resource-util'),
         project(':opencensus-contrib-spring'),
         project(':opencensus-contrib-spring-sleuth-v1x'),
+        project(':opencensus-contrib-spring-starter'),
         project(':opencensus-contrib-zpages'),
+        project(':opencensus-exporter-metrics-ocagent'),
+        project(':opencensus-exporter-metrics-util'),
         project(':opencensus-exporter-trace-logging'),
+        project(':opencensus-exporter-trace-elasticsearch'),
         project(':opencensus-exporter-trace-ocagent'),
         project(':opencensus-exporter-trace-stackdriver'),
         project(':opencensus-exporter-trace-zipkin'),
         project(':opencensus-exporter-trace-jaeger'),
+        project(':opencensus-exporter-trace-util'),
         project(':opencensus-exporter-stats-signalfx'),
         project(':opencensus-exporter-stats-stackdriver'),
         project(':opencensus-exporter-stats-prometheus'),
diff --git a/api/src/main/java/io/opencensus/common/OpenCensusLibraryInformation.java b/api/src/main/java/io/opencensus/common/OpenCensusLibraryInformation.java
index 3f659c1..599c3d4 100644
--- a/api/src/main/java/io/opencensus/common/OpenCensusLibraryInformation.java
+++ b/api/src/main/java/io/opencensus/common/OpenCensusLibraryInformation.java
@@ -29,7 +29,7 @@
    *
    * @since 0.8
    */
-  public static final String VERSION = "0.17.0-SNAPSHOT"; // CURRENT_OPENCENSUS_VERSION
+  public static final String VERSION = "0.32.0-SNAPSHOT"; // CURRENT_OPENCENSUS_VERSION
 
   private OpenCensusLibraryInformation() {}
 }
diff --git a/api/src/main/java/io/opencensus/internal/Utils.java b/api/src/main/java/io/opencensus/internal/Utils.java
index df5c984..1a3a1cd 100644
--- a/api/src/main/java/io/opencensus/internal/Utils.java
+++ b/api/src/main/java/io/opencensus/internal/Utils.java
@@ -17,6 +17,7 @@
 package io.opencensus.internal;
 
 import java.util.List;
+import java.util.Map;
 
 /*>>>
 import org.checkerframework.checker.nullness.qual.NonNull;
@@ -133,6 +134,22 @@
   }
 
   /**
+   * Throws a {@link NullPointerException} if any key or value of the map is null.
+   *
+   * @param map the argument map to check for null keys and values.
+   * @param errorMessage the message to use for the exception. Will be converted to a string using
+   *     {@link String#valueOf(Object)}.
+   */
+  public static <K /*>>> extends @NonNull Object*/, V /*>>> extends @NonNull Object*/>
+      void checkMapElementNotNull(Map<K, V> map, @javax.annotation.Nullable Object errorMessage) {
+    for (Map.Entry<K, V> entry : map.entrySet()) {
+      if (entry.getKey() == null || entry.getValue() == null) {
+        throw new NullPointerException(String.valueOf(errorMessage));
+      }
+    }
+  }
+
+  /**
    * Compares two Objects for equality. This functionality is provided by {@code
    * Objects.equal(Object, Object)} in Java 7.
    */
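For context, a sketch of how the new map-validation helper might be used at a call site (the wrapper class and method below are hypothetical and not part of this change; `Utils` is an internal API):

```java
import io.opencensus.internal.Utils;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

final class AttachmentsValidator {
  private AttachmentsValidator() {}

  // Rejects a map containing null keys or values before it is defensively copied.
  static <K, V> Map<K, V> validateAndCopy(Map<K, V> attachments) {
    Utils.checkNotNull(attachments, "attachments");
    // Throws NullPointerException("attachment") if any key or value is null.
    Utils.checkMapElementNotNull(attachments, "attachment");
    return Collections.unmodifiableMap(new HashMap<K, V>(attachments));
  }
}
```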
diff --git a/api/src/main/java/io/opencensus/metrics/DerivedDoubleCumulative.java b/api/src/main/java/io/opencensus/metrics/DerivedDoubleCumulative.java
new file mode 100644
index 0000000..68c240e
--- /dev/null
+++ b/api/src/main/java/io/opencensus/metrics/DerivedDoubleCumulative.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.metrics;
+
+import io.opencensus.common.ToDoubleFunction;
+import io.opencensus.internal.Utils;
+import java.lang.ref.WeakReference;
+import java.util.List;
+import javax.annotation.concurrent.ThreadSafe;
+
+/*>>>
+import org.checkerframework.checker.nullness.qual.Nullable;
+*/
+
+/**
+ * Derived Double Cumulative metric, to report cumulative measurement of a double value. Cumulative
+ * values can go up or stay the same, but can never go down. Cumulative values cannot be negative.
+ *
+ * <p>Example: Create a Cumulative with an object and a callback function.
+ *
+ * <pre>{@code
+ * class YourClass {
+ *
+ *   private static final MetricRegistry metricRegistry = Metrics.getMetricRegistry();
+ *
+ *   List<LabelKey> labelKeys = Arrays.asList(LabelKey.create("Name", "desc"));
+ *   List<LabelValue> labelValues = Arrays.asList(LabelValue.create("Inbound"));
+ *
+ *   DerivedDoubleCumulative cumulative = metricRegistry.addDerivedDoubleCumulative(
+ *       "processed_jobs", "Processed jobs in a queue", "1", labelKeys);
+ *
+ *   QueueManager queueManager = new QueueManager();
+ *   cumulative.createTimeSeries(labelValues, queueManager,
+ *         new ToDoubleFunction<QueueManager>() {
+ *           {@literal @}Override
+ *           public double applyAsDouble(QueueManager queue) {
+ *             return queue.size();
+ *           }
+ *         });
+ *
+ *   void doWork() {
+ *      // Your code here.
+ *   }
+ * }
+ *
+ * }</pre>
+ *
+ * @since 0.21
+ */
+@ThreadSafe
+public abstract class DerivedDoubleCumulative {
+  /**
+   * Creates a {@code TimeSeries}. The value of a single point in the TimeSeries is observed from a
+   * callback function. This function is invoked whenever metrics are collected, meaning the
+   * reported value is up-to-date. It keeps a {@link WeakReference} to the object and it is the
+   * user's responsibility to manage the lifetime of the object.
+   *
+   * @param labelValues the list of label values.
+   * @param obj the state object from which the function derives a measurement.
+   * @param function the function to be called.
+   * @param <T> the type of the object upon which the function derives a measurement.
+   * @throws NullPointerException if {@code labelValues} is null OR any element of {@code
+   *     labelValues} is null OR {@code function} is null.
+   * @throws IllegalArgumentException if a different time series with the same labels already exists
+   *     OR if the number of {@code labelValues} is not equal to the number of label keys.
+   * @since 0.21
+   */
+  public abstract <T> void createTimeSeries(
+      List<LabelValue> labelValues,
+      /*@Nullable*/ T obj,
+      ToDoubleFunction</*@Nullable*/ T> function);
+
+  /**
+   * Removes the {@code TimeSeries} from the cumulative metric, if it is present.
+   *
+   * @param labelValues the list of label values.
+   * @throws NullPointerException if {@code labelValues} is null.
+   * @since 0.21
+   */
+  public abstract void removeTimeSeries(List<LabelValue> labelValues);
+
+  /**
+   * Removes all {@code TimeSeries} from the cumulative metric.
+   *
+   * @since 0.21
+   */
+  public abstract void clear();
+
+  /**
+   * Returns the no-op implementation of the {@code DerivedDoubleCumulative}.
+   *
+   * @return the no-op implementation of the {@code DerivedDoubleCumulative}.
+   * @since 0.21
+   */
+  static DerivedDoubleCumulative newNoopDerivedDoubleCumulative(
+      String name, String description, String unit, List<LabelKey> labelKeys) {
+    return NoopDerivedDoubleCumulative.create(name, description, unit, labelKeys);
+  }
+
+  /** No-op implementations of DerivedDoubleCumulative class. */
+  private static final class NoopDerivedDoubleCumulative extends DerivedDoubleCumulative {
+    private final int labelKeysSize;
+
+    static NoopDerivedDoubleCumulative create(
+        String name, String description, String unit, List<LabelKey> labelKeys) {
+      return new NoopDerivedDoubleCumulative(name, description, unit, labelKeys);
+    }
+
+    /** Creates a new {@code NoopDerivedDoubleCumulative}. */
+    NoopDerivedDoubleCumulative(
+        String name, String description, String unit, List<LabelKey> labelKeys) {
+      Utils.checkNotNull(name, "name");
+      Utils.checkNotNull(description, "description");
+      Utils.checkNotNull(unit, "unit");
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey");
+      labelKeysSize = labelKeys.size();
+    }
+
+    @Override
+    public <T> void createTimeSeries(
+        List<LabelValue> labelValues,
+        /*@Nullable*/ T obj,
+        ToDoubleFunction</*@Nullable*/ T> function) {
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelValues, "labelValues"), "labelValue");
+      Utils.checkArgument(
+          labelKeysSize == labelValues.size(), "Label Keys and Label Values don't have same size.");
+      Utils.checkNotNull(function, "function");
+    }
+
+    @Override
+    public void removeTimeSeries(List<LabelValue> labelValues) {
+      Utils.checkNotNull(labelValues, "labelValues");
+    }
+
+    @Override
+    public void clear() {}
+  }
+}
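One practical consequence of the `WeakReference` behavior documented on `createTimeSeries` above: the caller must keep its own strong reference to the state object, or the callback can silently stop reporting once the object is garbage collected. A minimal sketch (the `QueueMetrics` and `QueueManager` classes are hypothetical, mirroring the Javadoc example):

```java
import io.opencensus.common.ToDoubleFunction;
import io.opencensus.metrics.DerivedDoubleCumulative;
import io.opencensus.metrics.LabelValue;
import java.util.List;

final class QueueMetrics {
  // Hypothetical caller-owned state object, as in the Javadoc example above.
  static final class QueueManager {
    double size() {
      return 0;
    }
  }

  // This field keeps the state object strongly reachable for the lifetime of QueueMetrics;
  // createTimeSeries() itself only holds a WeakReference to it.
  private final QueueManager queueManager = new QueueManager();

  void register(DerivedDoubleCumulative cumulative, List<LabelValue> labelValues) {
    cumulative.createTimeSeries(
        labelValues,
        queueManager,
        new ToDoubleFunction<QueueManager>() {
          @Override
          public double applyAsDouble(QueueManager queue) {
            return queue.size();
          }
        });
  }
}
```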
diff --git a/api/src/main/java/io/opencensus/metrics/DerivedDoubleGauge.java b/api/src/main/java/io/opencensus/metrics/DerivedDoubleGauge.java
index 3aaca15..3f700e7 100644
--- a/api/src/main/java/io/opencensus/metrics/DerivedDoubleGauge.java
+++ b/api/src/main/java/io/opencensus/metrics/DerivedDoubleGauge.java
@@ -125,8 +125,7 @@
       Utils.checkNotNull(name, "name");
       Utils.checkNotNull(description, "description");
       Utils.checkNotNull(unit, "unit");
-      Utils.checkListElementNotNull(
-          Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey element should not be null.");
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey");
       labelKeysSize = labelKeys.size();
     }
 
@@ -135,9 +134,9 @@
         List<LabelValue> labelValues,
         /*@Nullable*/ T obj,
         ToDoubleFunction</*@Nullable*/ T> function) {
-      Utils.checkListElementNotNull(
-          Utils.checkNotNull(labelValues, "labelValues"), "labelValue element should not be null.");
-      Utils.checkArgument(labelKeysSize == labelValues.size(), "Incorrect number of labels.");
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelValues, "labelValues"), "labelValue");
+      Utils.checkArgument(
+          labelKeysSize == labelValues.size(), "Label Keys and Label Values don't have same size.");
       Utils.checkNotNull(function, "function");
     }
 
diff --git a/api/src/main/java/io/opencensus/metrics/DerivedLongCumulative.java b/api/src/main/java/io/opencensus/metrics/DerivedLongCumulative.java
new file mode 100644
index 0000000..b1f192f
--- /dev/null
+++ b/api/src/main/java/io/opencensus/metrics/DerivedLongCumulative.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.metrics;
+
+import io.opencensus.common.ToLongFunction;
+import io.opencensus.internal.Utils;
+import java.lang.ref.WeakReference;
+import java.util.List;
+import javax.annotation.concurrent.ThreadSafe;
+
+/*>>>
+import org.checkerframework.checker.nullness.qual.Nullable;
+*/
+
+/**
+ * Derived Long Cumulative metric, to report cumulative measurement of an int64 value. Cumulative
+ * values can go up or stay the same, but can never go down. Cumulative values cannot be negative.
+ *
+ * <p>Example: Create a Cumulative with an object and a callback function.
+ *
+ * <pre>{@code
+ * class YourClass {
+ *
+ *   private static final MetricRegistry metricRegistry = Metrics.getMetricRegistry();
+ *
+ *   List<LabelKey> labelKeys = Arrays.asList(LabelKey.create("Name", "desc"));
+ *   List<LabelValue> labelValues = Arrays.asList(LabelValue.create("Inbound"));
+ *
+ *   DerivedLongCumulative cumulative = metricRegistry.addDerivedLongCumulative(
+ *       "processed_jobs", "Total processed jobs in a queue", "1", labelKeys);
+ *
+ *   QueueManager queueManager = new QueueManager();
+ *   cumulative.createTimeSeries(labelValues, queueManager,
+ *         new ToLongFunction<QueueManager>() {
+ *           {@literal @}Override
+ *           public long applyAsLong(QueueManager queue) {
+ *             return queue.size();
+ *           }
+ *         });
+ *
+ *   void doWork() {
+ *      // Your code here.
+ *   }
+ * }
+ *
+ * }</pre>
+ *
+ * @since 0.21
+ */
+@ThreadSafe
+public abstract class DerivedLongCumulative {
+  /**
+   * Creates a {@code TimeSeries}. The value of a single point in the TimeSeries is observed from a
+   * callback function. This function is invoked whenever metrics are collected, meaning the
+   * reported value is up-to-date. It keeps a {@link WeakReference} to the object and it is the
+   * user's responsibility to manage the lifetime of the object.
+   *
+   * @param labelValues the list of label values.
+   * @param obj the state object from which the function derives a measurement.
+   * @param function the function to be called.
+   * @param <T> the type of the object upon which the function derives a measurement.
+   * @throws NullPointerException if {@code labelValues} is null OR any element of {@code
+   *     labelValues} is null OR {@code function} is null.
+   * @throws IllegalArgumentException if a different time series with the same labels already exists
+   *     OR if the number of {@code labelValues} is not equal to the number of label keys.
+   * @since 0.21
+   */
+  public abstract <T> void createTimeSeries(
+      List<LabelValue> labelValues, /*@Nullable*/ T obj, ToLongFunction</*@Nullable*/ T> function);
+
+  /**
+   * Removes the {@code TimeSeries} from the cumulative metric, if it is present.
+   *
+   * @param labelValues the list of label values.
+   * @throws NullPointerException if {@code labelValues} is null.
+   * @since 0.21
+   */
+  public abstract void removeTimeSeries(List<LabelValue> labelValues);
+
+  /**
+   * Removes all {@code TimeSeries} from the cumulative metric.
+   *
+   * @since 0.21
+   */
+  public abstract void clear();
+
+  /**
+   * Returns the no-op implementation of the {@code DerivedLongCumulative}.
+   *
+   * @return the no-op implementation of the {@code DerivedLongCumulative}.
+   * @since 0.21
+   */
+  static DerivedLongCumulative newNoopDerivedLongCumulative(
+      String name, String description, String unit, List<LabelKey> labelKeys) {
+    return NoopDerivedLongCumulative.create(name, description, unit, labelKeys);
+  }
+
+  /** No-op implementations of DerivedLongCumulative class. */
+  private static final class NoopDerivedLongCumulative extends DerivedLongCumulative {
+    private final int labelKeysSize;
+
+    static NoopDerivedLongCumulative create(
+        String name, String description, String unit, List<LabelKey> labelKeys) {
+      return new NoopDerivedLongCumulative(name, description, unit, labelKeys);
+    }
+
+    /** Creates a new {@code NoopDerivedLongCumulative}. */
+    NoopDerivedLongCumulative(
+        String name, String description, String unit, List<LabelKey> labelKeys) {
+      Utils.checkNotNull(name, "name");
+      Utils.checkNotNull(description, "description");
+      Utils.checkNotNull(unit, "unit");
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey");
+      labelKeysSize = labelKeys.size();
+    }
+
+    @Override
+    public <T> void createTimeSeries(
+        List<LabelValue> labelValues,
+        /*@Nullable*/ T obj,
+        ToLongFunction</*@Nullable*/ T> function) {
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelValues, "labelValues"), "labelValue");
+      Utils.checkArgument(
+          labelKeysSize == labelValues.size(), "Label Keys and Label Values don't have same size.");
+      Utils.checkNotNull(function, "function");
+    }
+
+    @Override
+    public void removeTimeSeries(List<LabelValue> labelValues) {
+      Utils.checkNotNull(labelValues, "labelValues");
+    }
+
+    @Override
+    public void clear() {}
+  }
+}
diff --git a/api/src/main/java/io/opencensus/metrics/DerivedLongGauge.java b/api/src/main/java/io/opencensus/metrics/DerivedLongGauge.java
index 621873f..8cf3977 100644
--- a/api/src/main/java/io/opencensus/metrics/DerivedLongGauge.java
+++ b/api/src/main/java/io/opencensus/metrics/DerivedLongGauge.java
@@ -123,8 +123,7 @@
       Utils.checkNotNull(name, "name");
       Utils.checkNotNull(description, "description");
       Utils.checkNotNull(unit, "unit");
-      Utils.checkListElementNotNull(
-          Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey element should not be null.");
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey");
       labelKeysSize = labelKeys.size();
     }
 
@@ -133,9 +132,9 @@
         List<LabelValue> labelValues,
         /*@Nullable*/ T obj,
         ToLongFunction</*@Nullable*/ T> function) {
-      Utils.checkListElementNotNull(
-          Utils.checkNotNull(labelValues, "labelValues"), "labelValue element should not be null.");
-      Utils.checkArgument(labelKeysSize == labelValues.size(), "Incorrect number of labels.");
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelValues, "labelValues"), "labelValue");
+      Utils.checkArgument(
+          labelKeysSize == labelValues.size(), "Label Keys and Label Values don't have same size.");
       Utils.checkNotNull(function, "function");
     }
 
diff --git a/api/src/main/java/io/opencensus/metrics/DoubleCumulative.java b/api/src/main/java/io/opencensus/metrics/DoubleCumulative.java
new file mode 100644
index 0000000..508500f
--- /dev/null
+++ b/api/src/main/java/io/opencensus/metrics/DoubleCumulative.java
@@ -0,0 +1,201 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.metrics;
+
+import io.opencensus.internal.Utils;
+import java.util.List;
+import javax.annotation.concurrent.ThreadSafe;
+
+/**
+ * Double Cumulative metric, to report cumulative measurement of a double value. Cumulative
+ * values can go up or stay the same, but can never go down. Cumulative values cannot be negative.
+ *
+ * <p>Example 1: Create a Cumulative with default labels.
+ *
+ * <pre>{@code
+ * class YourClass {
+ *
+ *   private static final MetricRegistry metricRegistry = Metrics.getMetricRegistry();
+ *
+ *   List<LabelKey> labelKeys = Arrays.asList(LabelKey.create("Name", "desc"));
+ *
+ *   DoubleCumulative cumulative = metricRegistry.addDoubleCumulative("processed_jobs",
+ *                       "Processed jobs", "1", labelKeys);
+ *
+ *   // It is recommended to keep a reference of a point for manual operations.
+ *   DoublePoint defaultPoint = cumulative.getDefaultTimeSeries();
+ *
+ *   void doWork() {
+ *      // Your code here.
+ *      defaultPoint.add(10);
+ *   }
+ *
+ * }
+ * }</pre>
+ *
+ * <p>Example 2: You can also use labels (keys and values) to track different types of metrics.
+ *
+ * <pre>{@code
+ * class YourClass {
+ *
+ *   private static final MetricRegistry metricRegistry = Metrics.getMetricRegistry();
+ *
+ *   List<LabelKey> labelKeys = Arrays.asList(LabelKey.create("Name", "desc"));
+ *   List<LabelValue> labelValues = Arrays.asList(LabelValue.create("Inbound"));
+ *
+ *   DoubleCumulative cumulative = metricRegistry.addDoubleCumulative("processed_jobs",
+ *                       "Processed jobs", "1", labelKeys);
+ *
+ *   // It is recommended to keep a reference of a point for manual operations.
+ *   DoublePoint inboundPoint = cumulative.getOrCreateTimeSeries(labelValues);
+ *
+ *   void doSomeWork() {
+ *      // Your code here.
+ *      inboundPoint.add(15);
+ *   }
+ *
+ * }
+ * }</pre>
+ *
+ * @since 0.21
+ */
+@ThreadSafe
+public abstract class DoubleCumulative {
+
+  /**
+   * Creates a {@code TimeSeries} and returns a {@code DoublePoint} if the specified {@code
+   * labelValues} is not already associated with this cumulative, else returns an existing {@code
+   * DoublePoint}.
+   *
+   * <p>It is recommended to keep a reference to the DoublePoint instead of always calling this
+   * method for manual operations.
+   *
+   * @param labelValues the list of label values. The number of label values must be the same as
+   *     the number of label keys passed to {@link MetricRegistry#addDoubleCumulative}.
+   * @return a {@code DoublePoint}, the value of a single point in the cumulative time series.
+   * @throws NullPointerException if {@code labelValues} is null OR any element of {@code
+   *     labelValues} is null.
+   * @throws IllegalArgumentException if the number of {@code labelValues} is not equal to the
+   *     number of label keys.
+   * @since 0.21
+   */
+  public abstract DoublePoint getOrCreateTimeSeries(List<LabelValue> labelValues);
+
+  /**
+   * Returns a {@code DoublePoint} for a cumulative with all labels not set, or default labels.
+   *
+   * @return a {@code DoublePoint} for a cumulative with all labels not set, or default labels.
+   * @since 0.21
+   */
+  public abstract DoublePoint getDefaultTimeSeries();
+
+  /**
+   * Removes the {@code TimeSeries} from the cumulative metric, if it is present, i.e., references
+   * to previous {@code DoublePoint} objects become invalid (no longer part of the metric).
+   *
+   * @param labelValues the list of label values.
+   * @throws NullPointerException if {@code labelValues} is null or any element of {@code
+   *     labelValues} is null.
+   * @since 0.21
+   */
+  public abstract void removeTimeSeries(List<LabelValue> labelValues);
+
+  /**
+   * Removes all {@code TimeSeries} from the cumulative metric, i.e., references to all previous
+   * {@code DoublePoint} objects become invalid (no longer part of the metric).
+   *
+   * @since 0.21
+   */
+  public abstract void clear();
+
+  /**
+   * Returns the no-op implementation of the {@code DoubleCumulative}.
+   *
+   * @return the no-op implementation of the {@code DoubleCumulative}.
+   * @since 0.21
+   */
+  static DoubleCumulative newNoopDoubleCumulative(
+      String name, String description, String unit, List<LabelKey> labelKeys) {
+    return NoopDoubleCumulative.create(name, description, unit, labelKeys);
+  }
+
+  /**
+   * The value of a single point in the Cumulative.TimeSeries.
+   *
+   * @since 0.21
+   */
+  public abstract static class DoublePoint {
+
+    /**
+     * Adds the given value to the current value. The values cannot be negative.
+     *
+     * @param delta the value to add
+     * @since 0.21
+     */
+    public abstract void add(double delta);
+  }
+
+  /** No-op implementations of DoubleCumulative class. */
+  private static final class NoopDoubleCumulative extends DoubleCumulative {
+    private final int labelKeysSize;
+
+    static NoopDoubleCumulative create(
+        String name, String description, String unit, List<LabelKey> labelKeys) {
+      return new NoopDoubleCumulative(name, description, unit, labelKeys);
+    }
+
+    /** Creates a new {@code NoopDoubleCumulative}. */
+    NoopDoubleCumulative(String name, String description, String unit, List<LabelKey> labelKeys) {
+      Utils.checkNotNull(name, "name");
+      Utils.checkNotNull(description, "description");
+      Utils.checkNotNull(unit, "unit");
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey");
+      labelKeysSize = labelKeys.size();
+    }
+
+    @Override
+    public NoopDoublePoint getOrCreateTimeSeries(List<LabelValue> labelValues) {
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelValues, "labelValues"), "labelValue");
+      Utils.checkArgument(
+          labelKeysSize == labelValues.size(), "Label Keys and Label Values don't have same size.");
+      return NoopDoublePoint.INSTANCE;
+    }
+
+    @Override
+    public NoopDoublePoint getDefaultTimeSeries() {
+      return NoopDoublePoint.INSTANCE;
+    }
+
+    @Override
+    public void removeTimeSeries(List<LabelValue> labelValues) {
+      Utils.checkNotNull(labelValues, "labelValues");
+    }
+
+    @Override
+    public void clear() {}
+
+    /** No-op implementations of DoublePoint class. */
+    private static final class NoopDoublePoint extends DoublePoint {
+      private static final NoopDoublePoint INSTANCE = new NoopDoublePoint();
+
+      private NoopDoublePoint() {}
+
+      @Override
+      public void add(double delta) {}
+    }
+  }
+}
diff --git a/api/src/main/java/io/opencensus/metrics/DoubleGauge.java b/api/src/main/java/io/opencensus/metrics/DoubleGauge.java
index 3275997..067b976 100644
--- a/api/src/main/java/io/opencensus/metrics/DoubleGauge.java
+++ b/api/src/main/java/io/opencensus/metrics/DoubleGauge.java
@@ -171,16 +171,15 @@
       Utils.checkNotNull(name, "name");
       Utils.checkNotNull(description, "description");
       Utils.checkNotNull(unit, "unit");
-      Utils.checkListElementNotNull(
-          Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey element should not be null.");
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey");
       labelKeysSize = labelKeys.size();
     }
 
     @Override
     public NoopDoublePoint getOrCreateTimeSeries(List<LabelValue> labelValues) {
-      Utils.checkListElementNotNull(
-          Utils.checkNotNull(labelValues, "labelValues"), "labelValue element should not be null.");
-      Utils.checkArgument(labelKeysSize == labelValues.size(), "Incorrect number of labels.");
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelValues, "labelValues"), "labelValue");
+      Utils.checkArgument(
+          labelKeysSize == labelValues.size(), "Label Keys and Label Values don't have same size.");
       return NoopDoublePoint.INSTANCE;
     }
 
diff --git a/api/src/main/java/io/opencensus/metrics/LongCumulative.java b/api/src/main/java/io/opencensus/metrics/LongCumulative.java
new file mode 100644
index 0000000..80b99d0
--- /dev/null
+++ b/api/src/main/java/io/opencensus/metrics/LongCumulative.java
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.metrics;
+
+import io.opencensus.internal.Utils;
+import java.util.List;
+import javax.annotation.concurrent.ThreadSafe;
+
+/**
+ * Long Cumulative metric, to report a cumulative measurement of an int64 value. Cumulative values
+ * can go up or stay the same, but can never go down. Cumulative values cannot be negative.
+ *
+ * <p>Example 1: Create a Cumulative with default labels.
+ *
+ * <pre>{@code
+ * class YourClass {
+ *
+ *   private static final MetricRegistry metricRegistry = Metrics.getMetricRegistry();
+ *
+ *   List<LabelKey> labelKeys = Arrays.asList(LabelKey.create("Name", "desc"));
+ *
+ *   LongCumulative cumulative = metricRegistry.addLongCumulative(
+ *     "processed_jobs",
+ *     MetricOptions.builder().setDescription("Processed jobs").setUnit("1")
+ *         .setLabelKeys(labelKeys).build());
+ *
+ *   // It is recommended to keep a reference to a point for manual operations.
+ *   LongPoint defaultPoint = cumulative.getDefaultTimeSeries();
+ *
+ *   void doWork() {
+ *      // Your code here.
+ *      defaultPoint.add(10);
+ *   }
+ *
+ * }
+ * }</pre>
+ *
+ * <p>Example 2: You can also use labels (keys and values) to track different types of metrics.
+ *
+ * <pre>{@code
+ * class YourClass {
+ *
+ *   private static final MetricRegistry metricRegistry = Metrics.getMetricRegistry();
+ *
+ *   List<LabelKey> labelKeys = Arrays.asList(LabelKey.create("Name", "desc"));
+ *   List<LabelValue> labelValues = Arrays.asList(LabelValue.create("Inbound"));
+ *
+ *   LongCumulative cumulative = metricRegistry.addLongCumulative(
+ *     "processed_jobs",
+ *     MetricOptions.builder().setDescription("Processed jobs").setUnit("1")
+ *         .setLabelKeys(labelKeys).build());
+ *
+ *   // It is recommended to keep a reference to a point for manual operations.
+ *   LongPoint inboundPoint = cumulative.getOrCreateTimeSeries(labelValues);
+ *
+ *   void doSomeWork() {
+ *      // Your code here.
+ *      inboundPoint.add(15);
+ *   }
+ *
+ * }
+ * }</pre>
+ *
+ * @since 0.21
+ */
+@ThreadSafe
+public abstract class LongCumulative {
+
+  /**
+   * Creates a {@code TimeSeries} and returns a {@code LongPoint} if the specified {@code
+   * labelValues} is not already associated with this cumulative, else returns an existing {@code
+   * LongPoint}.
+   *
+   * <p>It is recommended to keep a reference to the LongPoint instead of always calling this method
+   * for manual operations.
+   *
+   * @param labelValues the list of label values. The number of label values must be the same as
+   *     the number of label keys passed to {@link MetricRegistry#addLongCumulative}.
+   * @return a {@code LongPoint} representing the value of a single point in this cumulative.
+   * @throws NullPointerException if {@code labelValues} is null OR any element of {@code
+   *     labelValues} is null.
+   * @throws IllegalArgumentException if the number of {@code labelValues} is not equal to the
+   *     number of label keys passed to {@link MetricRegistry#addLongCumulative}.
+   * @since 0.21
+   */
+  public abstract LongPoint getOrCreateTimeSeries(List<LabelValue> labelValues);
+
+  /**
+   * Returns a {@code LongPoint} for a cumulative with all labels not set, or default labels.
+   *
+   * @return a {@code LongPoint} for a cumulative with all labels not set, or default labels.
+   * @since 0.21
+   */
+  public abstract LongPoint getDefaultTimeSeries();
+
+  /**
+   * Removes the {@code TimeSeries} from the cumulative metric, if it is present, i.e. references to
+   * previous {@code LongPoint} objects become invalid (no longer part of the metric).
+   *
+   * @param labelValues the list of label values.
+   * @throws NullPointerException if {@code labelValues} is null.
+   * @since 0.21
+   */
+  public abstract void removeTimeSeries(List<LabelValue> labelValues);
+
+  /**
+   * Removes all {@code TimeSeries} from the cumulative metric, i.e. references to all previous
+   * {@code LongPoint} objects become invalid (no longer part of the metric).
+   *
+   * @since 0.21
+   */
+  public abstract void clear();
+
+  /**
+   * Returns the no-op implementation of the {@code LongCumulative}.
+   *
+   * @return the no-op implementation of the {@code LongCumulative}.
+   * @since 0.21
+   */
+  static LongCumulative newNoopLongCumulative(
+      String name, String description, String unit, List<LabelKey> labelKeys) {
+    return NoopLongCumulative.create(name, description, unit, labelKeys);
+  }
+
+  /**
+   * The value of a single point in the Cumulative.TimeSeries.
+   *
+   * @since 0.21
+   */
+  public abstract static class LongPoint {
+
+    /**
+     * Adds the given value to the current value. The values cannot be negative.
+     *
+     * @param delta the value to add
+     * @since 0.21
+     */
+    public abstract void add(long delta);
+  }
+
+  /** No-op implementations of LongCumulative class. */
+  private static final class NoopLongCumulative extends LongCumulative {
+    private final int labelKeysSize;
+
+    static NoopLongCumulative create(
+        String name, String description, String unit, List<LabelKey> labelKeys) {
+      return new NoopLongCumulative(name, description, unit, labelKeys);
+    }
+
+    /** Creates a new {@code NoopLongCumulative}. */
+    NoopLongCumulative(String name, String description, String unit, List<LabelKey> labelKeys) {
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey");
+      labelKeysSize = labelKeys.size();
+    }
+
+    @Override
+    public NoopLongPoint getOrCreateTimeSeries(List<LabelValue> labelValues) {
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelValues, "labelValues"), "labelValue");
+      Utils.checkArgument(
+          labelKeysSize == labelValues.size(), "Label Keys and Label Values don't have same size.");
+      return NoopLongPoint.INSTANCE;
+    }
+
+    @Override
+    public NoopLongPoint getDefaultTimeSeries() {
+      return NoopLongPoint.INSTANCE;
+    }
+
+    @Override
+    public void removeTimeSeries(List<LabelValue> labelValues) {
+      Utils.checkNotNull(labelValues, "labelValues");
+    }
+
+    @Override
+    public void clear() {}
+
+    /** No-op implementations of LongPoint class. */
+    private static final class NoopLongPoint extends LongPoint {
+      private static final NoopLongPoint INSTANCE = new NoopLongPoint();
+
+      private NoopLongPoint() {}
+
+      @Override
+      public void add(long delta) {}
+    }
+  }
+}
diff --git a/api/src/main/java/io/opencensus/metrics/LongGauge.java b/api/src/main/java/io/opencensus/metrics/LongGauge.java
index 1d4489c..254820e 100644
--- a/api/src/main/java/io/opencensus/metrics/LongGauge.java
+++ b/api/src/main/java/io/opencensus/metrics/LongGauge.java
@@ -170,9 +170,9 @@
 
     @Override
     public NoopLongPoint getOrCreateTimeSeries(List<LabelValue> labelValues) {
-      Utils.checkListElementNotNull(
-          Utils.checkNotNull(labelValues, "labelValues"), "labelValue element should not be null.");
-      Utils.checkArgument(labelKeysSize == labelValues.size(), "Incorrect number of labels.");
+      Utils.checkListElementNotNull(Utils.checkNotNull(labelValues, "labelValues"), "labelValue");
+      Utils.checkArgument(
+          labelKeysSize == labelValues.size(), "Label Keys and Label Values don't have same size.");
       return NoopLongPoint.INSTANCE;
     }
 
diff --git a/api/src/main/java/io/opencensus/metrics/MetricOptions.java b/api/src/main/java/io/opencensus/metrics/MetricOptions.java
new file mode 100644
index 0000000..ac7cd52
--- /dev/null
+++ b/api/src/main/java/io/opencensus/metrics/MetricOptions.java
@@ -0,0 +1,182 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.metrics;
+
+import com.google.auto.value.AutoValue;
+import io.opencensus.internal.Utils;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * Options for every metric added to the {@link MetricRegistry}.
+ *
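+ * <p>Example (an illustrative sketch; the metric description and label names are arbitrary):
+ *
+ * <pre>{@code
+ * MetricOptions options =
+ *     MetricOptions.builder()
+ *         .setDescription("Processed jobs")
+ *         .setUnit("1")
+ *         .setLabelKeys(Collections.singletonList(LabelKey.create("Name", "desc")))
+ *         .setConstantLabels(
+ *             Collections.singletonMap(LabelKey.create("host", "desc"), LabelValue.create("host1")))
+ *         .build();
+ * }</pre>
+ *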
+ * @since 0.20
+ */
+@Immutable
+@AutoValue
+public abstract class MetricOptions {
+
+  /**
+   * Returns the description of the Metric.
+   *
+   * <p>Default value is {@code ""}.
+   *
+   * @return the description of the Metric.
+   * @since 0.20
+   */
+  public abstract String getDescription();
+
+  /**
+   * Returns the unit of the Metric.
+   *
+   * <p>Default value is {@code "1"}.
+   *
+   * @return the unit of the Metric.
+   * @since 0.20
+   */
+  public abstract String getUnit();
+
+  /**
+   * Returns the list of label keys for the Metric.
+   *
+   * <p>Default value is {@link Collections#emptyList()}.
+   *
+   * @return the list of label keys for the Metric.
+   * @since 0.20
+   */
+  public abstract List<LabelKey> getLabelKeys();
+
+  /**
+   * Returns the map of constant labels (they will be added to all the TimeSeries) for the Metric.
+   *
+   * <p>Default value is {@link Collections#emptyMap()}.
+   *
+   * @return the map of constant labels for the Metric.
+   * @since 0.21
+   */
+  public abstract Map<LabelKey, LabelValue> getConstantLabels();
+
+  /**
+   * Returns a new {@link Builder} with default options.
+   *
+   * @return a new {@code Builder} with default options.
+   * @since 0.20
+   */
+  public static Builder builder() {
+    return new AutoValue_MetricOptions.Builder()
+        .setDescription("")
+        .setUnit("1")
+        .setLabelKeys(Collections.<LabelKey>emptyList())
+        .setConstantLabels(Collections.<LabelKey, LabelValue>emptyMap());
+  }
+
+  /**
+   * Builder for {@link MetricOptions}.
+   *
+   * @since 0.20
+   */
+  @AutoValue.Builder
+  public abstract static class Builder {
+
+    /**
+     * Sets the description of the Metric.
+     *
+     * @param description the description of the Metric.
+     * @return this.
+     * @since 0.20
+     */
+    public abstract Builder setDescription(String description);
+
+    /**
+     * Sets the unit of the Metric.
+     *
+     * @param unit the unit of the Metric.
+     * @return this.
+     * @since 0.20
+     */
+    public abstract Builder setUnit(String unit);
+
+    /**
+     * Sets the list of label keys for the Metric.
+     *
+     * @param labelKeys the list of label keys for the Metric.
+     * @return this.
+     * @since 0.20
+     */
+    public abstract Builder setLabelKeys(List<LabelKey> labelKeys);
+
+    /**
+     * Sets the map of constant labels (they will be added to all the TimeSeries) for the Metric.
+     *
+     * @param constantLabels the map of constant labels for the Metric.
+     * @return this.
+     * @since 0.21
+     */
+    public abstract Builder setConstantLabels(Map<LabelKey, LabelValue> constantLabels);
+
+    abstract Map<LabelKey, LabelValue> getConstantLabels();
+
+    abstract List<LabelKey> getLabelKeys();
+
+    abstract MetricOptions autoBuild();
+
+    /**
+     * Builds and returns a {@code MetricOptions} with the desired options.
+     *
+     * @return a {@code MetricOptions} with the desired options.
+     * @throws NullPointerException if {@code description} OR {@code unit} is null, OR {@code
+     *     labelKeys} is null OR any element of {@code labelKeys} is null, OR {@code constantLabels}
+     *     is null OR any element of {@code constantLabels} is null.
+     * @throws IllegalArgumentException if any {@code LabelKey} from the {@code labelKeys} is also in
+     *     the {@code constantLabels}.
+     * @since 0.20
+     */
+    public MetricOptions build() {
+      setLabelKeys(Collections.unmodifiableList(new ArrayList<LabelKey>(getLabelKeys())));
+      setConstantLabels(
+          Collections.unmodifiableMap(
+              new LinkedHashMap<LabelKey, LabelValue>(getConstantLabels())));
+      MetricOptions options = autoBuild();
+      Utils.checkListElementNotNull(options.getLabelKeys(), "labelKeys elements");
+      Utils.checkMapElementNotNull(options.getConstantLabels(), "constantLabels elements");
+
+      HashSet<String> labelKeyNames = new HashSet<String>();
+      for (LabelKey labelKey : options.getLabelKeys()) {
+        if (labelKeyNames.contains(labelKey.getKey())) {
+          throw new IllegalArgumentException("Invalid LabelKey in labelKeys");
+        }
+        labelKeyNames.add(labelKey.getKey());
+      }
+      for (Map.Entry<LabelKey, LabelValue> constantLabel : options.getConstantLabels().entrySet()) {
+        if (labelKeyNames.contains(constantLabel.getKey().getKey())) {
+          throw new IllegalArgumentException("Invalid LabelKey in constantLabels");
+        }
+        labelKeyNames.add(constantLabel.getKey().getKey());
+      }
+      return options;
+    }
+
+    Builder() {}
+  }
+
+  MetricOptions() {}
+}
diff --git a/api/src/main/java/io/opencensus/metrics/MetricRegistry.java b/api/src/main/java/io/opencensus/metrics/MetricRegistry.java
index 5be1559..a0be194 100644
--- a/api/src/main/java/io/opencensus/metrics/MetricRegistry.java
+++ b/api/src/main/java/io/opencensus/metrics/MetricRegistry.java
@@ -32,72 +32,190 @@
 @ExperimentalApi
 public abstract class MetricRegistry {
   /**
-   * Builds a new long gauge to be added to the registry. This is more convenient form when you want
-   * to manually increase and decrease values as per your service requirements.
+   * This will be removed in 0.22.
    *
-   * @param name the name of the metric.
-   * @param description the description of the metric.
-   * @param unit the unit of the metric.
-   * @param labelKeys the list of the label keys.
-   * @throws NullPointerException if {@code labelKeys} is null OR any element of {@code labelKeys}
-   *     is null OR {@code name}, {@code description}, {@code unit} is null.
-   * @throws IllegalArgumentException if different metric with the same name already registered.
+   * @deprecated since 0.20, use {@link #addLongGauge(String, MetricOptions)}.
    * @since 0.17
    */
-  @ExperimentalApi
-  public abstract LongGauge addLongGauge(
-      String name, String description, String unit, List<LabelKey> labelKeys);
+  @Deprecated
+  public LongGauge addLongGauge(
+      String name, String description, String unit, List<LabelKey> labelKeys) {
+    return addLongGauge(
+        name,
+        MetricOptions.builder()
+            .setDescription(description)
+            .setUnit(unit)
+            .setLabelKeys(labelKeys)
+            .build());
+  }
 
   /**
-   * Builds a new double gauge to be added to the registry. This is more convenient form when you
+   * Builds a new long gauge to be added to the registry. This is a more convenient form when you
    * want to manually increase and decrease values as per your service requirements.
    *
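+   * <p>Example (an illustrative sketch; {@code metricRegistry} is an instance obtained from {@code
+   * Metrics.getMetricRegistry()}):
+   *
+   * <pre>{@code
+   * LongGauge gauge = metricRegistry.addLongGauge(
+   *     "queue_size", MetricOptions.builder().setDescription("Pending jobs").setUnit("1").build());
+   * }</pre>
+   *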
    * @param name the name of the metric.
-   * @param description the description of the metric.
-   * @param unit the unit of the metric.
-   * @param labelKeys the list of the label keys.
-   * @throws NullPointerException if {@code labelKeys} is null OR any element of {@code labelKeys}
-   *     is null OR {@code name}, {@code description}, {@code unit} is null.
+   * @param options the options for the metric.
+   * @return a {@code LongGauge}.
+   * @throws NullPointerException if {@code name} is null.
    * @throws IllegalArgumentException if different metric with the same name already registered.
-   * @since 0.17
+   * @since 0.20
    */
   @ExperimentalApi
-  public abstract DoubleGauge addDoubleGauge(
-      String name, String description, String unit, List<LabelKey> labelKeys);
+  public abstract LongGauge addLongGauge(String name, MetricOptions options);
 
   /**
-   * Builds a new derived long gauge to be added to the registry. This is more convenient form when
-   * you want to define a gauge by executing a {@link ToLongFunction} on an object.
+   * This will be removed in 0.22.
+   *
+   * @deprecated since 0.20, use {@link #addDoubleGauge(String, MetricOptions)}.
+   * @since 0.17
+   */
+  @Deprecated
+  public DoubleGauge addDoubleGauge(
+      String name, String description, String unit, List<LabelKey> labelKeys) {
+    return addDoubleGauge(
+        name,
+        MetricOptions.builder()
+            .setDescription(description)
+            .setUnit(unit)
+            .setLabelKeys(labelKeys)
+            .build());
+  }
+
+  /**
+   * Builds a new double gauge to be added to the registry. This is a more convenient form when you
+   * want to manually increase and decrease values as per your service requirements.
    *
    * @param name the name of the metric.
-   * @param description the description of the metric.
-   * @param unit the unit of the metric.
-   * @param labelKeys the list of the label keys.
-   * @throws NullPointerException if {@code labelKeys} is null OR any element of {@code labelKeys}
-   *     is null OR {@code name}, {@code description}, {@code unit} is null.
+   * @param options the options for the metric.
+   * @return a {@code DoubleGauge}.
+   * @throws NullPointerException if {@code name} is null.
+   * @throws IllegalArgumentException if different metric with the same name already registered.
+   * @since 0.20
+   */
+  @ExperimentalApi
+  public abstract DoubleGauge addDoubleGauge(String name, MetricOptions options);
+
+  /**
+   * This will be removed in 0.22.
+   *
+   * @deprecated since 0.20, use {@link #addDerivedLongGauge(String, MetricOptions)}.
+   * @since 0.17
+   */
+  @Deprecated
+  public DerivedLongGauge addDerivedLongGauge(
+      String name, String description, String unit, List<LabelKey> labelKeys) {
+    return addDerivedLongGauge(
+        name,
+        MetricOptions.builder()
+            .setDescription(description)
+            .setUnit(unit)
+            .setLabelKeys(labelKeys)
+            .build());
+  }
+
+  /**
+   * Builds a new derived long gauge to be added to the registry. This is a more convenient form
+   * when you want to define a gauge by executing a {@link ToLongFunction} on an object.
+   *
+   * @param name the name of the metric.
+   * @param options the options for the metric.
+   * @return a {@code DerivedLongGauge}.
+   * @throws NullPointerException if {@code name} is null.
    * @throws IllegalArgumentException if different metric with the same name already registered.
    * @since 0.17
    */
   @ExperimentalApi
-  public abstract DerivedLongGauge addDerivedLongGauge(
-      String name, String description, String unit, List<LabelKey> labelKeys);
+  public abstract DerivedLongGauge addDerivedLongGauge(String name, MetricOptions options);
 
   /**
-   * Builds a new derived double gauge to be added to the registry. This is more convenient form
+   * This will be removed in 0.22.
+   *
+   * @deprecated since 0.20, use {@link #addDerivedDoubleGauge(String, MetricOptions)}.
+   * @since 0.17
+   */
+  @Deprecated
+  public DerivedDoubleGauge addDerivedDoubleGauge(
+      String name, String description, String unit, List<LabelKey> labelKeys) {
+    return addDerivedDoubleGauge(
+        name,
+        MetricOptions.builder()
+            .setDescription(description)
+            .setUnit(unit)
+            .setLabelKeys(labelKeys)
+            .build());
+  }
+
+  /**
+   * Builds a new derived double gauge to be added to the registry. This is a more convenient form
    * when you want to define a gauge by executing a {@link ToDoubleFunction} on an object.
    *
    * @param name the name of the metric.
-   * @param description the description of the metric.
-   * @param unit the unit of the metric.
-   * @param labelKeys the list of the label keys.
-   * @throws NullPointerException if {@code labelKeys} is null OR any element of {@code labelKeys}
-   *     is null OR {@code name}, {@code description}, {@code unit} is null.
+   * @param options the options for the metric.
+   * @return a {@code DerivedDoubleGauge}.
+   * @throws NullPointerException if {@code name} is null.
    * @throws IllegalArgumentException if different metric with the same name already registered.
    * @since 0.17
    */
   @ExperimentalApi
-  public abstract DerivedDoubleGauge addDerivedDoubleGauge(
-      String name, String description, String unit, List<LabelKey> labelKeys);
+  public abstract DerivedDoubleGauge addDerivedDoubleGauge(String name, MetricOptions options);
+
+  /**
+   * Builds a new long cumulative to be added to the registry. This is a more convenient form when
+   * you want to manually increase values as per your service requirements.
+   *
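+   * <p>Example (an illustrative sketch; {@code metricRegistry} is an instance obtained from {@code
+   * Metrics.getMetricRegistry()}):
+   *
+   * <pre>{@code
+   * LongCumulative jobs = metricRegistry.addLongCumulative(
+   *     "processed_jobs", MetricOptions.builder().setDescription("Processed jobs").build());
+   * jobs.getDefaultTimeSeries().add(1);
+   * }</pre>
+   *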
+   * @param name the name of the metric.
+   * @param options the options for the metric.
+   * @return a {@code LongCumulative}.
+   * @throws NullPointerException if {@code name} is null.
+   * @throws IllegalArgumentException if different metric with the same name already registered.
+   * @since 0.21
+   */
+  @ExperimentalApi
+  public abstract LongCumulative addLongCumulative(String name, MetricOptions options);
+
+  /**
+   * Builds a new double cumulative to be added to the registry. This is a more convenient form when
+   * you want to manually increase values as per your service requirements.
+   *
+   * @param name the name of the metric.
+   * @param options the options for the metric.
+   * @return a {@code DoubleCumulative}.
+   * @throws NullPointerException if {@code name} is null.
+   * @throws IllegalArgumentException if different metric with the same name already registered.
+   * @since 0.21
+   */
+  @ExperimentalApi
+  public abstract DoubleCumulative addDoubleCumulative(String name, MetricOptions options);
+
+  /**
+   * Builds a new derived long cumulative to be added to the registry. This is a more convenient
+   * form when you want to define a cumulative by executing a {@link ToLongFunction} on an object.
+   *
+   * @param name the name of the metric.
+   * @param options the options for the metric.
+   * @return a {@code DerivedLongCumulative}.
+   * @throws NullPointerException if {@code name} is null.
+   * @throws IllegalArgumentException if different metric with the same name already registered.
+   * @since 0.21
+   */
+  @ExperimentalApi
+  public abstract DerivedLongCumulative addDerivedLongCumulative(
+      String name, MetricOptions options);
+
+  /**
+   * Builds a new derived double cumulative to be added to the registry. This is a more convenient
+   * form when you want to define a cumulative by executing a {@link ToDoubleFunction} on an object.
+   *
+   * @param name the name of the metric.
+   * @param options the options for the metric.
+   * @return a {@code DerivedDoubleCumulative}.
+   * @throws NullPointerException if {@code name} is null.
+   * @throws IllegalArgumentException if different metric with the same name already registered.
+   * @since 0.21
+   */
+  @ExperimentalApi
+  public abstract DerivedDoubleCumulative addDerivedDoubleCumulative(
+      String name, MetricOptions options);
 
   static MetricRegistry newNoopMetricRegistry() {
     return new NoopMetricRegistry();
@@ -106,51 +224,75 @@
   private static final class NoopMetricRegistry extends MetricRegistry {
 
     @Override
-    public LongGauge addLongGauge(
-        String name, String description, String unit, List<LabelKey> labelKeys) {
-      Utils.checkListElementNotNull(
-          Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey element should not be null.");
+    public LongGauge addLongGauge(String name, MetricOptions options) {
       return LongGauge.newNoopLongGauge(
           Utils.checkNotNull(name, "name"),
-          Utils.checkNotNull(description, "description"),
-          Utils.checkNotNull(unit, "unit"),
-          labelKeys);
+          options.getDescription(),
+          options.getUnit(),
+          options.getLabelKeys());
     }
 
     @Override
-    public DoubleGauge addDoubleGauge(
-        String name, String description, String unit, List<LabelKey> labelKeys) {
-      Utils.checkListElementNotNull(
-          Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey element should not be null.");
+    public DoubleGauge addDoubleGauge(String name, MetricOptions options) {
       return DoubleGauge.newNoopDoubleGauge(
           Utils.checkNotNull(name, "name"),
-          Utils.checkNotNull(description, "description"),
-          Utils.checkNotNull(unit, "unit"),
-          labelKeys);
+          options.getDescription(),
+          options.getUnit(),
+          options.getLabelKeys());
     }
 
     @Override
-    public DerivedLongGauge addDerivedLongGauge(
-        String name, String description, String unit, List<LabelKey> labelKeys) {
-      Utils.checkListElementNotNull(
-          Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey element should not be null.");
+    public DerivedLongGauge addDerivedLongGauge(String name, MetricOptions options) {
       return DerivedLongGauge.newNoopDerivedLongGauge(
           Utils.checkNotNull(name, "name"),
-          Utils.checkNotNull(description, "description"),
-          Utils.checkNotNull(unit, "unit"),
-          labelKeys);
+          options.getDescription(),
+          options.getUnit(),
+          options.getLabelKeys());
     }
 
     @Override
-    public DerivedDoubleGauge addDerivedDoubleGauge(
-        String name, String description, String unit, List<LabelKey> labelKeys) {
-      Utils.checkListElementNotNull(
-          Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey element should not be null.");
+    public DerivedDoubleGauge addDerivedDoubleGauge(String name, MetricOptions options) {
       return DerivedDoubleGauge.newNoopDerivedDoubleGauge(
           Utils.checkNotNull(name, "name"),
-          Utils.checkNotNull(description, "description"),
-          Utils.checkNotNull(unit, "unit"),
-          labelKeys);
+          options.getDescription(),
+          options.getUnit(),
+          options.getLabelKeys());
+    }
+
+    @Override
+    public LongCumulative addLongCumulative(String name, MetricOptions options) {
+      return LongCumulative.newNoopLongCumulative(
+          Utils.checkNotNull(name, "name"),
+          options.getDescription(),
+          options.getUnit(),
+          options.getLabelKeys());
+    }
+
+    @Override
+    public DoubleCumulative addDoubleCumulative(String name, MetricOptions options) {
+      return DoubleCumulative.newNoopDoubleCumulative(
+          Utils.checkNotNull(name, "name"),
+          options.getDescription(),
+          options.getUnit(),
+          options.getLabelKeys());
+    }
+
+    @Override
+    public DerivedLongCumulative addDerivedLongCumulative(String name, MetricOptions options) {
+      return DerivedLongCumulative.newNoopDerivedLongCumulative(
+          Utils.checkNotNull(name, "name"),
+          options.getDescription(),
+          options.getUnit(),
+          options.getLabelKeys());
+    }
+
+    @Override
+    public DerivedDoubleCumulative addDerivedDoubleCumulative(String name, MetricOptions options) {
+      return DerivedDoubleCumulative.newNoopDerivedDoubleCumulative(
+          Utils.checkNotNull(name, "name"),
+          options.getDescription(),
+          options.getUnit(),
+          options.getLabelKeys());
     }
   }
 }
diff --git a/api/src/main/java/io/opencensus/metrics/data/AttachmentValue.java b/api/src/main/java/io/opencensus/metrics/data/AttachmentValue.java
new file mode 100644
index 0000000..fd48aa1
--- /dev/null
+++ b/api/src/main/java/io/opencensus/metrics/data/AttachmentValue.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.metrics.data;
+
+import com.google.auto.value.AutoValue;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * The value of {@link Exemplar} attachment.
+ *
+ * <p>In the Stats API we provide only one subclass, {@link AttachmentValueString}. No other
+ * subclasses are added because we don't want to introduce dependencies on other libraries, for
+ * example the Tracing APIs.
+ *
+ * <p>Other packages are free to extend this class to hold specific information. As an example, see
+ * {@code io.opencensus.contrib.exemplar.util.AttachmentValueSpanContext}.
+ *
+ * @since 0.20
+ */
+public abstract class AttachmentValue {
+
+  /**
+   * Returns the string attachment value.
+   *
+   * @return the string attachment value.
+   * @since 0.20
+   */
+  public abstract String getValue();
+
+  /**
+   * String {@link AttachmentValue}.
+   *
+   * @since 0.20
+   */
+  @AutoValue
+  @Immutable
+  public abstract static class AttachmentValueString extends AttachmentValue {
+
+    AttachmentValueString() {}
+
+    /**
+     * Creates an {@link AttachmentValueString}.
+     *
+     * @param value the string value.
+     * @return an {@code AttachmentValueString}.
+     * @since 0.20
+     */
+    public static AttachmentValueString create(String value) {
+      return new AutoValue_AttachmentValue_AttachmentValueString(value);
+    }
+  }
+}
diff --git a/api/src/main/java/io/opencensus/metrics/data/Exemplar.java b/api/src/main/java/io/opencensus/metrics/data/Exemplar.java
new file mode 100644
index 0000000..95119f5
--- /dev/null
+++ b/api/src/main/java/io/opencensus/metrics/data/Exemplar.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.metrics.data;
+
+import static io.opencensus.internal.Utils.checkNotNull;
+
+import com.google.auto.value.AutoValue;
+import io.opencensus.common.Timestamp;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * An example point that may be used to annotate aggregated distribution values, associated with a
+ * histogram bucket.
+ *
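+ * <p>Example (an illustrative sketch; the attachment key and the values are arbitrary):
+ *
+ * <pre>{@code
+ * Map<String, AttachmentValue> attachments = new HashMap<String, AttachmentValue>();
+ * attachments.put("SpanId", AttachmentValueString.create("0123456789abcdef"));
+ * Exemplar exemplar =
+ *     Exemplar.create(1.2, Timestamp.fromMillis(System.currentTimeMillis()), attachments);
+ * }</pre>
+ *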
+ * @since 0.20
+ */
+@Immutable
+@AutoValue
+public abstract class Exemplar {
+
+  Exemplar() {}
+
+  /**
+   * Returns value of the {@link Exemplar} point.
+   *
+   * @return value of the {@code Exemplar} point.
+   * @since 0.20
+   */
+  public abstract double getValue();
+
+  /**
+   * Returns the time that this {@link Exemplar}'s value was recorded.
+   *
+   * @return the time that this {@code Exemplar}'s value was recorded.
+   * @since 0.20
+   */
+  public abstract Timestamp getTimestamp();
+
+  /**
+   * Returns the contextual information about the example value.
+   *
+   * @return the contextual information about the example value.
+   * @since 0.20
+   */
+  public abstract Map<String, AttachmentValue> getAttachments();
+
+  /**
+   * Creates an {@link Exemplar}.
+   *
+   * @param value value of the {@link Exemplar} point.
+   * @param timestamp the time that this {@code Exemplar}'s value was recorded.
+   * @param attachments the contextual information about the example value.
+   * @return an {@code Exemplar}.
+   * @since 0.20
+   */
+  public static Exemplar create(
+      double value, Timestamp timestamp, Map<String, AttachmentValue> attachments) {
+    checkNotNull(attachments, "attachments");
+    Map<String, AttachmentValue> attachmentsCopy =
+        Collections.unmodifiableMap(new HashMap<String, AttachmentValue>(attachments));
+    for (Entry<String, AttachmentValue> entry : attachmentsCopy.entrySet()) {
+      checkNotNull(entry.getKey(), "key of attachments");
+      checkNotNull(entry.getValue(), "value of attachments");
+    }
+    return new AutoValue_Exemplar(value, timestamp, attachmentsCopy);
+  }
+}
diff --git a/api/src/main/java/io/opencensus/metrics/data/package-info.java b/api/src/main/java/io/opencensus/metrics/data/package-info.java
new file mode 100644
index 0000000..a58b2bb
--- /dev/null
+++ b/api/src/main/java/io/opencensus/metrics/data/package-info.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This package describes common data models in Metrics that are shared across multiple packages.
+ *
+ * <p>WARNING: Currently all the public classes under this package are marked as {@link
+ * io.opencensus.common.ExperimentalApi}. The classes and APIs under {@link io.opencensus.metrics}
+ * are likely to get backwards-incompatible updates in the future. DO NOT USE except for
+ * experimental purposes.
+ */
+@io.opencensus.common.ExperimentalApi
+package io.opencensus.metrics.data;
diff --git a/api/src/main/java/io/opencensus/metrics/export/Distribution.java b/api/src/main/java/io/opencensus/metrics/export/Distribution.java
index d55f101..b08fa3e 100644
--- a/api/src/main/java/io/opencensus/metrics/export/Distribution.java
+++ b/api/src/main/java/io/opencensus/metrics/export/Distribution.java
@@ -19,14 +19,11 @@
 import com.google.auto.value.AutoValue;
 import io.opencensus.common.ExperimentalApi;
 import io.opencensus.common.Function;
-import io.opencensus.common.Timestamp;
 import io.opencensus.internal.Utils;
+import io.opencensus.metrics.data.Exemplar;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
 import javax.annotation.Nullable;
 import javax.annotation.concurrent.Immutable;
 
@@ -284,62 +281,4 @@
     @Nullable
     public abstract Exemplar getExemplar();
   }
-
-  /**
-   * An example point that may be used to annotate aggregated distribution values, associated with a
-   * histogram bucket.
-   *
-   * @since 0.17
-   */
-  @Immutable
-  @AutoValue
-  public abstract static class Exemplar {
-
-    Exemplar() {}
-
-    /**
-     * Returns value of the {@link Exemplar} point.
-     *
-     * @return value of the {@code Exemplar} point.
-     * @since 0.17
-     */
-    public abstract double getValue();
-
-    /**
-     * Returns the time that this {@link Exemplar}'s value was recorded.
-     *
-     * @return the time that this {@code Exemplar}'s value was recorded.
-     * @since 0.17
-     */
-    public abstract Timestamp getTimestamp();
-
-    /**
-     * Returns the contextual information about the example value, represented as a string map.
-     *
-     * @return the contextual information about the example value.
-     * @since 0.17
-     */
-    public abstract Map<String, String> getAttachments();
-
-    /**
-     * Creates an {@link Exemplar}.
-     *
-     * @param value value of the {@link Exemplar} point.
-     * @param timestamp the time that this {@code Exemplar}'s value was recorded.
-     * @param attachments the contextual information about the example value.
-     * @return an {@code Exemplar}.
-     * @since 0.17
-     */
-    public static Exemplar create(
-        double value, Timestamp timestamp, Map<String, String> attachments) {
-      Utils.checkNotNull(attachments, "attachments");
-      Map<String, String> attachmentsCopy =
-          Collections.unmodifiableMap(new HashMap<String, String>(attachments));
-      for (Entry<String, String> entry : attachmentsCopy.entrySet()) {
-        Utils.checkNotNull(entry.getKey(), "key of attachments");
-        Utils.checkNotNull(entry.getValue(), "value of attachments");
-      }
-      return new AutoValue_Distribution_Exemplar(value, timestamp, attachmentsCopy);
-    }
-  }
 }
diff --git a/api/src/main/java/io/opencensus/metrics/export/MetricDescriptor.java b/api/src/main/java/io/opencensus/metrics/export/MetricDescriptor.java
index a4629f8..b7e5ab8 100644
--- a/api/src/main/java/io/opencensus/metrics/export/MetricDescriptor.java
+++ b/api/src/main/java/io/opencensus/metrics/export/MetricDescriptor.java
@@ -50,8 +50,7 @@
    */
   public static MetricDescriptor create(
       String name, String description, String unit, Type type, List<LabelKey> labelKeys) {
-    Utils.checkNotNull(labelKeys, "labelKeys");
-    Utils.checkListElementNotNull(labelKeys, "labelKey");
+    Utils.checkListElementNotNull(Utils.checkNotNull(labelKeys, "labelKeys"), "labelKey");
     return new AutoValue_MetricDescriptor(
         name,
         description,
diff --git a/api/src/main/java/io/opencensus/metrics/export/Summary.java b/api/src/main/java/io/opencensus/metrics/export/Summary.java
index c82ca96..e50ef42 100644
--- a/api/src/main/java/io/opencensus/metrics/export/Summary.java
+++ b/api/src/main/java/io/opencensus/metrics/export/Summary.java
@@ -125,8 +125,8 @@
     public static Snapshot create(
         @Nullable Long count, @Nullable Double sum, List<ValueAtPercentile> valueAtPercentiles) {
       checkCountAndSum(count, sum);
-      Utils.checkNotNull(valueAtPercentiles, "valueAtPercentiles");
-      Utils.checkListElementNotNull(valueAtPercentiles, "value in valueAtPercentiles");
+      Utils.checkListElementNotNull(
+          Utils.checkNotNull(valueAtPercentiles, "valueAtPercentiles"), "valueAtPercentile");
       return new AutoValue_Summary_Snapshot(
           count,
           sum,
diff --git a/api/src/main/java/io/opencensus/metrics/export/TimeSeries.java b/api/src/main/java/io/opencensus/metrics/export/TimeSeries.java
index bfaeae9..4325fae 100644
--- a/api/src/main/java/io/opencensus/metrics/export/TimeSeries.java
+++ b/api/src/main/java/io/opencensus/metrics/export/TimeSeries.java
@@ -52,13 +52,23 @@
    */
   public static TimeSeries create(
       List<LabelValue> labelValues, List<Point> points, @Nullable Timestamp startTimestamp) {
-    Utils.checkNotNull(points, "points");
-    Utils.checkListElementNotNull(points, "point");
+    Utils.checkListElementNotNull(Utils.checkNotNull(points, "points"), "point");
     return createInternal(
         labelValues, Collections.unmodifiableList(new ArrayList<Point>(points)), startTimestamp);
   }
 
   /**
+   * Creates a {@link TimeSeries} with an empty list of points.
+   *
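+   * <p>Example (an illustrative sketch; assumes {@code labelValues} and {@code now} are defined by
+   * the caller):
+   *
+   * <pre>{@code
+   * TimeSeries timeSeries = TimeSeries.create(labelValues);
+   * timeSeries = timeSeries.setPoint(Point.create(Value.longValue(42), now));
+   * }</pre>
+   *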
+   * @param labelValues the {@code LabelValue}s that uniquely identify this {@code TimeSeries}.
+   * @return a {@code TimeSeries}.
+   * @since 0.17
+   */
+  public static TimeSeries create(List<LabelValue> labelValues) {
+    return createInternal(labelValues, Collections.<Point>emptyList(), null);
+  }
+
+  /**
    * Creates a {@link TimeSeries}.
    *
    * @param labelValues the {@code LabelValue}s that uniquely identify this {@code TimeSeries}.
@@ -75,6 +85,18 @@
   }
 
   /**
+   * Returns a new {@link TimeSeries} with the same label values and the given single data {@code
+   * Point}; this {@code TimeSeries} is left unchanged.
+   *
+   * @param point the single data {@code Point} of the returned {@code TimeSeries}.
+   * @return a new {@code TimeSeries} with the given point.
+   * @since 0.17
+   */
+  public TimeSeries setPoint(Point point) {
+    Utils.checkNotNull(point, "point");
+    return new AutoValue_TimeSeries(getLabelValues(), Collections.singletonList(point), null);
+  }
+
+  /**
    * Creates a {@link TimeSeries}.
    *
    * @param labelValues the {@code LabelValue}s that uniquely identify this {@code TimeSeries}.
@@ -86,8 +108,7 @@
   private static TimeSeries createInternal(
       List<LabelValue> labelValues, List<Point> points, @Nullable Timestamp startTimestamp) {
     // Fail fast on null lists to prevent NullPointerException when copying the lists.
-    Utils.checkNotNull(labelValues, "labelValues");
-    Utils.checkListElementNotNull(labelValues, "labelValue");
+    Utils.checkListElementNotNull(Utils.checkNotNull(labelValues, "labelValues"), "labelValue");
     return new AutoValue_TimeSeries(
         Collections.unmodifiableList(new ArrayList<LabelValue>(labelValues)),
         points,
diff --git a/api/src/main/java/io/opencensus/resource/Resource.java b/api/src/main/java/io/opencensus/resource/Resource.java
new file mode 100644
index 0000000..cfb8892
--- /dev/null
+++ b/api/src/main/java/io/opencensus/resource/Resource.java
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.resource;
+
+import com.google.auto.value.AutoValue;
+import io.opencensus.common.ExperimentalApi;
+import io.opencensus.internal.DefaultVisibilityForTesting;
+import io.opencensus.internal.StringUtils;
+import io.opencensus.internal.Utils;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * {@link Resource} represents a resource, which captures identifying information about the entities
+ * for which signals (stats or traces) are reported. It further provides a framework for detecting
+ * resource information from the environment and for progressively populating it as signals
+ * propagate from the core instrumentation library to a backend's exporter.
+ *
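+ * <p>Example (an illustrative sketch; the type and label names are arbitrary):
+ *
+ * <pre>{@code
+ * Map<String, String> labels = new HashMap<String, String>();
+ * labels.put("k8s.io/namespace/name", "default");
+ * Resource resource =
+ *     Resource.mergeResources(
+ *         Arrays.asList(
+ *             Resource.create("kubernetes.io/container", labels),
+ *             Resource.createFromEnvironmentVariables()));
+ * }</pre>
+ *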
+ * @since 0.18
+ */
+@Immutable
+@AutoValue
+@ExperimentalApi
+public abstract class Resource {
+  @DefaultVisibilityForTesting static final int MAX_LENGTH = 255;
+  private static final String OC_RESOURCE_TYPE_ENV = "OC_RESOURCE_TYPE";
+  private static final String OC_RESOURCE_LABELS_ENV = "OC_RESOURCE_LABELS";
+  private static final String LABEL_LIST_SPLITTER = ",";
+  private static final String LABEL_KEY_VALUE_SPLITTER = "=";
+  private static final String ERROR_MESSAGE_INVALID_CHARS =
+      " should be an ASCII string with a length greater than 0 and not exceeding "
+          + MAX_LENGTH
+          + " characters.";
+  private static final String ERROR_MESSAGE_INVALID_VALUE =
+      " should be an ASCII string with a length not exceeding " + MAX_LENGTH + " characters.";
+
+  @Nullable
+  private static final String ENV_TYPE = parseResourceType(System.getenv(OC_RESOURCE_TYPE_ENV));
+
+  private static final Map<String, String> ENV_LABEL_MAP =
+      parseResourceLabels(System.getenv(OC_RESOURCE_LABELS_ENV));
+
+  Resource() {}
+
+  /**
+   * Returns the type identifier for the resource.
+   *
+   * @return the type identifier for the resource.
+   * @since 0.18
+   */
+  @Nullable
+  public abstract String getType();
+
+  /**
+   * Returns a map of labels that describe the resource.
+   *
+   * @return a map of labels.
+   * @since 0.18
+   */
+  public abstract Map<String, String> getLabels();
+
+  /**
+   * Returns a {@link Resource}. This resource information is loaded from the OC_RESOURCE_TYPE and
+   * OC_RESOURCE_LABELS environment variables.
+   *
+   * @return a {@code Resource}.
+   * @since 0.18
+   */
+  public static Resource createFromEnvironmentVariables() {
+    return createInternal(ENV_TYPE, ENV_LABEL_MAP);
+  }
+
+  /**
+   * Returns a {@link Resource}.
+   *
+   * @param type the type identifier for the resource.
+   * @param labels a map of labels that describe the resource.
+   * @return a {@code Resource}.
+   * @throws NullPointerException if {@code labels} is null.
+   * @throws IllegalArgumentException if type or label key or label value is not a valid printable
+   *     ASCII string or exceed {@link #MAX_LENGTH} characters.
+   * @since 0.18
+   */
+  public static Resource create(@Nullable String type, Map<String, String> labels) {
+    return createInternal(
+        type,
+        Collections.unmodifiableMap(
+            new LinkedHashMap<String, String>(Utils.checkNotNull(labels, "labels"))));
+  }
+
+  /**
+   * Returns a {@link Resource} that merges all the input resources sequentially. In case a type or
+   * label key is already set, the first set value takes precedence.
+   *
+   * @param resources a list of resources.
+   * @return a {@code Resource}.
+   * @since 0.18
+   */
+  @Nullable
+  public static Resource mergeResources(List<Resource> resources) {
+    Resource currentResource = null;
+    for (Resource resource : resources) {
+      currentResource = merge(currentResource, resource);
+    }
+    return currentResource;
+  }
+
+  private static Resource createInternal(@Nullable String type, Map<String, String> labels) {
+    return new AutoValue_Resource(type, labels);
+  }
+
+  /**
+   * Creates a resource type from the OC_RESOURCE_TYPE environment variable.
+   *
+   * <p>OC_RESOURCE_TYPE: A string that describes the type of the resource prefixed by a domain
+   * namespace, e.g. “kubernetes.io/container”.
+   */
+  @Nullable
+  static String parseResourceType(@Nullable String rawEnvType) {
+    if (rawEnvType != null && !rawEnvType.isEmpty()) {
+      Utils.checkArgument(isValidAndNotEmpty(rawEnvType), "Type" + ERROR_MESSAGE_INVALID_CHARS);
+      return rawEnvType.trim();
+    }
+    return rawEnvType;
+  }
+
+  /*
+   * Creates a label map from the OC_RESOURCE_LABELS environment variable.
+   *
+   * <p>OC_RESOURCE_LABELS: A comma-separated list of labels describing the source in more detail,
+   * e.g. “key1=val1,key2=val2”. Domain names and paths are accepted as label keys. Values may be
+   * quoted or unquoted in general. If a value contains whitespaces, =, or " characters, it must
+   * always be quoted.
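+   *
+   * <p>For example (illustrative input), the value
+   * k8s.io/pod/name="pod-xyz-123",k8s.io/container/name=c1 is parsed into a map with the entries
+   * {k8s.io/pod/name=pod-xyz-123, k8s.io/container/name=c1}; surrounding quotes are stripped from
+   * values.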
+   */
+  static Map<String, String> parseResourceLabels(@Nullable String rawEnvLabels) {
+    if (rawEnvLabels == null) {
+      return Collections.<String, String>emptyMap();
+    } else {
+      Map<String, String> labels = new HashMap<String, String>();
+      String[] rawLabels = rawEnvLabels.split(LABEL_LIST_SPLITTER, -1);
+      for (String rawLabel : rawLabels) {
+        String[] keyValuePair = rawLabel.split(LABEL_KEY_VALUE_SPLITTER, -1);
+        if (keyValuePair.length != 2) {
+          continue;
+        }
+        String key = keyValuePair[0].trim();
+        String value = keyValuePair[1].trim().replaceAll("^\"|\"$", "");
+        Utils.checkArgument(isValidAndNotEmpty(key), "Label key" + ERROR_MESSAGE_INVALID_CHARS);
+        Utils.checkArgument(isValid(value), "Label value" + ERROR_MESSAGE_INVALID_VALUE);
+        labels.put(key, value);
+      }
+      return Collections.unmodifiableMap(labels);
+    }
+  }
+
+  /**
+   * Returns a new, merged {@link Resource} by merging two resources. In case of a collision, the
+   * first resource takes precedence.
+   */
+  @Nullable
+  private static Resource merge(@Nullable Resource resource, @Nullable Resource otherResource) {
+    if (otherResource == null) {
+      return resource;
+    }
+    if (resource == null) {
+      return otherResource;
+    }
+
+    String mergedType = resource.getType() != null ? resource.getType() : otherResource.getType();
+    Map<String, String> mergedLabelMap =
+        new LinkedHashMap<String, String>(otherResource.getLabels());
+
+    // Labels from resource overwrite labels from otherResource.
+    for (Entry<String, String> entry : resource.getLabels().entrySet()) {
+      mergedLabelMap.put(entry.getKey(), entry.getValue());
+    }
+    return createInternal(mergedType, Collections.unmodifiableMap(mergedLabelMap));
+  }
+
+  /**
+   * Determines whether the given {@code String} is a valid printable ASCII string with a length not
+   * exceeding {@link #MAX_LENGTH} characters.
+   *
+   * @param name the name to be validated.
+   * @return whether the name is valid.
+   */
+  private static boolean isValid(String name) {
+    return name.length() <= MAX_LENGTH && StringUtils.isPrintableString(name);
+  }
+
+  /**
+   * Determines whether the given {@code String} is a valid printable ASCII string with a length
+   * greater than 0 and not exceeding {@link #MAX_LENGTH} characters.
+   *
+   * @param name the name to be validated.
+   * @return whether the name is valid.
+   */
+  private static boolean isValidAndNotEmpty(String name) {
+    return !name.isEmpty() && isValid(name);
+  }
+}
diff --git a/api/src/main/java/io/opencensus/resource/package-info.java b/api/src/main/java/io/opencensus/resource/package-info.java
new file mode 100644
index 0000000..942e6b0
--- /dev/null
+++ b/api/src/main/java/io/opencensus/resource/package-info.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * API for resource information population.
+ *
+ * <p>The resource library primarily defines a type "Resource" that captures information about the
+ * entity for which stats or traces are recorded. For example, metrics exposed by a Kubernetes
+ * container can be linked to a resource that specifies the cluster, namespace, pod, and container
+ * name.
+ *
+ * <p>Two environment variables are used to populate resource information:
+ *
+ * <ul>
+ *   <li>OC_RESOURCE_TYPE: A string that describes the type of the resource prefixed by a domain
+ *       namespace. Leading and trailing whitespaces are trimmed. e.g. “kubernetes.io/container”.
+ *   <li>OC_RESOURCE_LABELS: A comma-separated list of labels describing the source in more detail,
+ *       e.g. “key1=val1,key2=val2”. The allowed character set is appropriately constrained.
+ * </ul>
+ *
+ * <p>Type, label keys, and label values MUST contain only printable ASCII (codes between 32 and
+ * 126, inclusive) and less than 256 characters. Type and label keys MUST have a length greater than
+ * zero. They SHOULD start with a domain name and separate hierarchies with / characters, e.g.
+ * k8s.io/namespace/name.
+ *
+ * <p>WARNING: Currently all the public classes under this package are marked as {@link
+ * io.opencensus.common.ExperimentalApi}. DO NOT USE except for experimental purposes.
+ *
+ * <p>Please see
+ * https://github.com/census-instrumentation/opencensus-specs/blob/master/resource/Resource.md for
+ * more details.
+ */
+@io.opencensus.common.ExperimentalApi
+package io.opencensus.resource;
diff --git a/api/src/main/java/io/opencensus/stats/AggregationData.java b/api/src/main/java/io/opencensus/stats/AggregationData.java
index c6e12b6..23c9af9 100644
--- a/api/src/main/java/io/opencensus/stats/AggregationData.java
+++ b/api/src/main/java/io/opencensus/stats/AggregationData.java
@@ -18,14 +18,11 @@
 
 import com.google.auto.value.AutoValue;
 import io.opencensus.common.Function;
-import io.opencensus.common.Timestamp;
 import io.opencensus.internal.Utils;
+import io.opencensus.metrics.data.Exemplar;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
 import javax.annotation.concurrent.Immutable;
 
 /**
@@ -275,7 +272,10 @@
      * @param exemplars the exemplars associated with histogram buckets.
      * @return a {@code DistributionData}.
      * @since 0.16
+     * @deprecated since 0.17. Use {@link #create(double, long, double, List, List)}
      */
+    @Deprecated
+    @SuppressWarnings("InconsistentOverloads")
     public static DistributionData create(
         double mean,
         long count,
@@ -284,17 +284,34 @@
         double sumOfSquaredDeviations,
         List<Long> bucketCounts,
         List<Exemplar> exemplars) {
-      if (min != Double.POSITIVE_INFINITY || max != Double.NEGATIVE_INFINITY) {
-        Utils.checkArgument(min <= max, "max should be greater or equal to min.");
+      return create(mean, count, sumOfSquaredDeviations, bucketCounts, exemplars);
+    }
+
+    /**
+     * Creates a {@code DistributionData}.
+     *
+     * @param mean mean value.
+     * @param count count value.
+     * @param sumOfSquaredDeviations sum of squared deviations.
+     * @param bucketCounts histogram bucket counts.
+     * @param exemplars the exemplars associated with histogram buckets.
+     * @return a {@code DistributionData}.
+     * @since 0.17
+     */
+    public static DistributionData create(
+        double mean,
+        long count,
+        double sumOfSquaredDeviations,
+        List<Long> bucketCounts,
+        List<Exemplar> exemplars) {
+      List<Long> bucketCountsCopy =
+          Collections.unmodifiableList(
+              new ArrayList<Long>(Utils.checkNotNull(bucketCounts, "bucketCounts")));
+      for (Long bucketCount : bucketCountsCopy) {
+        Utils.checkNotNull(bucketCount, "bucketCount");
       }
 
-      Utils.checkNotNull(bucketCounts, "bucketCounts");
-      List<Long> bucketCountsCopy = Collections.unmodifiableList(new ArrayList<Long>(bucketCounts));
-      for (Long bucket : bucketCountsCopy) {
-        Utils.checkNotNull(bucket, "bucket");
-      }
-
-      Utils.checkNotNull(exemplars, "exemplar list should not be null.");
+      Utils.checkNotNull(exemplars, "exemplars");
       for (Exemplar exemplar : exemplars) {
         Utils.checkNotNull(exemplar, "exemplar");
       }
@@ -302,8 +319,6 @@
       return new AutoValue_AggregationData_DistributionData(
           mean,
           count,
-          min,
-          max,
           sumOfSquaredDeviations,
           bucketCountsCopy,
           Collections.<Exemplar>unmodifiableList(new ArrayList<Exemplar>(exemplars)));
@@ -320,7 +335,10 @@
      * @param bucketCounts histogram bucket counts.
      * @return a {@code DistributionData}.
      * @since 0.8
+     * @deprecated since 0.17. Use {@link #create(double, long, double, List)}.
      */
+    @Deprecated
+    @SuppressWarnings("InconsistentOverloads")
     public static DistributionData create(
         double mean,
         long count,
@@ -329,13 +347,23 @@
         double sumOfSquaredDeviations,
         List<Long> bucketCounts) {
       return create(
-          mean,
-          count,
-          min,
-          max,
-          sumOfSquaredDeviations,
-          bucketCounts,
-          Collections.<Exemplar>emptyList());
+          mean, count, sumOfSquaredDeviations, bucketCounts, Collections.<Exemplar>emptyList());
+    }
+
+    /**
+     * Creates a {@code DistributionData}.
+     *
+     * @param mean mean value.
+     * @param count count value.
+     * @param sumOfSquaredDeviations sum of squared deviations.
+     * @param bucketCounts histogram bucket counts.
+     * @return a {@code DistributionData}.
+     * @since 0.17
+     */
+    public static DistributionData create(
+        double mean, long count, double sumOfSquaredDeviations, List<Long> bucketCounts) {
+      return create(
+          mean, count, sumOfSquaredDeviations, bucketCounts, Collections.<Exemplar>emptyList());
     }
 
     /**
@@ -359,16 +387,24 @@
      *
      * @return the minimum of the population values.
      * @since 0.8
+     * @deprecated since 0.17. Returns {@code 0}.
      */
-    public abstract double getMin();
+    @Deprecated
+    public double getMin() {
+      return 0;
+    }
 
     /**
      * Returns the maximum of the population values.
      *
      * @return the maximum of the population values.
      * @since 0.8
+     * @deprecated since 0.17. Returns {@code 0}.
      */
-    public abstract double getMax();
+    @Deprecated
+    public double getMax() {
+      return 0;
+    }
 
     /**
      * Returns the aggregated sum of squared deviations.
@@ -406,65 +442,6 @@
         Function<? super AggregationData, T> defaultFunction) {
       return p3.apply(this);
     }
-
-    /**
-     * An example point that may be used to annotate aggregated distribution values, associated with
-     * a histogram bucket.
-     *
-     * @since 0.16
-     */
-    @Immutable
-    @AutoValue
-    public abstract static class Exemplar {
-
-      Exemplar() {}
-
-      /**
-       * Returns value of the {@link Exemplar} point.
-       *
-       * @return value of the {@code Exemplar} point.
-       * @since 0.16
-       */
-      public abstract double getValue();
-
-      /**
-       * Returns the time that this {@link Exemplar}'s value was recorded.
-       *
-       * @return the time that this {@code Exemplar}'s value was recorded.
-       * @since 0.16
-       */
-      public abstract Timestamp getTimestamp();
-
-      /**
-       * Returns the contextual information about the example value, represented as a string map.
-       *
-       * @return the contextual information about the example value.
-       * @since 0.16
-       */
-      public abstract Map<String, String> getAttachments();
-
-      /**
-       * Creates an {@link Exemplar}.
-       *
-       * @param value value of the {@link Exemplar} point.
-       * @param timestamp the time that this {@code Exemplar}'s value was recorded.
-       * @param attachments the contextual information about the example value.
-       * @return an {@code Exemplar}.
-       * @since 0.16
-       */
-      public static Exemplar create(
-          double value, Timestamp timestamp, Map<String, String> attachments) {
-        Utils.checkNotNull(attachments, "attachments");
-        Map<String, String> attachmentsCopy =
-            Collections.unmodifiableMap(new HashMap<String, String>(attachments));
-        for (Entry<String, String> entry : attachmentsCopy.entrySet()) {
-          Utils.checkNotNull(entry.getKey(), "key of attachments");
-          Utils.checkNotNull(entry.getValue(), "value of attachments");
-        }
-        return new AutoValue_AggregationData_DistributionData_Exemplar(
-            value, timestamp, attachmentsCopy);
-      }
-    }
   }
 
   /**
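As a quick illustration of the new min/max-free factory methods above, the sketch below builds a DistributionData from made-up values (four recorded values 1, 2, 3, 4 against bucket boundaries [2.0, 4.0]); it is illustrative only and not part of this change.

    import io.opencensus.metrics.data.Exemplar;
    import io.opencensus.stats.AggregationData.DistributionData;
    import java.util.Arrays;
    import java.util.Collections;

    final class DistributionDataSketch {
      static DistributionData example() {
        // 0.17+ overloads no longer take min/max; exemplars are optional via the 4-arg form.
        return DistributionData.create(
            2.5, // mean of {1, 2, 3, 4}
            4, // count
            5.0, // sum of squared deviations
            Arrays.asList(1L, 2L, 1L), // bucket counts for boundaries [2.0, 4.0]
            Collections.<Exemplar>emptyList()); // no exemplars
      }

      private DistributionDataSketch() {}
    }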
diff --git a/api/src/main/java/io/opencensus/stats/BucketBoundaries.java b/api/src/main/java/io/opencensus/stats/BucketBoundaries.java
index 61e21e6..258dbfd 100644
--- a/api/src/main/java/io/opencensus/stats/BucketBoundaries.java
+++ b/api/src/main/java/io/opencensus/stats/BucketBoundaries.java
@@ -21,6 +21,8 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 import javax.annotation.concurrent.Immutable;
 
 /**
@@ -32,6 +34,8 @@
 @AutoValue
 public abstract class BucketBoundaries {
 
+  private static final Logger logger = Logger.getLogger(BucketBoundaries.class.getName());
+
   /**
    * Returns a {@code BucketBoundaries} with the given buckets.
    *
@@ -46,14 +50,43 @@
     List<Double> bucketBoundariesCopy = new ArrayList<Double>(bucketBoundaries); // Deep copy.
     // Check if sorted.
     if (bucketBoundariesCopy.size() > 1) {
-      double lower = bucketBoundariesCopy.get(0);
+      double previous = bucketBoundariesCopy.get(0);
       for (int i = 1; i < bucketBoundariesCopy.size(); i++) {
         double next = bucketBoundariesCopy.get(i);
-        Utils.checkArgument(lower < next, "Bucket boundaries not sorted.");
-        lower = next;
+        Utils.checkArgument(previous < next, "Bucket boundaries not sorted.");
+        previous = next;
       }
     }
-    return new AutoValue_BucketBoundaries(Collections.unmodifiableList(bucketBoundariesCopy));
+    return new AutoValue_BucketBoundaries(
+        Collections.unmodifiableList(dropNegativeBucketBounds(bucketBoundariesCopy)));
+  }
+
+  private static List<Double> dropNegativeBucketBounds(List<Double> bucketBoundaries) {
+    // Negative values (BucketBounds) are currently not supported by any of the backends
+    // that OC supports.
+    int negativeBucketBounds = 0;
+    int zeroBucketBounds = 0;
+    for (Double value : bucketBoundaries) {
+      if (value <= 0) {
+        if (value == 0) {
+          zeroBucketBounds++;
+        } else {
+          negativeBucketBounds++;
+        }
+      } else {
+        break;
+      }
+    }
+
+    if (negativeBucketBounds > 0) {
+      logger.log(
+          Level.WARNING,
+          "Dropping "
+              + negativeBucketBounds
+              + " negative bucket boundaries, the values must be strictly > 0.");
+    }
+    return bucketBoundaries.subList(
+        negativeBucketBounds + zeroBucketBounds, bucketBoundaries.size());
   }
 
   /**
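To make the new boundary handling above concrete, here is a small illustrative sketch: leading non-positive boundaries are removed by create(), and a warning is logged only when negative values are dropped.

    import io.opencensus.stats.BucketBoundaries;
    import java.util.Arrays;
    import java.util.List;

    final class BucketBoundariesSketch {
      static List<Double> example() {
        // Input must be sorted; the leading -5.0 and 0.0 are dropped,
        // and a warning is logged for the negative value.
        BucketBoundaries boundaries =
            BucketBoundaries.create(Arrays.asList(-5.0, 0.0, 1.0, 10.0, 100.0));
        return boundaries.getBoundaries(); // [1.0, 10.0, 100.0]
      }

      private BucketBoundariesSketch() {}
    }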
diff --git a/api/src/main/java/io/opencensus/stats/MeasureMap.java b/api/src/main/java/io/opencensus/stats/MeasureMap.java
index beb84f0..ba79487 100644
--- a/api/src/main/java/io/opencensus/stats/MeasureMap.java
+++ b/api/src/main/java/io/opencensus/stats/MeasureMap.java
@@ -17,6 +17,8 @@
 package io.opencensus.stats;
 
 import io.opencensus.internal.Utils;
+import io.opencensus.metrics.data.AttachmentValue;
+import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
 import io.opencensus.stats.Measure.MeasureDouble;
 import io.opencensus.stats.Measure.MeasureLong;
 import io.opencensus.tags.TagContext;
@@ -62,9 +64,25 @@
    * @param value the string representation of contextual information of an {@code Exemplar}.
    * @return this
    * @since 0.16
+   * @deprecated in favor of {@link #putAttachment(String, AttachmentValue)}.
    */
-  // TODO(songya): make this method abstract in the 0.17 release.
+  @Deprecated
   public MeasureMap putAttachment(String key, String value) {
+    return putAttachment(key, AttachmentValueString.create(value));
+  }
+
+  /**
+   * Associate the contextual information of an {@code Exemplar} to this {@link MeasureMap}.
+   * Contextual information is represented as a {@code String} key and an {@link AttachmentValue}.
+   *
+   * <p>If this method is called multiple times with the same key, only the last value will be kept.
+   *
+   * @param key the key of contextual information of an {@code Exemplar}.
+   * @param value the value of contextual information of an {@code Exemplar}.
+   * @return this
+   * @since 0.20
+   */
+  public MeasureMap putAttachment(String key, AttachmentValue value) {
     // Provides a default no-op implementation to avoid breaking other existing sub-classes.
     Utils.checkNotNull(key, "key");
     Utils.checkNotNull(value, "value");
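The deprecation above moves attachments from plain strings to typed AttachmentValues. A minimal usage sketch follows; the measure name and recorded values are made up for illustration.

    import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
    import io.opencensus.stats.Measure.MeasureDouble;
    import io.opencensus.stats.Stats;

    final class PutAttachmentSketch {
      // Hypothetical measure used only for this example.
      private static final MeasureDouble LATENCY_MS =
          MeasureDouble.create("my.org/measure/latency", "Request latency", "ms");

      static void recordWithAttachment() {
        Stats.getStatsRecorder()
            .newMeasureMap()
            .put(LATENCY_MS, 17.0)
            // 0.20+: attach a typed value instead of a raw String.
            .putAttachment("span_id", AttachmentValueString.create("0123456789abcdef"))
            .record();
      }

      private PutAttachmentSketch() {}
    }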
diff --git a/api/src/main/java/io/opencensus/stats/NoopStats.java b/api/src/main/java/io/opencensus/stats/NoopStats.java
index e7e94a3..ee7b97a 100644
--- a/api/src/main/java/io/opencensus/stats/NoopStats.java
+++ b/api/src/main/java/io/opencensus/stats/NoopStats.java
@@ -30,6 +30,8 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 import javax.annotation.concurrent.GuardedBy;
 import javax.annotation.concurrent.Immutable;
 import javax.annotation.concurrent.ThreadSafe;
@@ -66,8 +68,8 @@
    *
    * @return a {@code MeasureMap} that ignores all calls to {@code MeasureMap#put}.
    */
-  static MeasureMap getNoopMeasureMap() {
-    return NoopMeasureMap.INSTANCE;
+  static MeasureMap newNoopMeasureMap() {
+    return new NoopMeasureMap();
   }
 
   /**
@@ -116,21 +118,27 @@
 
     @Override
     public MeasureMap newMeasureMap() {
-      return getNoopMeasureMap();
+      return newNoopMeasureMap();
     }
   }
 
-  @Immutable
   private static final class NoopMeasureMap extends MeasureMap {
-    static final MeasureMap INSTANCE = new NoopMeasureMap();
+    private static final Logger logger = Logger.getLogger(NoopMeasureMap.class.getName());
+    private boolean hasUnsupportedValues;
 
     @Override
     public MeasureMap put(MeasureDouble measure, double value) {
+      if (value < 0) {
+        hasUnsupportedValues = true;
+      }
       return this;
     }
 
     @Override
     public MeasureMap put(MeasureLong measure, long value) {
+      if (value < 0) {
+        hasUnsupportedValues = true;
+      }
       return this;
     }
 
@@ -140,6 +148,11 @@
     @Override
     public void record(TagContext tags) {
       Utils.checkNotNull(tags, "tags");
+
+      if (hasUnsupportedValues) {
+        // drop all the recorded values
+        logger.log(Level.WARNING, "Dropping values, value to record must be non-negative.");
+      }
     }
   }
 
diff --git a/api/src/main/java/io/opencensus/stats/View.java b/api/src/main/java/io/opencensus/stats/View.java
index f563ff9..b614c90 100644
--- a/api/src/main/java/io/opencensus/stats/View.java
+++ b/api/src/main/java/io/opencensus/stats/View.java
@@ -48,7 +48,7 @@
       new Comparator<TagKey>() {
         @Override
         public int compare(TagKey key1, TagKey key2) {
-          return key1.getName().compareTo(key2.getName());
+          return key1.getName().compareToIgnoreCase(key2.getName());
         }
       };
 
diff --git a/api/src/main/java/io/opencensus/tags/NoopTags.java b/api/src/main/java/io/opencensus/tags/NoopTags.java
index fb52b16..c4baadb 100644
--- a/api/src/main/java/io/opencensus/tags/NoopTags.java
+++ b/api/src/main/java/io/opencensus/tags/NoopTags.java
@@ -20,12 +20,20 @@
 import io.opencensus.internal.NoopScope;
 import io.opencensus.internal.Utils;
 import io.opencensus.tags.propagation.TagContextBinarySerializer;
+import io.opencensus.tags.propagation.TagContextDeserializationException;
+import io.opencensus.tags.propagation.TagContextSerializationException;
+import io.opencensus.tags.propagation.TagContextTextFormat;
 import io.opencensus.tags.propagation.TagPropagationComponent;
 import java.util.Collections;
 import java.util.Iterator;
+import java.util.List;
 import javax.annotation.concurrent.Immutable;
 import javax.annotation.concurrent.ThreadSafe;
 
+/*>>>
+import org.checkerframework.checker.nullness.qual.NonNull;
+*/
+
 /** No-op implementations of tagging classes. */
 final class NoopTags {
 
@@ -80,8 +88,17 @@
     return NoopTagContextBinarySerializer.INSTANCE;
   }
 
+  /**
+   * Returns a {@code TagContextTextFormat} that serializes all {@code TagContext}s to empty strings
+   * and deserializes all inputs to empty {@code TagContext}s.
+   */
+  static TagContextTextFormat getNoopTagContextTextSerializer() {
+    return NoopTagContextTextFormat.INSTANCE;
+  }
+
   @ThreadSafe
   private static final class NoopTagsComponent extends TagsComponent {
+
     private volatile boolean isRead;
 
     @Override
@@ -110,6 +127,7 @@
 
   @Immutable
   private static final class NoopTagger extends Tagger {
+
     static final Tagger INSTANCE = new NoopTagger();
 
     @Override
@@ -147,9 +165,11 @@
 
   @Immutable
   private static final class NoopTagContextBuilder extends TagContextBuilder {
+
     static final TagContextBuilder INSTANCE = new NoopTagContextBuilder();
 
     @Override
+    @SuppressWarnings("deprecation")
     public TagContextBuilder put(TagKey key, TagValue value) {
       Utils.checkNotNull(key, "key");
       Utils.checkNotNull(value, "value");
@@ -157,6 +177,14 @@
     }
 
     @Override
+    public TagContextBuilder put(TagKey key, TagValue value, TagMetadata tagMetadata) {
+      Utils.checkNotNull(key, "key");
+      Utils.checkNotNull(value, "value");
+      Utils.checkNotNull(tagMetadata, "tagMetadata");
+      return this;
+    }
+
+    @Override
     public TagContextBuilder remove(TagKey key) {
       Utils.checkNotNull(key, "key");
       return this;
@@ -175,6 +203,7 @@
 
   @Immutable
   private static final class NoopTagContext extends TagContext {
+
     static final TagContext INSTANCE = new NoopTagContext();
 
     // TODO(sebright): Is there any way to let the user know that their tags were ignored?
@@ -186,16 +215,23 @@
 
   @Immutable
   private static final class NoopTagPropagationComponent extends TagPropagationComponent {
+
     static final TagPropagationComponent INSTANCE = new NoopTagPropagationComponent();
 
     @Override
     public TagContextBinarySerializer getBinarySerializer() {
       return getNoopTagContextBinarySerializer();
     }
+
+    @Override
+    public TagContextTextFormat getCorrelationContextFormat() {
+      return getNoopTagContextTextSerializer();
+    }
   }
 
   @Immutable
   private static final class NoopTagContextBinarySerializer extends TagContextBinarySerializer {
+
     static final TagContextBinarySerializer INSTANCE = new NoopTagContextBinarySerializer();
     static final byte[] EMPTY_BYTE_ARRAY = {};
 
@@ -211,4 +247,32 @@
       return getNoopTagContext();
     }
   }
+
+  @Immutable
+  private static final class NoopTagContextTextFormat extends TagContextTextFormat {
+
+    static final NoopTagContextTextFormat INSTANCE = new NoopTagContextTextFormat();
+
+    @Override
+    public List<String> fields() {
+      return Collections.<String>emptyList();
+    }
+
+    @Override
+    public <C /*>>> extends @NonNull Object*/> void inject(
+        TagContext tagContext, C carrier, Setter<C> setter)
+        throws TagContextSerializationException {
+      Utils.checkNotNull(tagContext, "tagContext");
+      Utils.checkNotNull(carrier, "carrier");
+      Utils.checkNotNull(setter, "setter");
+    }
+
+    @Override
+    public <C /*>>> extends @NonNull Object*/> TagContext extract(C carrier, Getter<C> getter)
+        throws TagContextDeserializationException {
+      Utils.checkNotNull(carrier, "carrier");
+      Utils.checkNotNull(getter, "getter");
+      return getNoopTagContext();
+    }
+  }
 }
diff --git a/api/src/main/java/io/opencensus/tags/Tag.java b/api/src/main/java/io/opencensus/tags/Tag.java
index 9e0a7a8..bc66d26 100644
--- a/api/src/main/java/io/opencensus/tags/Tag.java
+++ b/api/src/main/java/io/opencensus/tags/Tag.java
@@ -17,6 +17,7 @@
 package io.opencensus.tags;
 
 import com.google.auto.value.AutoValue;
+import io.opencensus.tags.TagMetadata.TagTtl;
 import javax.annotation.concurrent.Immutable;
 
 /**
@@ -28,18 +29,41 @@
 @AutoValue
 public abstract class Tag {
 
+  private static final TagMetadata METADATA_UNLIMITED_PROPAGATION =
+      TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION);
+
   Tag() {}
 
   /**
    * Creates a {@code Tag} from the given key and value.
    *
+   * <p>For backwards-compatibility this method still produces propagating {@link Tag}s.
+   *
+   * <p>This is equivalent to calling {@code create(key, value,
+   * TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION))}.
+   *
    * @param key the tag key.
    * @param value the tag value.
    * @return a {@code Tag} with the given key and value.
    * @since 0.8
+   * @deprecated in favor of {@link #create(TagKey, TagValue, TagMetadata)}.
    */
+  @Deprecated
   public static Tag create(TagKey key, TagValue value) {
-    return new AutoValue_Tag(key, value);
+    return create(key, value, METADATA_UNLIMITED_PROPAGATION);
+  }
+
+  /**
+   * Creates a {@code Tag} from the given key, value and metadata.
+   *
+   * @param key the tag key.
+   * @param value the tag value.
+   * @param tagMetadata the tag metadata.
+   * @return a {@code Tag}.
+   * @since 0.20
+   */
+  public static Tag create(TagKey key, TagValue value, TagMetadata tagMetadata) {
+    return new AutoValue_Tag(key, value, tagMetadata);
   }
 
   /**
@@ -57,4 +81,12 @@
    * @since 0.8
    */
   public abstract TagValue getValue();
+
+  /**
+   * Returns the {@link TagMetadata} associated with this {@link Tag}.
+   *
+   * @return the {@code TagMetadata}.
+   * @since 0.20
+   */
+  public abstract TagMetadata getTagMetadata();
 }
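A short sketch of the new three-argument factory: creating a Tag that carries TagMetadata and is scoped to the local process (key and value are illustrative).

    import io.opencensus.tags.Tag;
    import io.opencensus.tags.TagKey;
    import io.opencensus.tags.TagMetadata;
    import io.opencensus.tags.TagMetadata.TagTtl;
    import io.opencensus.tags.TagValue;

    final class TagWithMetadataSketch {
      static Tag localOnlyTag() {
        // NO_PROPAGATION keeps the tag within the current process.
        return Tag.create(
            TagKey.create("method"),
            TagValue.create("checkout"),
            TagMetadata.create(TagTtl.NO_PROPAGATION));
      }

      private TagWithMetadataSketch() {}
    }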
diff --git a/api/src/main/java/io/opencensus/tags/TagContextBuilder.java b/api/src/main/java/io/opencensus/tags/TagContextBuilder.java
index f426896..9548a14 100644
--- a/api/src/main/java/io/opencensus/tags/TagContextBuilder.java
+++ b/api/src/main/java/io/opencensus/tags/TagContextBuilder.java
@@ -17,6 +17,7 @@
 package io.opencensus.tags;
 
 import io.opencensus.common.Scope;
+import io.opencensus.tags.TagMetadata.TagTtl;
 
 /**
  * Builder for the {@link TagContext} class.
@@ -25,17 +26,79 @@
  */
 public abstract class TagContextBuilder {
 
+  private static final TagMetadata METADATA_NO_PROPAGATION =
+      TagMetadata.create(TagTtl.NO_PROPAGATION);
+  private static final TagMetadata METADATA_UNLIMITED_PROPAGATION =
+      TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION);
+
   /**
    * Adds the key/value pair regardless of whether the key is present.
    *
+   * <p>For backwards-compatibility this method still produces propagating {@link Tag}s.
+   *
+   * <p>Equivalent to calling {@code put(key, value,
+   * TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION))}.
+   *
    * @param key the {@code TagKey} which will be set.
    * @param value the {@code TagValue} to set for the given key.
    * @return this
    * @since 0.8
+   * @deprecated in favor of {@link #put(TagKey, TagValue, TagMetadata)}, or {@link
+   *     #putLocal(TagKey, TagValue)} if you only want in-process tags.
    */
+  @Deprecated
   public abstract TagContextBuilder put(TagKey key, TagValue value);
 
   /**
+   * Adds the key/value pair and metadata regardless of whether the key is present.
+   *
+   * @param key the {@code TagKey} which will be set.
+   * @param value the {@code TagValue} to set for the given key.
+   * @param tagMetadata the {@code TagMetadata} associated with this {@link Tag}.
+   * @return this
+   * @since 0.20
+   */
+  public TagContextBuilder put(TagKey key, TagValue value, TagMetadata tagMetadata) {
+    @SuppressWarnings("deprecation")
+    TagContextBuilder builder = put(key, value);
+    return builder;
+  }
+
+  /**
+   * Adds a non-propagating tag to this {@code TagContextBuilder}.
+   *
+   * <p>This is equivalent to calling {@code put(key, value,
+   * TagMetadata.create(TagTtl.NO_PROPAGATION))}.
+   *
+   * @param key the {@code TagKey} which will be set.
+   * @param value the {@code TagValue} to set for the given key.
+   * @return this
+   * @since 0.21
+   */
+  public final TagContextBuilder putLocal(TagKey key, TagValue value) {
+    return put(key, value, METADATA_NO_PROPAGATION);
+  }
+
+  /**
+   * Adds an unlimited propagating tag to this {@code TagContextBuilder}.
+   *
+   * <p>This is equivalent to calling {@code put(key, value,
+   * TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION))}.
+   *
+   * <p>Only call this method if you want propagating tags. If you want tags for breaking down
+   * metrics, or there are sensitive messages in your tags, use {@link #putLocal(TagKey, TagValue)}
+   * instead.
+   *
+   * @param key the {@code TagKey} which will be set.
+   * @param value the {@code TagValue} to set for the given key.
+   * @return this
+   * @since 0.21
+   */
+  public final TagContextBuilder putPropagating(TagKey key, TagValue value) {
+    return put(key, value, METADATA_UNLIMITED_PROPAGATION);
+  }
+
+  /**
    * Removes the key if it exists.
    *
    * @param key the {@code TagKey} which will be removed.
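The helpers added above make the propagation intent explicit at the call site. A usage sketch with made-up keys and values:

    import io.opencensus.tags.TagContext;
    import io.opencensus.tags.TagKey;
    import io.opencensus.tags.TagValue;
    import io.opencensus.tags.Tags;

    final class TagContextBuilderSketch {
      static TagContext build() {
        return Tags.getTagger()
            .emptyBuilder()
            // Stays in-process, e.g. for metric breakdowns.
            .putLocal(TagKey.create("user_id"), TagValue.create("12345"))
            // Crosses process boundaries when the context is serialized.
            .putPropagating(TagKey.create("request_type"), TagValue.create("batch"))
            .build();
      }

      private TagContextBuilderSketch() {}
    }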
diff --git a/api/src/main/java/io/opencensus/tags/TagMetadata.java b/api/src/main/java/io/opencensus/tags/TagMetadata.java
new file mode 100644
index 0000000..585096a
--- /dev/null
+++ b/api/src/main/java/io/opencensus/tags/TagMetadata.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.tags;
+
+import com.google.auto.value.AutoValue;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * {@link TagMetadata} contains properties associated with a {@link Tag}.
+ *
+ * <p>For now only the property {@link TagTtl} is defined. In future, additional properties may be
+ * added to address specific situations.
+ *
+ * @since 0.20
+ */
+@Immutable
+@AutoValue
+public abstract class TagMetadata {
+
+  TagMetadata() {}
+
+  /**
+   * Creates a {@link TagMetadata} with the given {@link TagTtl}.
+   *
+   * @param tagTtl TTL of a {@code Tag}.
+   * @return a {@code TagMetadata}.
+   * @since 0.20
+   */
+  public static TagMetadata create(TagTtl tagTtl) {
+    return new AutoValue_TagMetadata(tagTtl);
+  }
+
+  /**
+   * Returns the {@link TagTtl} of this {@link TagMetadata}.
+   *
+   * @return the {@code TagTtl}.
+   * @since 0.20
+   */
+  public abstract TagTtl getTagTtl();
+
+  /**
+   * {@link TagTtl} is an integer that represents the number of hops a tag can propagate.
+   *
+   * <p>Each time a sender serializes a tag, sends it over the wire, and the receiver deserializes
+   * it, the tag is considered to have travelled one hop.
+   *
+   * <p>There may be one or more proxies between the sender and the receiver. Proxies are treated
+   * as transparent entities and are not counted as hops.
+   *
+   * <p>For now, only special values of {@link TagTtl} are supported.
+   *
+   * @since 0.20
+   */
+  public enum TagTtl {
+
+    /**
+     * A {@link Tag} with {@link TagTtl#NO_PROPAGATION} is considered to have local scope and is
+     * used within the process where it's created.
+     *
+     * @since 0.20
+     */
+    NO_PROPAGATION(0),
+
+    /**
+     * A {@link Tag} with {@link TagTtl#UNLIMITED_PROPAGATION} can propagate unlimited hops.
+     *
+     * <p>However, it is still subject to outgoing and incoming (on remote side) filter criteria.
+     *
+     * <p>{@link TagTtl#UNLIMITED_PROPAGATION} is typically used to track a request, which may be
+     * processed across multiple entities.
+     *
+     * @since 0.20
+     */
+    UNLIMITED_PROPAGATION(-1);
+
+    private final int hops;
+
+    private TagTtl(int hops) {
+      this.hops = hops;
+    }
+  }
+}
diff --git a/api/src/main/java/io/opencensus/tags/propagation/TagContextBinarySerializer.java b/api/src/main/java/io/opencensus/tags/propagation/TagContextBinarySerializer.java
index 39eb8ce..99fde9d 100644
--- a/api/src/main/java/io/opencensus/tags/propagation/TagContextBinarySerializer.java
+++ b/api/src/main/java/io/opencensus/tags/propagation/TagContextBinarySerializer.java
@@ -16,7 +16,10 @@
 
 package io.opencensus.tags.propagation;
 
+import io.opencensus.tags.Tag;
 import io.opencensus.tags.TagContext;
+import io.opencensus.tags.TagMetadata;
+import io.opencensus.tags.TagMetadata.TagTtl;
 
 /**
  * Object for serializing and deserializing {@link TagContext}s with the binary format.
@@ -34,6 +37,9 @@
    *
    * <p>This method should be the inverse of {@link #fromByteArray}.
    *
+   * <p>{@link Tag}s that have a {@link TagMetadata} with {@link TagTtl#NO_PROPAGATION} will not be
+   * serialized.
+   *
    * @param tags the {@code TagContext} to serialize.
    * @return the on-the-wire representation of a {@code TagContext}.
    * @throws TagContextSerializationException if the result would be larger than the maximum allowed
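To illustrate the note above, a sketch of a binary round trip: tags whose TagMetadata is NO_PROPAGATION are omitted from the serialized bytes and are therefore absent after deserialization. The serializer is passed in as a parameter to keep the sketch independent of how it is obtained.

    import io.opencensus.tags.TagContext;
    import io.opencensus.tags.propagation.TagContextBinarySerializer;
    import io.opencensus.tags.propagation.TagContextDeserializationException;
    import io.opencensus.tags.propagation.TagContextSerializationException;

    final class BinaryRoundTripSketch {
      static TagContext roundTrip(TagContextBinarySerializer serializer, TagContext tags)
          throws TagContextSerializationException, TagContextDeserializationException {
        byte[] onTheWire = serializer.toByteArray(tags); // NO_PROPAGATION tags are not written.
        return serializer.fromByteArray(onTheWire);
      }

      private BinaryRoundTripSketch() {}
    }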
diff --git a/api/src/main/java/io/opencensus/tags/propagation/TagContextTextFormat.java b/api/src/main/java/io/opencensus/tags/propagation/TagContextTextFormat.java
new file mode 100644
index 0000000..af22827
--- /dev/null
+++ b/api/src/main/java/io/opencensus/tags/propagation/TagContextTextFormat.java
@@ -0,0 +1,165 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.tags.propagation;
+
+import io.opencensus.tags.TagContext;
+import java.util.List;
+import javax.annotation.Nullable;
+
+/*>>>
+import org.checkerframework.checker.nullness.qual.NonNull;
+*/
+
+/**
+ * Object for injecting and extracting {@link TagContext} as text into carriers that travel in-band
+ * across process boundaries. Tags are often encoded as messaging or RPC request headers.
+ *
+ * <p>When using HTTP, the carrier of propagated data on both the client (injector) and server
+ * (extractor) side is usually an HTTP request. Propagation is usually implemented via
+ * library-specific request interceptors, where the client side injects tags and the server side
+ * extracts them.
+ *
+ * <p>Example of usage on the client:
+ *
+ * <pre>{@code
+ * private static final Tagger tagger = Tags.getTagger();
+ * private static final TagContextTextFormat textFormat =
+ *     Tags.getPropagationComponent().getCorrelationContextFormat();
+ * private static final TagContextTextFormat.Setter<HttpURLConnection> setter =
+ *     new TagContextTextFormat.Setter<HttpURLConnection>() {
+ *       public void put(HttpURLConnection carrier, String key, String value) {
+ *         carrier.setRequestProperty(key, value);
+ *       }
+ *     };
+ *
+ * void makeHttpRequest() {
+ *   TagContext tagContext = tagger.emptyBuilder().put(K, V).build();
+ *   try (Scope s = tagger.withTagContext(tagContext)) {
+ *     HttpURLConnection connection =
+ *         (HttpURLConnection) new URL("http://myserver").openConnection();
+ *     textFormat.inject(tagContext, connection, setter);
+ *     // Send the request, wait for response and maybe set the status if not ok.
+ *   }
+ * }
+ * }</pre>
+ *
+ * <p>Example of usage on the server:
+ *
+ * <pre>{@code
+ * private static final Tagger tagger = Tags.getTagger();
+ * private static final TagContextTextFormat textFormat =
+ *     Tags.getPropagationComponent().getCorrelationContextFormat();
+ * private static final TagContextTextFormat.Getter<HttpRequest> getter = ...;
+ *
+ * void onRequestReceived(HttpRequest request) {
+ *   TagContext tagContext = textFormat.extract(request, getter);
+ *   try (Scope s = tagger.withTagContext(tagContext)) {
+ *     // Handle request and send response back.
+ *   }
+ * }
+ * }</pre>
+ *
+ * @since 0.21
+ */
+public abstract class TagContextTextFormat {
+
+  /**
+   * The propagation fields defined. If your carrier is reused, you should delete the fields here
+   * before calling {@link #inject(TagContext, Object, Setter)}.
+   *
+   * <p>For example, if the carrier is a single-use or immutable request object, you don't need to
+   * clear fields as they couldn't have been set before. If it is a mutable, retryable object,
+   * successive calls should clear these fields first.
+   *
+   * @since 0.21
+   */
+  // The use cases of this are:
+  // * allow pre-allocation of fields, especially in systems like gRPC Metadata
+  // * allow a single-pass over an iterator (ex OpenTracing has no getter in TextMap)
+  public abstract List<String> fields();
+
+  /**
+   * Injects the tag context downstream. For example, as http headers.
+   *
+   * @param tagContext the tag context.
+   * @param carrier holds propagation fields. For example, an outgoing message or http request.
+   * @param setter invoked for each propagation key to add or remove.
+   * @throws TagContextSerializationException if the given tag context cannot be serialized.
+   * @since 0.21
+   */
+  public abstract <C /*>>> extends @NonNull Object*/> void inject(
+      TagContext tagContext, C carrier, Setter<C> setter) throws TagContextSerializationException;
+
+  /**
+   * Class that allows a {@code TagContextTextFormat} to set propagated fields into a carrier.
+   *
+   * <p>{@code Setter} is stateless and can be saved as a constant to avoid runtime
+   * allocations.
+   *
+   * @param <C> carrier of propagation fields, such as an http request
+   * @since 0.21
+   */
+  public abstract static class Setter<C> {
+
+    /**
+     * Replaces a propagated field with the given value.
+     *
+     * <p>For example, a setter for an {@link java.net.HttpURLConnection} would be the method
+     * reference {@link java.net.HttpURLConnection#addRequestProperty(String, String)}.
+     *
+     * @param carrier holds propagation fields. For example, an outgoing message or http request.
+     * @param key the key of the field.
+     * @param value the value of the field.
+     * @since 0.21
+     */
+    public abstract void put(C carrier, String key, String value);
+  }
+
+  /**
+   * Extracts the tag context from upstream. For example, as http headers.
+   *
+   * @param carrier holds propagation fields. For example, an outgoing message or http request.
+   * @param getter invoked for each propagation key to get.
+   * @throws TagContextDeserializationException if the input is invalid.
+   * @since 0.21
+   */
+  public abstract <C /*>>> extends @NonNull Object*/> TagContext extract(
+      C carrier, Getter<C> getter) throws TagContextDeserializationException;
+
+  /**
+   * Class that allows a {@code TagContextTextFormat} to read propagated fields from a carrier.
+   *
+   * <p>{@code Getter} is stateless and can be saved as a constant to avoid runtime
+   * allocations.
+   *
+   * @param <C> carrier of propagation fields, such as an http request
+   * @since 0.21
+   */
+  public abstract static class Getter<C> {
+
+    /**
+     * Returns the first value of the given propagation {@code key}, or {@code null}.
+     *
+     * @param carrier carrier of propagation fields, such as an http request
+     * @param key the key of the field.
+     * @return the first value of the given propagation {@code key}, or {@code null}.
+     * @since 0.21
+     */
+    @Nullable
+    public abstract String get(C carrier, String key);
+  }
+}
diff --git a/api/src/main/java/io/opencensus/tags/propagation/TagPropagationComponent.java b/api/src/main/java/io/opencensus/tags/propagation/TagPropagationComponent.java
index 6ececa7..1c4d0ca 100644
--- a/api/src/main/java/io/opencensus/tags/propagation/TagPropagationComponent.java
+++ b/api/src/main/java/io/opencensus/tags/propagation/TagPropagationComponent.java
@@ -23,7 +23,6 @@
  *
  * @since 0.8
  */
-// TODO(sebright): Add an HTTP serializer.
 public abstract class TagPropagationComponent {
 
   /**
@@ -33,4 +32,15 @@
    * @since 0.8
    */
   public abstract TagContextBinarySerializer getBinarySerializer();
+
+  /**
+   * Returns the {@link TagContextTextFormat} for this implementation.
+   *
+   * <p>OpenCensus uses W3C Correlation Context as the HTTP text format. For more details, see <a
+   * href="https://github.com/w3c/correlation-context">correlation-context</a>.
+   *
+   * @return the {@code TagContextTextFormat} for this implementation.
+   * @since 0.21
+   */
+  public abstract TagContextTextFormat getCorrelationContextFormat();
 }
diff --git a/api/src/main/java/io/opencensus/tags/unsafe/ContextUtils.java b/api/src/main/java/io/opencensus/tags/unsafe/ContextUtils.java
index 8936bbb..b0a588e 100644
--- a/api/src/main/java/io/opencensus/tags/unsafe/ContextUtils.java
+++ b/api/src/main/java/io/opencensus/tags/unsafe/ContextUtils.java
@@ -17,12 +17,17 @@
 package io.opencensus.tags.unsafe;
 
 import io.grpc.Context;
+import io.opencensus.internal.Utils;
 import io.opencensus.tags.Tag;
 import io.opencensus.tags.TagContext;
 import java.util.Collections;
 import java.util.Iterator;
 import javax.annotation.concurrent.Immutable;
 
+/*>>>
+import org.checkerframework.checker.nullness.qual.Nullable;
+*/
+
 /**
  * Utility methods for accessing the {@link TagContext} contained in the {@link io.grpc.Context}.
  *
@@ -39,12 +44,35 @@
   /**
    * The {@link io.grpc.Context.Key} used to interact with the {@code TagContext} contained in the
    * {@link io.grpc.Context}.
-   *
-   * @since 0.8
    */
-  public static final Context.Key<TagContext> TAG_CONTEXT_KEY =
+  private static final Context.Key</*@Nullable*/ TagContext> TAG_CONTEXT_KEY =
       Context.keyWithDefault("opencensus-tag-context-key", EMPTY_TAG_CONTEXT);
 
+  /**
+   * Creates a new {@code Context} with the given value set.
+   *
+   * @param context the parent {@code Context}.
+   * @param tagContext the value to be set.
+   * @return a new context with the given value set.
+   * @since 0.21
+   */
+  public static Context withValue(
+      Context context, @javax.annotation.Nullable TagContext tagContext) {
+    return Utils.checkNotNull(context, "context").withValue(TAG_CONTEXT_KEY, tagContext);
+  }
+
+  /**
+   * Returns the value from the specified {@code Context}.
+   *
+   * @param context the specified {@code Context}.
+   * @return the value from the specified {@code Context}.
+   * @since 0.21
+   */
+  public static TagContext getValue(Context context) {
+    @javax.annotation.Nullable TagContext tags = TAG_CONTEXT_KEY.get(context);
+    return tags == null ? EMPTY_TAG_CONTEXT : tags;
+  }
+
   @Immutable
   private static final class EmptyTagContext extends TagContext {
 
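The context key is now private, so callers go through withValue/getValue. A minimal sketch of attaching a TagContext to the gRPC Context and reading it back; the empty TagContext is used only as a placeholder.

    import io.grpc.Context;
    import io.opencensus.tags.TagContext;
    import io.opencensus.tags.Tags;
    import io.opencensus.tags.unsafe.ContextUtils;

    final class TagContextInGrpcContextSketch {
      static void runWithTags(Runnable work) {
        TagContext tags = Tags.getTagger().empty();
        Context withTags = ContextUtils.withValue(Context.current(), tags);
        Context previous = withTags.attach();
        try {
          TagContext current = ContextUtils.getValue(Context.current()); // current == tags
          work.run();
        } finally {
          withTags.detach(previous);
        }
      }

      private TagContextInGrpcContextSketch() {}
    }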
diff --git a/api/src/main/java/io/opencensus/trace/BigendianEncoding.java b/api/src/main/java/io/opencensus/trace/BigendianEncoding.java
new file mode 100644
index 0000000..1ad4477
--- /dev/null
+++ b/api/src/main/java/io/opencensus/trace/BigendianEncoding.java
@@ -0,0 +1,167 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.trace;
+
+import io.opencensus.internal.Utils;
+import java.util.Arrays;
+
+final class BigendianEncoding {
+  static final int LONG_BYTES = Long.SIZE / Byte.SIZE;
+  static final int BYTE_BASE16 = 2;
+  static final int LONG_BASE16 = BYTE_BASE16 * LONG_BYTES;
+  private static final String ALPHABET = "0123456789abcdef";
+  private static final int ASCII_CHARACTERS = 128;
+  private static final char[] ENCODING = buildEncodingArray();
+  private static final byte[] DECODING = buildDecodingArray();
+
+  private static char[] buildEncodingArray() {
+    char[] encoding = new char[512];
+    for (int i = 0; i < 256; ++i) {
+      encoding[i] = ALPHABET.charAt(i >>> 4);
+      encoding[i | 0x100] = ALPHABET.charAt(i & 0xF);
+    }
+    return encoding;
+  }
+
+  private static byte[] buildDecodingArray() {
+    byte[] decoding = new byte[ASCII_CHARACTERS];
+    Arrays.fill(decoding, (byte) -1);
+    for (int i = 0; i < ALPHABET.length(); i++) {
+      char c = ALPHABET.charAt(i);
+      decoding[c] = (byte) i;
+    }
+    return decoding;
+  }
+
+  /**
+   * Returns the {@code long} value whose big-endian representation is stored in the first 8 bytes
+   * of {@code bytes} starting from the {@code offset}.
+   *
+   * @param bytes the byte array representation of the {@code long}.
+   * @param offset the starting offset in the byte array.
+   * @return the {@code long} value whose big-endian representation is given.
+   * @throws IllegalArgumentException if {@code bytes} has fewer than {@code offset + 8} elements.
+   */
+  static long longFromByteArray(byte[] bytes, int offset) {
+    Utils.checkArgument(bytes.length >= offset + LONG_BYTES, "array too small");
+    return (bytes[offset] & 0xFFL) << 56
+        | (bytes[offset + 1] & 0xFFL) << 48
+        | (bytes[offset + 2] & 0xFFL) << 40
+        | (bytes[offset + 3] & 0xFFL) << 32
+        | (bytes[offset + 4] & 0xFFL) << 24
+        | (bytes[offset + 5] & 0xFFL) << 16
+        | (bytes[offset + 6] & 0xFFL) << 8
+        | (bytes[offset + 7] & 0xFFL);
+  }
+
+  /**
+   * Stores the big-endian representation of {@code value} in the {@code dest} starting from the
+   * {@code destOffset}.
+   *
+   * @param value the value to be converted.
+   * @param dest the destination byte array.
+   * @param destOffset the starting offset in the destination byte array.
+   */
+  static void longToByteArray(long value, byte[] dest, int destOffset) {
+    Utils.checkArgument(dest.length >= destOffset + LONG_BYTES, "array too small");
+    dest[destOffset + 7] = (byte) (value & 0xFFL);
+    dest[destOffset + 6] = (byte) (value >> 8 & 0xFFL);
+    dest[destOffset + 5] = (byte) (value >> 16 & 0xFFL);
+    dest[destOffset + 4] = (byte) (value >> 24 & 0xFFL);
+    dest[destOffset + 3] = (byte) (value >> 32 & 0xFFL);
+    dest[destOffset + 2] = (byte) (value >> 40 & 0xFFL);
+    dest[destOffset + 1] = (byte) (value >> 48 & 0xFFL);
+    dest[destOffset] = (byte) (value >> 56 & 0xFFL);
+  }
+
+  /**
+   * Returns the {@code long} value whose base16 representation is stored in the first 16 chars of
+   * {@code chars} starting from the {@code offset}.
+   *
+   * @param chars the base16 representation of the {@code long}.
+   * @param offset the starting offset in the {@code CharSequence}.
+   */
+  static long longFromBase16String(CharSequence chars, int offset) {
+    Utils.checkArgument(chars.length() >= offset + LONG_BASE16, "chars too small");
+    return (decodeByte(chars.charAt(offset), chars.charAt(offset + 1)) & 0xFFL) << 56
+        | (decodeByte(chars.charAt(offset + 2), chars.charAt(offset + 3)) & 0xFFL) << 48
+        | (decodeByte(chars.charAt(offset + 4), chars.charAt(offset + 5)) & 0xFFL) << 40
+        | (decodeByte(chars.charAt(offset + 6), chars.charAt(offset + 7)) & 0xFFL) << 32
+        | (decodeByte(chars.charAt(offset + 8), chars.charAt(offset + 9)) & 0xFFL) << 24
+        | (decodeByte(chars.charAt(offset + 10), chars.charAt(offset + 11)) & 0xFFL) << 16
+        | (decodeByte(chars.charAt(offset + 12), chars.charAt(offset + 13)) & 0xFFL) << 8
+        | (decodeByte(chars.charAt(offset + 14), chars.charAt(offset + 15)) & 0xFFL);
+  }
+
+  /**
+   * Stores the base16 encoding of {@code value} in {@code dest} starting from {@code destOffset}.
+   *
+   * @param value the value to be converted.
+   * @param dest the destination char array.
+   * @param destOffset the starting offset in the destination char array.
+   */
+  static void longToBase16String(long value, char[] dest, int destOffset) {
+    byteToBase16((byte) (value >> 56 & 0xFFL), dest, destOffset);
+    byteToBase16((byte) (value >> 48 & 0xFFL), dest, destOffset + BYTE_BASE16);
+    byteToBase16((byte) (value >> 40 & 0xFFL), dest, destOffset + 2 * BYTE_BASE16);
+    byteToBase16((byte) (value >> 32 & 0xFFL), dest, destOffset + 3 * BYTE_BASE16);
+    byteToBase16((byte) (value >> 24 & 0xFFL), dest, destOffset + 4 * BYTE_BASE16);
+    byteToBase16((byte) (value >> 16 & 0xFFL), dest, destOffset + 5 * BYTE_BASE16);
+    byteToBase16((byte) (value >> 8 & 0xFFL), dest, destOffset + 6 * BYTE_BASE16);
+    byteToBase16((byte) (value & 0xFFL), dest, destOffset + 7 * BYTE_BASE16);
+  }
+
+  /**
+   * Encodes the specified byte and stores the two base16 characters in the {@code dest}.
+   *
+   * @param value the value to be converted.
+   * @param dest the destination char array.
+   * @param destOffset the starting offset in the destination char array.
+   */
+  static void byteToBase16String(byte value, char[] dest, int destOffset) {
+    byteToBase16(value, dest, destOffset);
+  }
+
+  /**
+   * Decodes the specified two character sequence, and returns the resulting {@code byte}.
+   *
+   * @param chars the character sequence to be decoded.
+   * @param offset the starting offset in the {@code CharSequence}.
+   * @return the resulting {@code byte}
+   * @throws IllegalArgumentException if the input is not a valid encoded string according to this
+   *     encoding.
+   */
+  static byte byteFromBase16String(CharSequence chars, int offset) {
+    Utils.checkArgument(chars.length() >= offset + 2, "chars too small");
+    return decodeByte(chars.charAt(offset), chars.charAt(offset + 1));
+  }
+
+  private static byte decodeByte(char hi, char lo) {
+    Utils.checkArgument(lo < ASCII_CHARACTERS && DECODING[lo] != -1, "invalid character " + lo);
+    Utils.checkArgument(hi < ASCII_CHARACTERS && DECODING[hi] != -1, "invalid character " + hi);
+    int decoded = DECODING[hi] << 4 | DECODING[lo];
+    return (byte) decoded;
+  }
+
+  private static void byteToBase16(byte value, char[] dest, int destOffset) {
+    int b = value & 0xFF;
+    dest[destOffset] = ENCODING[b];
+    dest[destOffset + 1] = ENCODING[b | 0x100];
+  }
+
+  private BigendianEncoding() {}
+}
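Since BigendianEncoding is package-private, the sketch below would live in io.opencensus.trace; it shows the base16 round trip of a single long id (the id value is arbitrary).

    package io.opencensus.trace;

    final class BigendianEncodingSketch {
      static boolean roundTrips() {
        long id = 0xff00000000000041L;
        char[] chars = new char[BigendianEncoding.LONG_BASE16];
        BigendianEncoding.longToBase16String(id, chars, 0);
        String base16 = new String(chars); // "ff00000000000041"
        return BigendianEncoding.longFromBase16String(base16, 0) == id;
      }

      private BigendianEncodingSketch() {}
    }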
diff --git a/api/src/main/java/io/opencensus/trace/ContextHandle.java b/api/src/main/java/io/opencensus/trace/ContextHandle.java
new file mode 100644
index 0000000..985d261
--- /dev/null
+++ b/api/src/main/java/io/opencensus/trace/ContextHandle.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2016-17, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.trace;
+
+public interface ContextHandle {
+
+  ContextHandle attach();
+
+  void detach(ContextHandle contextHandle);
+}
diff --git a/api/src/main/java/io/opencensus/trace/ContextManager.java b/api/src/main/java/io/opencensus/trace/ContextManager.java
new file mode 100644
index 0000000..c13ecc0
--- /dev/null
+++ b/api/src/main/java/io/opencensus/trace/ContextManager.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2016-17, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.trace;
+
+public interface ContextManager {
+
+  ContextHandle currentContext();
+
+  ContextHandle withValue(ContextHandle contextHandle, @javax.annotation.Nullable Span span);
+
+  Span getValue(ContextHandle contextHandle);
+}
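These two interfaces abstract the context mechanism away from io.grpc.Context; CurrentSpanUtils is rewritten in terms of them in the next file. A usage sketch of the attach/detach pattern via ContextHandleUtils, with BlankSpan standing in for a real span:

    import io.opencensus.trace.BlankSpan;
    import io.opencensus.trace.ContextHandle;
    import io.opencensus.trace.Span;
    import io.opencensus.trace.unsafe.ContextHandleUtils;

    final class ContextHandleSketch {
      static void runWithSpan(Runnable work) {
        Span span = BlankSpan.INSTANCE;
        // attach() returns the previously current handle, which must be restored afterwards.
        ContextHandle previous =
            ContextHandleUtils.withValue(ContextHandleUtils.currentContext(), span).attach();
        try {
          work.run(); // ContextHandleUtils.getValue(ContextHandleUtils.currentContext()) == span
        } finally {
          ContextHandleUtils.currentContext().detach(previous);
        }
      }

      private ContextHandleSketch() {}
    }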
diff --git a/api/src/main/java/io/opencensus/trace/CurrentSpanUtils.java b/api/src/main/java/io/opencensus/trace/CurrentSpanUtils.java
index aa2f055..c09c975 100644
--- a/api/src/main/java/io/opencensus/trace/CurrentSpanUtils.java
+++ b/api/src/main/java/io/opencensus/trace/CurrentSpanUtils.java
@@ -16,14 +16,14 @@
 
 package io.opencensus.trace;
 
-import io.grpc.Context;
 import io.opencensus.common.Scope;
-import io.opencensus.trace.unsafe.ContextUtils;
+import io.opencensus.trace.unsafe.ContextHandleUtils;
 import java.util.concurrent.Callable;
 import javax.annotation.Nullable;
 
 /** Util methods/functionality to interact with the {@link Span} in the {@link io.grpc.Context}. */
 final class CurrentSpanUtils {
+
   // No instance of this class.
   private CurrentSpanUtils() {}
 
@@ -34,7 +34,7 @@
    */
   @Nullable
   static Span getCurrentSpan() {
-    return ContextUtils.CONTEXT_SPAN_KEY.get();
+    return ContextHandleUtils.getValue(ContextHandleUtils.currentContext());
   }
 
   /**
@@ -78,7 +78,8 @@
 
   // Defines an arbitrary scope of code as a traceable operation. Supports try-with-resources idiom.
   private static final class ScopeInSpan implements Scope {
-    private final Context origContext;
+
+    private final ContextHandle origContext;
     private final Span span;
     private final boolean endSpan;
 
@@ -90,12 +91,13 @@
     private ScopeInSpan(Span span, boolean endSpan) {
       this.span = span;
       this.endSpan = endSpan;
-      origContext = Context.current().withValue(ContextUtils.CONTEXT_SPAN_KEY, span).attach();
+      origContext =
+          ContextHandleUtils.withValue(ContextHandleUtils.currentContext(), span).attach();
     }
 
     @Override
     public void close() {
-      Context.current().detach(origContext);
+      ContextHandleUtils.currentContext().detach(origContext);
       if (endSpan) {
         span.end();
       }
@@ -103,6 +105,7 @@
   }
 
   private static final class RunnableInSpan implements Runnable {
+
     // TODO(bdrutu): Investigate if the extra private visibility increases the generated bytecode.
     private final Span span;
     private final Runnable runnable;
@@ -116,8 +119,8 @@
 
     @Override
     public void run() {
-      Context origContext =
-          Context.current().withValue(ContextUtils.CONTEXT_SPAN_KEY, span).attach();
+      ContextHandle origContext =
+          ContextHandleUtils.withValue(ContextHandleUtils.currentContext(), span).attach();
       try {
         runnable.run();
       } catch (Throwable t) {
@@ -129,7 +132,7 @@
         }
         throw new RuntimeException("unexpected", t);
       } finally {
-        Context.current().detach(origContext);
+        ContextHandleUtils.currentContext().detach(origContext);
         if (endSpan) {
           span.end();
         }
@@ -138,6 +141,7 @@
   }
 
   private static final class CallableInSpan<V> implements Callable<V> {
+
     private final Span span;
     private final Callable<V> callable;
     private final boolean endSpan;
@@ -150,8 +154,8 @@
 
     @Override
     public V call() throws Exception {
-      Context origContext =
-          Context.current().withValue(ContextUtils.CONTEXT_SPAN_KEY, span).attach();
+      ContextHandle origContext =
+          ContextHandleUtils.withValue(ContextHandleUtils.currentContext(), span).attach();
       try {
         return callable.call();
       } catch (Exception e) {
@@ -164,7 +168,7 @@
         }
         throw new RuntimeException("unexpected", t);
       } finally {
-        Context.current().detach(origContext);
+        ContextHandleUtils.currentContext().detach(origContext);
         if (endSpan) {
           span.end();
         }
diff --git a/api/src/main/java/io/opencensus/trace/EndSpanOptions.java b/api/src/main/java/io/opencensus/trace/EndSpanOptions.java
index b0d9a47..657724b 100644
--- a/api/src/main/java/io/opencensus/trace/EndSpanOptions.java
+++ b/api/src/main/java/io/opencensus/trace/EndSpanOptions.java
@@ -53,10 +53,6 @@
    * io.opencensus.trace.export.SampledSpanStore#registerSpanNamesForCollection(Collection)} in
    * advance for this span name.
    *
-   * <p>It is strongly recommended to use the {@link
-   * io.opencensus.trace.export.SampledSpanStore#registerSpanNamesForCollection(Collection)} API
-   * instead.
-   *
    * @return {@code true} if the name of the {@code Span} should be registered to the {@code
    *     io.opencensus.trace.export.SampledSpanStore}.
    * @since 0.8
diff --git a/api/src/main/java/io/opencensus/trace/LowerCaseBase16Encoding.java b/api/src/main/java/io/opencensus/trace/LowerCaseBase16Encoding.java
deleted file mode 100644
index bca9586..0000000
--- a/api/src/main/java/io/opencensus/trace/LowerCaseBase16Encoding.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright 2018, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.trace;
-
-import io.opencensus.internal.Utils;
-import java.util.Arrays;
-
-/** Internal copy of the Guava implementation of the {@code BaseEncoding.base16().lowerCase()}. */
-final class LowerCaseBase16Encoding {
-  private static final String ALPHABET = "0123456789abcdef";
-  private static final int ASCII_CHARACTERS = 128;
-  private static final char[] ENCODING = buildEncodingArray();
-  private static final byte[] DECODING = buildDecodingArray();
-
-  private static char[] buildEncodingArray() {
-    char[] encoding = new char[512];
-    for (int i = 0; i < 256; ++i) {
-      encoding[i] = ALPHABET.charAt(i >>> 4);
-      encoding[i | 0x100] = ALPHABET.charAt(i & 0xF);
-    }
-    return encoding;
-  }
-
-  private static byte[] buildDecodingArray() {
-    byte[] decoding = new byte[ASCII_CHARACTERS];
-    Arrays.fill(decoding, (byte) -1);
-    for (int i = 0; i < ALPHABET.length(); i++) {
-      char c = ALPHABET.charAt(i);
-      decoding[c] = (byte) i;
-    }
-    return decoding;
-  }
-
-  /**
-   * Encodes the specified byte array, and returns the encoded {@code String}.
-   *
-   * @param bytes byte array to be encoded.
-   * @return the encoded {@code String}.
-   */
-  static String encodeToString(byte[] bytes) {
-    StringBuilder stringBuilder = new StringBuilder(bytes.length * 2);
-    for (byte byteVal : bytes) {
-      int b = byteVal & 0xFF;
-      stringBuilder.append(ENCODING[b]);
-      stringBuilder.append(ENCODING[b | 0x100]);
-    }
-    return stringBuilder.toString();
-  }
-
-  /**
-   * Decodes the specified character sequence, and returns the resulting {@code byte[]}.
-   *
-   * @param chars the character sequence to be decoded.
-   * @return the resulting {@code byte[]}
-   * @throws IllegalArgumentException if the input is not a valid encoded string according to this
-   *     encoding.
-   */
-  static byte[] decodeToBytes(CharSequence chars) {
-    Utils.checkArgument(chars.length() % 2 == 0, "Invalid input length " + chars.length());
-    int bytesWritten = 0;
-    byte[] bytes = new byte[chars.length() / 2];
-    for (int i = 0; i < chars.length(); i += 2) {
-      bytes[bytesWritten++] = decodeByte(chars.charAt(i), chars.charAt(i + 1));
-    }
-    return bytes;
-  }
-
-  private static byte decodeByte(char hi, char lo) {
-    Utils.checkArgument(lo < ASCII_CHARACTERS && DECODING[lo] != -1, "Invalid character " + lo);
-    Utils.checkArgument(hi < ASCII_CHARACTERS && DECODING[hi] != -1, "Invalid character " + hi);
-    int decoded = DECODING[hi] << 4 | DECODING[lo];
-    return (byte) decoded;
-  }
-
-  // Private constructor to disallow instances.
-  private LowerCaseBase16Encoding() {}
-}
diff --git a/api/src/main/java/io/opencensus/trace/Span.java b/api/src/main/java/io/opencensus/trace/Span.java
index 8f8253b..79d6de5 100644
--- a/api/src/main/java/io/opencensus/trace/Span.java
+++ b/api/src/main/java/io/opencensus/trace/Span.java
@@ -80,7 +80,7 @@
             ? DEFAULT_OPTIONS
             : Collections.<Options>unmodifiableSet(EnumSet.copyOf(options));
     Utils.checkArgument(
-        !context.getTraceOptions().isSampled() || (this.options.contains(Options.RECORD_EVENTS)),
+        !context.getTraceOptions().isSampled() || this.options.contains(Options.RECORD_EVENTS),
         "Span is sampled, but does not have RECORD_EVENTS set.");
   }
 
diff --git a/api/src/main/java/io/opencensus/trace/SpanId.java b/api/src/main/java/io/opencensus/trace/SpanId.java
index c43fa6b..a6731a5 100644
--- a/api/src/main/java/io/opencensus/trace/SpanId.java
+++ b/api/src/main/java/io/opencensus/trace/SpanId.java
@@ -17,7 +17,6 @@
 package io.opencensus.trace;
 
 import io.opencensus.internal.Utils;
-import java.util.Arrays;
 import java.util.Random;
 import javax.annotation.Nullable;
 import javax.annotation.concurrent.Immutable;
@@ -37,43 +36,37 @@
    */
   public static final int SIZE = 8;
 
-  private static final int HEX_SIZE = 2 * SIZE;
-
   /**
    * The invalid {@code SpanId}. All bytes are 0.
    *
    * @since 0.5
    */
-  public static final SpanId INVALID = new SpanId(new byte[SIZE]);
+  public static final SpanId INVALID = new SpanId(0);
+
+  private static final int BASE16_SIZE = 2 * SIZE;
+  private static final long INVALID_ID = 0;
 
   // The internal representation of the SpanId.
-  private final byte[] bytes;
+  private final long id;
 
-  private SpanId(byte[] bytes) {
-    this.bytes = bytes;
+  private SpanId(long id) {
+    this.id = id;
   }
 
   /**
    * Returns a {@code SpanId} built from a byte representation.
    *
-   * <p>Equivalent with:
-   *
-   * <pre>{@code
-   * SpanId.fromBytes(buffer, 0);
-   * }</pre>
-   *
-   * @param buffer the representation of the {@code SpanId}.
-   * @return a {@code SpanId} whose representation is given by the {@code buffer} parameter.
-   * @throws NullPointerException if {@code buffer} is null.
-   * @throws IllegalArgumentException if {@code buffer.length} is not {@link SpanId#SIZE}.
+   * @param src the representation of the {@code SpanId}.
+   * @return a {@code SpanId} whose representation is given by the {@code src} parameter.
+   * @throws NullPointerException if {@code src} is null.
+   * @throws IllegalArgumentException if {@code src.length} is not {@link SpanId#SIZE}.
    * @since 0.5
    */
-  public static SpanId fromBytes(byte[] buffer) {
-    Utils.checkNotNull(buffer, "buffer");
-    Utils.checkArgument(
-        buffer.length == SIZE, "Invalid size: expected %s, got %s", SIZE, buffer.length);
-    byte[] bytesCopied = Arrays.copyOf(buffer, SIZE);
-    return new SpanId(bytesCopied);
+  public static SpanId fromBytes(byte[] src) {
+    Utils.checkNotNull(src, "src");
+    // TODO: Remove this extra condition.
+    Utils.checkArgument(src.length == SIZE, "Invalid size: expected %s, got %s", SIZE, src.length);
+    return fromBytes(src, 0);
   }
 
   /**
@@ -90,9 +83,8 @@
    * @since 0.5
    */
   public static SpanId fromBytes(byte[] src, int srcOffset) {
-    byte[] bytes = new byte[SIZE];
-    System.arraycopy(src, srcOffset, bytes, 0, SIZE);
-    return new SpanId(bytes);
+    Utils.checkNotNull(src, "src");
+    return new SpanId(BigendianEncoding.longFromByteArray(src, srcOffset));
   }
 
   /**
@@ -106,9 +98,31 @@
    * @since 0.11
    */
   public static SpanId fromLowerBase16(CharSequence src) {
+    Utils.checkNotNull(src, "src");
+    // TODO: Remove this extra condition.
     Utils.checkArgument(
-        src.length() == HEX_SIZE, "Invalid size: expected %s, got %s", HEX_SIZE, src.length());
-    return new SpanId(LowerCaseBase16Encoding.decodeToBytes(src));
+        src.length() == BASE16_SIZE,
+        "Invalid size: expected %s, got %s",
+        BASE16_SIZE,
+        src.length());
+    return fromLowerBase16(src, 0);
+  }
+
+  /**
+   * Returns a {@code SpanId} built from a lowercase base16 representation.
+   *
+   * @param src the lowercase base16 representation.
+   * @param srcOffset the offset in the buffer where the representation of the {@code SpanId}
+   *     begins.
+   * @return a {@code SpanId} built from a lowercase base16 representation.
+   * @throws NullPointerException if {@code src} is null.
+   * @throws IllegalArgumentException if there are not enough characters in the {@code src} from
+   *     the {@code srcOffset}.
+   * @since 0.11
+   */
+  public static SpanId fromLowerBase16(CharSequence src, int srcOffset) {
+    Utils.checkNotNull(src, "src");
+    return new SpanId(BigendianEncoding.longFromBase16String(src, srcOffset));
   }
 
   /**
@@ -119,11 +133,11 @@
    * @since 0.5
    */
   public static SpanId generateRandomId(Random random) {
-    byte[] bytes = new byte[SIZE];
+    long id;
     do {
-      random.nextBytes(bytes);
-    } while (Arrays.equals(bytes, INVALID.bytes));
-    return new SpanId(bytes);
+      id = random.nextLong();
+    } while (id == INVALID_ID);
+    return new SpanId(id);
   }
 
   /**
@@ -133,19 +147,15 @@
    * @since 0.5
    */
   public byte[] getBytes() {
-    return Arrays.copyOf(bytes, SIZE);
+    byte[] bytes = new byte[SIZE];
+    BigendianEncoding.longToByteArray(id, bytes, 0);
+    return bytes;
   }
 
   /**
    * Copies the byte array representations of the {@code SpanId} into the {@code dest} beginning at
    * the {@code destOffset} offset.
    *
-   * <p>Equivalent with (but faster because it avoids any new allocations):
-   *
-   * <pre>{@code
-   * System.arraycopy(getBytes(), 0, dest, destOffset, SpanId.SIZE);
-   * }</pre>
-   *
    * @param dest the destination buffer.
    * @param destOffset the starting offset in the destination buffer.
    * @throws NullPointerException if {@code dest} is null.
@@ -154,7 +164,21 @@
    * @since 0.5
    */
   public void copyBytesTo(byte[] dest, int destOffset) {
-    System.arraycopy(bytes, 0, dest, destOffset, SIZE);
+    BigendianEncoding.longToByteArray(id, dest, destOffset);
+  }
+
+  /**
+   * Copies the lowercase base16 representations of the {@code SpanId} into the {@code dest}
+   * beginning at the {@code destOffset} offset.
+   *
+   * @param dest the destination buffer.
+   * @param destOffset the starting offset in the destination buffer.
+   * @throws IndexOutOfBoundsException if {@code destOffset + 2 * SpanId.SIZE} is greater than
+   *     {@code dest.length}.
+   * @since 0.18
+   */
+  public void copyLowerBase16To(char[] dest, int destOffset) {
+    BigendianEncoding.longToBase16String(id, dest, destOffset);
   }
 
   /**
@@ -165,7 +189,7 @@
    * @since 0.5
    */
   public boolean isValid() {
-    return !Arrays.equals(bytes, INVALID.bytes);
+    return id != INVALID_ID;
   }
 
   /**
@@ -175,7 +199,9 @@
    * @since 0.11
    */
   public String toLowerBase16() {
-    return LowerCaseBase16Encoding.encodeToString(bytes);
+    char[] chars = new char[BASE16_SIZE];
+    copyLowerBase16To(chars, 0);
+    return new String(chars);
   }
 
   @Override
@@ -189,12 +215,13 @@
     }
 
     SpanId that = (SpanId) obj;
-    return Arrays.equals(bytes, that.bytes);
+    return id == that.id;
   }
 
   @Override
   public int hashCode() {
-    return Arrays.hashCode(bytes);
+    // Copied from Long.hashCode in java8.
+    return (int) (id ^ (id >>> 32));
   }
 
   @Override
@@ -204,11 +231,7 @@
 
   @Override
   public int compareTo(SpanId that) {
-    for (int i = 0; i < SIZE; i++) {
-      if (bytes[i] != that.bytes[i]) {
-        return bytes[i] < that.bytes[i] ? -1 : 1;
-      }
-    }
-    return 0;
+    // Copied from Long.compare in java8.
+    return (id < that.id) ? -1 : ((id == that.id) ? 0 : 1);
   }
 }
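
SpanId now stores a single long instead of a byte[8]; the externally visible byte and base16 forms are unchanged, and copyLowerBase16To(char[], int) is new. A small round-trip sketch using only the methods shown in this diff:

    import io.opencensus.trace.SpanId;
    import java.util.Random;

    // Sketch only: round-trips a SpanId through its byte and lowercase base16 forms.
    public final class SpanIdRoundTrip {
      public static void main(String[] args) {
        SpanId id = SpanId.generateRandomId(new Random());

        String hex = id.toLowerBase16();          // 16 lowercase hex characters
        SpanId fromHex = SpanId.fromLowerBase16(hex);

        byte[] bytes = new byte[SpanId.SIZE];
        id.copyBytesTo(bytes, 0);
        SpanId fromBytes = SpanId.fromBytes(bytes);

        // Equality and compareTo are now single long comparisons rather than byte-array loops.
        System.out.println(id.equals(fromHex) && id.equals(fromBytes)); // true
      }
    }
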
diff --git a/api/src/main/java/io/opencensus/trace/Status.java b/api/src/main/java/io/opencensus/trace/Status.java
index 1fa8508..63f5f89 100644
--- a/api/src/main/java/io/opencensus/trace/Status.java
+++ b/api/src/main/java/io/opencensus/trace/Status.java
@@ -393,7 +393,7 @@
    * @return The newly created {@code Status} with the given description.
    * @since 0.5
    */
-  public Status withDescription(String description) {
+  public Status withDescription(@Nullable String description) {
     if (Utils.equalsObjects(this.description, description)) {
       return this;
     }
diff --git a/api/src/main/java/io/opencensus/trace/TraceId.java b/api/src/main/java/io/opencensus/trace/TraceId.java
index 465e4d4..249bc14 100644
--- a/api/src/main/java/io/opencensus/trace/TraceId.java
+++ b/api/src/main/java/io/opencensus/trace/TraceId.java
@@ -18,7 +18,6 @@
 
 import io.opencensus.common.Internal;
 import io.opencensus.internal.Utils;
-import java.util.Arrays;
 import java.util.Random;
 import javax.annotation.Nullable;
 import javax.annotation.concurrent.Immutable;
@@ -38,43 +37,38 @@
    */
   public static final int SIZE = 16;
 
-  private static final int HEX_SIZE = 32;
+  private static final int BASE16_SIZE = 2 * BigendianEncoding.LONG_BASE16;
+  private static final long INVALID_ID = 0;
 
   /**
    * The invalid {@code TraceId}. All bytes are '\0'.
    *
    * @since 0.5
    */
-  public static final TraceId INVALID = new TraceId(new byte[SIZE]);
+  public static final TraceId INVALID = new TraceId(INVALID_ID, INVALID_ID);
 
   // The internal representation of the TraceId.
-  private final byte[] bytes;
+  private final long idHi;
+  private final long idLo;
 
-  private TraceId(byte[] bytes) {
-    this.bytes = bytes;
+  private TraceId(long idHi, long idLo) {
+    this.idHi = idHi;
+    this.idLo = idLo;
   }
 
   /**
    * Returns a {@code TraceId} built from a byte representation.
    *
-   * <p>Equivalent with:
-   *
-   * <pre>{@code
-   * TraceId.fromBytes(buffer, 0);
-   * }</pre>
-   *
-   * @param buffer the representation of the {@code TraceId}.
-   * @return a {@code TraceId} whose representation is given by the {@code buffer} parameter.
-   * @throws NullPointerException if {@code buffer} is null.
-   * @throws IllegalArgumentException if {@code buffer.length} is not {@link TraceId#SIZE}.
+   * @param src the representation of the {@code TraceId}.
+   * @return a {@code TraceId} whose representation is given by the {@code src} parameter.
+   * @throws NullPointerException if {@code src} is null.
+   * @throws IllegalArgumentException if {@code src.length} is not {@link TraceId#SIZE}.
    * @since 0.5
    */
-  public static TraceId fromBytes(byte[] buffer) {
-    Utils.checkNotNull(buffer, "buffer");
-    Utils.checkArgument(
-        buffer.length == SIZE, "Invalid size: expected %s, got %s", SIZE, buffer.length);
-    byte[] bytesCopied = Arrays.copyOf(buffer, SIZE);
-    return new TraceId(bytesCopied);
+  public static TraceId fromBytes(byte[] src) {
+    Utils.checkNotNull(src, "src");
+    Utils.checkArgument(src.length == SIZE, "Invalid size: expected %s, got %s", SIZE, src.length);
+    return fromBytes(src, 0);
   }
 
   /**
@@ -91,9 +85,10 @@
    * @since 0.5
    */
   public static TraceId fromBytes(byte[] src, int srcOffset) {
-    byte[] bytes = new byte[SIZE];
-    System.arraycopy(src, srcOffset, bytes, 0, SIZE);
-    return new TraceId(bytes);
+    Utils.checkNotNull(src, "src");
+    return new TraceId(
+        BigendianEncoding.longFromByteArray(src, srcOffset),
+        BigendianEncoding.longFromByteArray(src, srcOffset + BigendianEncoding.LONG_BYTES));
   }
 
   /**
@@ -107,9 +102,32 @@
    * @since 0.11
    */
   public static TraceId fromLowerBase16(CharSequence src) {
+    Utils.checkNotNull(src, "src");
     Utils.checkArgument(
-        src.length() == HEX_SIZE, "Invalid size: expected %s, got %s", HEX_SIZE, src.length());
-    return new TraceId(LowerCaseBase16Encoding.decodeToBytes(src));
+        src.length() == BASE16_SIZE,
+        "Invalid size: expected %s, got %s",
+        BASE16_SIZE,
+        src.length());
+    return fromLowerBase16(src, 0);
+  }
+
+  /**
+   * Returns a {@code TraceId} built from a lowercase base16 representation.
+   *
+   * @param src the lowercase base16 representation.
+   * @param srcOffset the offset in the buffer where the representation of the {@code TraceId}
+   *     begins.
+   * @return a {@code TraceId} built from a lowercase base16 representation.
+   * @throws NullPointerException if {@code src} is null.
+   * @throws IllegalArgumentException if there are not enough characters in the {@code src} from
+   *     the {@code srcOffset}.
+   * @since 0.11
+   */
+  public static TraceId fromLowerBase16(CharSequence src, int srcOffset) {
+    Utils.checkNotNull(src, "src");
+    return new TraceId(
+        BigendianEncoding.longFromBase16String(src, srcOffset),
+        BigendianEncoding.longFromBase16String(src, srcOffset + BigendianEncoding.LONG_BASE16));
   }
 
   /**
@@ -120,11 +138,13 @@
    * @since 0.5
    */
   public static TraceId generateRandomId(Random random) {
-    byte[] bytes = new byte[SIZE];
+    long idHi;
+    long idLo;
     do {
-      random.nextBytes(bytes);
-    } while (Arrays.equals(bytes, INVALID.bytes));
-    return new TraceId(bytes);
+      idHi = random.nextLong();
+      idLo = random.nextLong();
+    } while (idHi == INVALID_ID && idLo == INVALID_ID);
+    return new TraceId(idHi, idLo);
   }
 
   /**
@@ -134,19 +154,16 @@
    * @since 0.5
    */
   public byte[] getBytes() {
-    return Arrays.copyOf(bytes, SIZE);
+    byte[] bytes = new byte[SIZE];
+    BigendianEncoding.longToByteArray(idHi, bytes, 0);
+    BigendianEncoding.longToByteArray(idLo, bytes, BigendianEncoding.LONG_BYTES);
+    return bytes;
   }
 
   /**
    * Copies the byte array representations of the {@code TraceId} into the {@code dest} beginning at
    * the {@code destOffset} offset.
    *
-   * <p>Equivalent with (but faster because it avoids any new allocations):
-   *
-   * <pre>{@code
-   * System.arraycopy(getBytes(), 0, dest, destOffset, TraceId.SIZE);
-   * }</pre>
-   *
    * @param dest the destination buffer.
    * @param destOffset the starting offset in the destination buffer.
    * @throws NullPointerException if {@code dest} is null.
@@ -155,7 +172,23 @@
    * @since 0.5
    */
   public void copyBytesTo(byte[] dest, int destOffset) {
-    System.arraycopy(bytes, 0, dest, destOffset, SIZE);
+    BigendianEncoding.longToByteArray(idHi, dest, destOffset);
+    BigendianEncoding.longToByteArray(idLo, dest, destOffset + BigendianEncoding.LONG_BYTES);
+  }
+
+  /**
+   * Copies the lowercase base16 representations of the {@code TraceId} into the {@code dest}
+   * beginning at the {@code destOffset} offset.
+   *
+   * @param dest the destination buffer.
+   * @param destOffset the starting offset in the destination buffer.
+   * @throws IndexOutOfBoundsException if {@code destOffset + 2 * TraceId.SIZE} is greater than
+   *     {@code dest.length}.
+   * @since 0.18
+   */
+  public void copyLowerBase16To(char[] dest, int destOffset) {
+    BigendianEncoding.longToBase16String(idHi, dest, destOffset);
+    BigendianEncoding.longToBase16String(idLo, dest, destOffset + BASE16_SIZE / 2);
   }
 
   /**
@@ -166,7 +199,7 @@
    * @since 0.5
    */
   public boolean isValid() {
-    return !Arrays.equals(bytes, INVALID.bytes);
+    return idHi != INVALID_ID || idLo != INVALID_ID;
   }
 
   /**
@@ -176,7 +209,9 @@
    * @since 0.11
    */
   public String toLowerBase16() {
-    return LowerCaseBase16Encoding.encodeToString(bytes);
+    char[] chars = new char[BASE16_SIZE];
+    copyLowerBase16To(chars, 0);
+    return new String(chars);
   }
 
   /**
@@ -189,15 +224,7 @@
    */
   @Internal
   public long getLowerLong() {
-    long result = 0;
-    for (int i = 0; i < Long.SIZE / Byte.SIZE; i++) {
-      result <<= Byte.SIZE;
-      result |= (bytes[i] & 0xff);
-    }
-    if (result < 0) {
-      return -result;
-    }
-    return result;
+    return (idHi < 0) ? -idHi : idHi;
   }
 
   @Override
@@ -211,12 +238,16 @@
     }
 
     TraceId that = (TraceId) obj;
-    return Arrays.equals(bytes, that.bytes);
+    return idHi == that.idHi && idLo == that.idLo;
   }
 
   @Override
   public int hashCode() {
-    return Arrays.hashCode(bytes);
+    // Copied from Arrays.hashCode(long[])
+    int result = 1;
+    result = 31 * result + ((int) (idHi ^ (idHi >>> 32)));
+    result = 31 * result + ((int) (idLo ^ (idLo >>> 32)));
+    return result;
   }
 
   @Override
@@ -226,11 +257,12 @@
 
   @Override
   public int compareTo(TraceId that) {
-    for (int i = 0; i < SIZE; i++) {
-      if (bytes[i] != that.bytes[i]) {
-        return bytes[i] < that.bytes[i] ? -1 : 1;
+    if (idHi == that.idHi) {
+      if (idLo == that.idLo) {
+        return 0;
       }
+      return idLo < that.idLo ? -1 : 1;
     }
-    return 0;
+    return idHi < that.idHi ? -1 : 1;
   }
 }
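
TraceId gets the same treatment with two longs, plus offset-based variants fromLowerBase16(CharSequence, int) and copyLowerBase16To(char[], int) so propagators can parse and render headers without substring allocations. A sketch of the offset variant (the "00-" prefix below is just a stand-in for surrounding header text):

    import io.opencensus.trace.TraceId;

    // Sketch only: parsing a TraceId that starts at an offset inside a larger string.
    public final class TraceIdAtOffset {
      public static void main(String[] args) {
        String header = "00-463ac35c9f6413ad48485a3953bb6124"; // 32 hex chars start at offset 3
        TraceId traceId = TraceId.fromLowerBase16(header, 3);

        char[] out = new char[2 * TraceId.SIZE];
        traceId.copyLowerBase16To(out, 0);
        System.out.println(new String(out)); // 463ac35c9f6413ad48485a3953bb6124
      }
    }
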
diff --git a/api/src/main/java/io/opencensus/trace/TraceOptions.java b/api/src/main/java/io/opencensus/trace/TraceOptions.java
index 218f4da..13639d7 100644
--- a/api/src/main/java/io/opencensus/trace/TraceOptions.java
+++ b/api/src/main/java/io/opencensus/trace/TraceOptions.java
@@ -43,6 +43,8 @@
    */
   public static final int SIZE = 1;
 
+  private static final int BASE16_SIZE = 2 * SIZE;
+
   /**
    * The default {@code TraceOptions}.
    *
@@ -115,6 +117,22 @@
   }
 
   /**
+   * Returns a {@code TraceOptions} built from a lowercase base16 representation.
+   *
+   * @param src the lowercase base16 representation.
+   * @param srcOffset the offset in the buffer where the representation of the {@code TraceOptions}
+   *     begins.
+   * @return a {@code TraceOptions} built from a lowercase base16 representation.
+   * @throws NullPointerException if {@code src} is null.
+   * @throws IllegalArgumentException if {@code src.length()} is not {@code 2 * TraceOptions.SIZE}
+   *     OR if {@code src} has invalid characters.
+   * @since 0.18
+   */
+  public static TraceOptions fromLowerBase16(CharSequence src, int srcOffset) {
+    return new TraceOptions(BigendianEncoding.byteFromBase16String(src, srcOffset));
+  }
+
+  /**
    * Returns the one byte representation of the {@code TraceOptions}.
    *
    * @return the one byte representation of the {@code TraceOptions}.
@@ -161,6 +179,32 @@
   }
 
   /**
+   * Copies the lowercase base16 representations of the {@code TraceOptions} into the {@code dest}
+   * beginning at the {@code destOffset} offset.
+   *
+   * @param dest the destination buffer.
+   * @param destOffset the starting offset in the destination buffer.
+   * @throws IndexOutOfBoundsException if {@code destOffset + 2} is greater than {@code
+   *     dest.length}.
+   * @since 0.18
+   */
+  public void copyLowerBase16To(char[] dest, int destOffset) {
+    BigendianEncoding.byteToBase16String(options, dest, destOffset);
+  }
+
+  /**
+   * Returns the lowercase base16 encoding of this {@code TraceOptions}.
+   *
+   * @return the lowercase base16 encoding of this {@code TraceOptions}.
+   * @since 0.18
+   */
+  public String toLowerBase16() {
+    char[] chars = new char[BASE16_SIZE];
+    copyLowerBase16To(chars, 0);
+    return new String(chars);
+  }
+
+  /**
    * Returns a new {@link Builder} with default options.
    *
    * @return a new {@code Builder} with default options.
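
TraceOptions gains the matching pair fromLowerBase16(CharSequence, int) and toLowerBase16(), so a full traceparent-style header can be rendered piece by piece without intermediate byte arrays. A sketch, assuming the existing TraceOptions.builder()/setIsSampled(boolean) API:

    import io.opencensus.trace.TraceOptions;

    // Sketch only: the two-character base16 form added for TraceOptions.
    public final class TraceOptionsBase16 {
      public static void main(String[] args) {
        TraceOptions sampled = TraceOptions.builder().setIsSampled(true).build();
        String hex = sampled.toLowerBase16();               // "01"
        TraceOptions parsed = TraceOptions.fromLowerBase16(hex, 0);
        System.out.println(parsed.isSampled());             // true
      }
    }
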
diff --git a/api/src/main/java/io/opencensus/trace/Tracing.java b/api/src/main/java/io/opencensus/trace/Tracing.java
index f55cd77..9de1530 100644
--- a/api/src/main/java/io/opencensus/trace/Tracing.java
+++ b/api/src/main/java/io/opencensus/trace/Tracing.java
@@ -32,6 +32,7 @@
  * @since 0.5
  */
 public final class Tracing {
+
   private static final Logger logger = Logger.getLogger(Tracing.class.getName());
   private static final TraceComponent traceComponent =
       loadTraceComponent(TraceComponent.class.getClassLoader());
@@ -93,6 +94,21 @@
       // Call Class.forName with literal string name of the class to help shading tools.
       return Provider.createInstance(
           Class.forName(
+              "io.opentelemetry.opencensusshim.OpenTelemetryTraceComponentImpl",
+              /*initialize=*/ true,
+              classLoader),
+          TraceComponent.class);
+    } catch (ClassNotFoundException e) {
+      logger.log(
+          Level.FINE,
+          "Couldn't load full implementation for OpenTelemetry TraceComponent, now trying to load "
+              + "original implementation.",
+          e);
+    }
+    try {
+      // Call Class.forName with literal string name of the class to help shading tools.
+      return Provider.createInstance(
+          Class.forName(
               "io.opencensus.impl.trace.TraceComponentImpl", /*initialize=*/ true, classLoader),
           TraceComponent.class);
     } catch (ClassNotFoundException e) {
diff --git a/api/src/main/java/io/opencensus/trace/export/RunningSpanStore.java b/api/src/main/java/io/opencensus/trace/export/RunningSpanStore.java
index fac3c85..79e3276 100644
--- a/api/src/main/java/io/opencensus/trace/export/RunningSpanStore.java
+++ b/api/src/main/java/io/opencensus/trace/export/RunningSpanStore.java
@@ -68,6 +68,17 @@
   public abstract Collection<SpanData> getRunningSpans(Filter filter);
 
   /**
+   * Sets the maximum number of Spans in the {@code RunningSpanStore}.
+   *
+   * <p>{@code 0} means disabled; by default the {@code RunningSpanStore} is disabled.
+   *
+   * @param maxNumberOfSpans the maximum number of Spans in the {@code RunningSpanStore}.
+   * @throws IllegalArgumentException if {@code maxNumberOfSpans} is negative.
+   * @since 0.22
+   */
+  public abstract void setMaxNumberOfSpans(int maxNumberOfSpans);
+
+  /**
    * The summary of all available data.
    *
    * @since 0.5
@@ -197,5 +208,10 @@
       Utils.checkNotNull(filter, "filter");
       return Collections.<SpanData>emptyList();
     }
+
+    @Override
+    public void setMaxNumberOfSpans(int maxNumberOfSpans) {
+      Utils.checkArgument(maxNumberOfSpans >= 0, "Invalid negative maxNumberOfSpans");
+    }
   }
 }
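
setMaxNumberOfSpans makes the in-process running-span view configurable at runtime: 0 keeps it disabled (the default) and negative values are rejected. A sketch, assuming the usual Tracing.getExportComponent() accessor:

    import io.opencensus.trace.Tracing;
    import io.opencensus.trace.export.RunningSpanStore;

    // Sketch only: enabling the RunningSpanStore via the new setter.
    public final class EnableRunningSpanStore {
      public static void main(String[] args) {
        RunningSpanStore store = Tracing.getExportComponent().getRunningSpanStore();
        store.setMaxNumberOfSpans(100); // keep at most 100 running spans; 0 disables the store
      }
    }
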
diff --git a/api/src/main/java/io/opencensus/trace/export/SampledSpanStore.java b/api/src/main/java/io/opencensus/trace/export/SampledSpanStore.java
index 5d00a45..948c026 100644
--- a/api/src/main/java/io/opencensus/trace/export/SampledSpanStore.java
+++ b/api/src/main/java/io/opencensus/trace/export/SampledSpanStore.java
@@ -18,6 +18,7 @@
 
 import com.google.auto.value.AutoValue;
 import io.opencensus.internal.Utils;
+import io.opencensus.trace.EndSpanOptions;
 import io.opencensus.trace.Span;
 import io.opencensus.trace.Status;
 import io.opencensus.trace.Status.CanonicalCode;
@@ -106,7 +107,9 @@
    *
    * @param spanNames list of span names for which the library will collect samples.
    * @since 0.5
+   * @deprecated since 0.18. Use {@link EndSpanOptions#getSampleToLocalSpanStore()}.
    */
+  @Deprecated
   public abstract void registerSpanNamesForCollection(Collection<String> spanNames);
 
   /**
@@ -118,7 +121,10 @@
    *
    * @param spanNames list of span names for which the library will no longer collect samples.
    * @since 0.5
+   * @deprecated since 0.18. The need to control the registration of span names will be removed
+   *     soon.
    */
+  @Deprecated
   public abstract void unregisterSpanNamesForCollection(Collection<String> spanNames);
 
   /**
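
With registerSpanNamesForCollection deprecated, the per-span replacement is to request local sampling when the span ends. A sketch, assuming EndSpanOptions.builder().setSampleToLocalSpanStore(boolean) as referenced by the new @deprecated notes:

    import io.opencensus.trace.EndSpanOptions;
    import io.opencensus.trace.Span;
    import io.opencensus.trace.Tracer;
    import io.opencensus.trace.Tracing;

    // Sketch only: opting one span into the local SampledSpanStore at end time.
    public final class SampleToLocalStore {
      private static final Tracer tracer = Tracing.getTracer();

      static void handleRequest() {
        Span span = tracer.spanBuilder("handleRequest").startSpan();
        try {
          // ... do work ...
        } finally {
          span.end(EndSpanOptions.builder().setSampleToLocalSpanStore(true).build());
        }
      }

      public static void main(String[] args) {
        handleRequest();
      }
    }
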
diff --git a/api/src/main/java/io/opencensus/trace/propagation/PropagationComponent.java b/api/src/main/java/io/opencensus/trace/propagation/PropagationComponent.java
index a90f041..adacbca 100644
--- a/api/src/main/java/io/opencensus/trace/propagation/PropagationComponent.java
+++ b/api/src/main/java/io/opencensus/trace/propagation/PropagationComponent.java
@@ -44,12 +44,23 @@
    * no implementation is provided then no-op implementation will be used.
    *
    * @since 0.11.0
-   * @return the B3 {@code TextFormat} implementation for B3.
+   * @return the B3 {@code TextFormat} implementation.
    */
   @ExperimentalApi
   public abstract TextFormat getB3Format();
 
   /**
+   * Returns the TraceContext {@link TextFormat} with the provided implementations. See <a
+   * href="https://github.com/w3c/distributed-tracing">w3c/distributed-tracing</a> for more
+   * information. If no implementation is provided, a no-op implementation will be used.
+   *
+   * @since 0.16.0
+   * @return the TraceContext {@code TextFormat} implementation.
+   */
+  @ExperimentalApi
+  public abstract TextFormat getTraceContextFormat();
+
+  /**
    * Returns an instance that contains no-op implementations for all the instances.
    *
    * @return an instance that contains no-op implementations for all the instances.
@@ -69,5 +80,10 @@
     public TextFormat getB3Format() {
       return TextFormat.getNoopTextFormat();
     }
+
+    @Override
+    public TextFormat getTraceContextFormat() {
+      return TextFormat.getNoopTextFormat();
+    }
   }
 }
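
getTraceContextFormat exposes a W3C trace-context TextFormat next to the existing B3 one; the no-op implementation is returned until a real propagation component is on the classpath. A sketch of injecting the current span's context into a map carrier, assuming the standard TextFormat.Setter API:

    import io.opencensus.trace.Tracing;
    import io.opencensus.trace.propagation.TextFormat;
    import java.util.HashMap;
    import java.util.Map;

    // Sketch only: injecting the current SpanContext with the new TraceContext format.
    public final class TraceContextInject {
      private static final TextFormat.Setter<Map<String, String>> MAP_SETTER =
          new TextFormat.Setter<Map<String, String>>() {
            @Override
            public void put(Map<String, String> carrier, String key, String value) {
              carrier.put(key, value);
            }
          };

      public static void main(String[] args) {
        TextFormat format = Tracing.getPropagationComponent().getTraceContextFormat();
        Map<String, String> carrier = new HashMap<String, String>();
        format.inject(Tracing.getTracer().getCurrentSpan().getContext(), carrier, MAP_SETTER);
        System.out.println(carrier); // a "traceparent" entry once a real implementation is loaded
      }
    }
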
diff --git a/api/src/main/java/io/opencensus/trace/unsafe/ContextHandleImpl.java b/api/src/main/java/io/opencensus/trace/unsafe/ContextHandleImpl.java
new file mode 100644
index 0000000..89b733d
--- /dev/null
+++ b/api/src/main/java/io/opencensus/trace/unsafe/ContextHandleImpl.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2016-17, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.trace.unsafe;
+
+import io.grpc.Context;
+import io.opencensus.trace.ContextHandle;
+
+/** {@code ContextHandle} implementation using {@link io.grpc.Context}. */
+class ContextHandleImpl implements ContextHandle {
+
+  private final Context context;
+
+  public ContextHandleImpl(Context context) {
+    this.context = context;
+  }
+
+  Context getContext() {
+    return context;
+  }
+
+  @Override
+  public ContextHandle attach() {
+    return new ContextHandleImpl(context.attach());
+  }
+
+  @Override
+  public void detach(ContextHandle contextHandle) {
+    ContextHandleImpl impl = (ContextHandleImpl) contextHandle;
+    context.detach(impl.context);
+  }
+}
diff --git a/api/src/main/java/io/opencensus/trace/unsafe/ContextHandleUtils.java b/api/src/main/java/io/opencensus/trace/unsafe/ContextHandleUtils.java
new file mode 100644
index 0000000..dc185c4
--- /dev/null
+++ b/api/src/main/java/io/opencensus/trace/unsafe/ContextHandleUtils.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2016-17, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.trace.unsafe;
+
+import io.grpc.Context;
+import io.opencensus.internal.Provider;
+import io.opencensus.trace.ContextHandle;
+import io.opencensus.trace.ContextManager;
+import io.opencensus.trace.Span;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.annotation.Nullable;
+
+public class ContextHandleUtils {
+
+  // No instance of this class.
+  private ContextHandleUtils() {}
+
+  private static final Logger LOGGER = Logger.getLogger(ContextHandleUtils.class.getName());
+  private static final ContextManager CONTEXT_MANAGER =
+      loadContextManager(ContextManager.class.getClassLoader());
+
+  private static ContextManager loadContextManager(@Nullable ClassLoader classLoader) {
+    try {
+      return Provider.createInstance(
+          Class.forName(
+              "io.opentelemetry.opencensusshim.OpenTelemetryContextManager",
+              /*initialize=*/ true,
+              classLoader),
+          ContextManager.class);
+    } catch (ClassNotFoundException e) {
+      LOGGER.log(
+          Level.FINE,
+          "Couldn't load full implementation for OpenTelemetry context manager, now loading "
+              + "original implementation.",
+          e);
+    }
+    return new ContextManagerImpl();
+  }
+
+  public static ContextHandle currentContext() {
+    return CONTEXT_MANAGER.currentContext();
+  }
+
+  /**
+   * Creates a new {@code ContextHandle} with the given value set.
+   *
+   * @param context the parent {@code ContextHandle}.
+   * @param span the value to be set.
+   * @return a new context with the given value set.
+   */
+  public static ContextHandle withValue(
+      ContextHandle context, @javax.annotation.Nullable Span span) {
+    return CONTEXT_MANAGER.withValue(context, span);
+  }
+
+  /**
+   * Returns the value from the specified {@code ContextHandle}.
+   *
+   * @param context the specified {@code ContextHandle}.
+   * @return the value from the specified {@code ContextHandle}.
+   */
+  public static Span getValue(ContextHandle context) {
+    return CONTEXT_MANAGER.getValue(context);
+  }
+
+  /**
+   * Attempts to pull the {@link io.grpc.Context} out of an OpenCensus {@code ContextHandle}.
+   *
+   * @return The context, or null if not a GRPC backed context handle.
+   */
+  @Nullable
+  public static Context tryExtractGrpcContext(ContextHandle handle) {
+    if (handle instanceof ContextHandleImpl) {
+      return ((ContextHandleImpl) handle).getContext();
+    }
+    // TODO: see if we can do something for the OpenTelemetry shim.
+    return null;
+  }
+}
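
ContextHandleUtils mirrors the old ContextUtils but works on the ContextHandle abstraction, so the OpenTelemetry shim can substitute its own context manager. A sketch of the attach/detach pattern the handle supports, using only the API in this change (BlankSpan stands in for a real span):

    import io.opencensus.trace.BlankSpan;
    import io.opencensus.trace.ContextHandle;
    import io.opencensus.trace.Span;
    import io.opencensus.trace.unsafe.ContextHandleUtils;

    // Sketch only: scoping a span to the current context through ContextHandle.
    public final class ContextHandleScope {
      public static void main(String[] args) {
        Span span = BlankSpan.INSTANCE;
        ContextHandle withSpan =
            ContextHandleUtils.withValue(ContextHandleUtils.currentContext(), span);
        ContextHandle previous = withSpan.attach();
        try {
          Span current = ContextHandleUtils.getValue(ContextHandleUtils.currentContext());
          System.out.println(current == span); // true while attached
        } finally {
          withSpan.detach(previous);
        }
      }
    }
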
diff --git a/api/src/main/java/io/opencensus/trace/unsafe/ContextManagerImpl.java b/api/src/main/java/io/opencensus/trace/unsafe/ContextManagerImpl.java
new file mode 100644
index 0000000..abe8acb
--- /dev/null
+++ b/api/src/main/java/io/opencensus/trace/unsafe/ContextManagerImpl.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2016-17, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.trace.unsafe;
+
+import io.grpc.Context;
+import io.opencensus.trace.ContextHandle;
+import io.opencensus.trace.ContextManager;
+import io.opencensus.trace.Span;
+import javax.annotation.Nullable;
+
+/** Default {@code ContextManager} implementation using {@link io.grpc.Context}. */
+public class ContextManagerImpl implements ContextManager {
+
+  @Override
+  public ContextHandle currentContext() {
+    return wrapContext(Context.current());
+  }
+
+  @Override
+  @SuppressWarnings({"deprecation"})
+  public ContextHandle withValue(ContextHandle contextHandle, @Nullable Span span) {
+    return wrapContext(ContextUtils.withValue(unwrapContext(contextHandle), span));
+  }
+
+  @Override
+  @SuppressWarnings({"deprecation"})
+  public Span getValue(ContextHandle contextHandle) {
+    return ContextUtils.getValue(unwrapContext(contextHandle));
+  }
+
+  private static ContextHandle wrapContext(Context context) {
+    return new ContextHandleImpl(context);
+  }
+
+  private static Context unwrapContext(ContextHandle contextHandle) {
+    return ((ContextHandleImpl) contextHandle).getContext();
+  }
+
+  protected ContextManagerImpl() {}
+}
diff --git a/api/src/main/java/io/opencensus/trace/unsafe/ContextUtils.java b/api/src/main/java/io/opencensus/trace/unsafe/ContextUtils.java
index 3f4b988..b40e542 100644
--- a/api/src/main/java/io/opencensus/trace/unsafe/ContextUtils.java
+++ b/api/src/main/java/io/opencensus/trace/unsafe/ContextUtils.java
@@ -17,6 +17,8 @@
 package io.opencensus.trace.unsafe;
 
 import io.grpc.Context;
+import io.opencensus.internal.Utils;
+import io.opencensus.trace.BlankSpan;
 import io.opencensus.trace.Span;
 
 /*>>>
@@ -24,22 +26,45 @@
 */
 
 /**
- * Util methods/functionality to interact with the {@link io.grpc.Context}.
+ * Utilities for manipulating the current context and grabbing the current span.
  *
- * <p>Users must interact with the current Context via the public APIs in {@link
- * io.opencensus.trace.Tracer} and avoid usages of the {@link #CONTEXT_SPAN_KEY} directly.
- *
+ * @deprecated Please use {@link io.opencensus.trace.unsafe.ContextHandleUtils}. Util
+ *     methods/functionality to interact with the {@link io.grpc.Context}. Users must interact with
+ *     the current Context via the public APIs in {@link io.opencensus.trace.Tracer} and avoid
+ *     usages of the {@link #CONTEXT_SPAN_KEY} directly.
  * @since 0.5
  */
+@Deprecated()
 public final class ContextUtils {
   // No instance of this class.
   private ContextUtils() {}
 
+  /** The {@link io.grpc.Context.Key} used to interact with {@link io.grpc.Context}. */
+  private static final Context.Key</*@Nullable*/ Span> CONTEXT_SPAN_KEY =
+      Context.<Span>key("opencensus-trace-span-key");
+
   /**
-   * The {@link io.grpc.Context.Key} used to interact with {@link io.grpc.Context}.
+   * Creates a new {@code Context} with the given value set.
    *
-   * @since 0.5
+   * @param context the parent {@code Context}.
+   * @param span the value to be set.
+   * @return a new context with the given value set.
+   * @since 0.21
    */
-  public static final Context.Key</*@Nullable*/ Span> CONTEXT_SPAN_KEY =
-      Context.key("opencensus-trace-span-key");
+  public static Context withValue(Context context, @javax.annotation.Nullable Span span) {
+    return Utils.checkNotNull(context, "context").withValue(CONTEXT_SPAN_KEY, span);
+  }
+
+  /**
+   * Returns the value from the specified {@code Context}.
+   *
+   * @param context the specified {@code Context}.
+   * @return the value from the specified {@code Context}.
+   * @since 0.21
+   */
+  public static Span getValue(Context context) {
+    @javax.annotation.Nullable
+    Span span = CONTEXT_SPAN_KEY.get(Utils.checkNotNull(context, "context"));
+    return span == null ? BlankSpan.INSTANCE : span;
+  }
 }
diff --git a/api/src/test/java/io/opencensus/common/ServerStatsEncodingTest.java b/api/src/test/java/io/opencensus/common/ServerStatsEncodingTest.java
index 6db14a7..c60c4f7 100644
--- a/api/src/test/java/io/opencensus/common/ServerStatsEncodingTest.java
+++ b/api/src/test/java/io/opencensus/common/ServerStatsEncodingTest.java
@@ -34,9 +34,9 @@
 
   @Test
   public void encodeDecodeTest() throws ServerStatsDeserializationException {
-    ServerStats serverStatsToBeEncoded = null;
-    ServerStats serverStatsDecoded = null;
-    byte[] serialized = null;
+    ServerStats serverStatsToBeEncoded;
+    ServerStats serverStatsDecoded;
+    byte[] serialized;
 
     serverStatsToBeEncoded = ServerStats.create(31, 22, (byte) 1);
     serialized = ServerStatsEncoding.toBytes(serverStatsToBeEncoded);
@@ -56,9 +56,9 @@
 
   @Test
   public void skipUnknownFieldTest() throws ServerStatsDeserializationException {
-    ServerStats serverStatsToBeEncoded = null;
-    ServerStats serverStatsDecoded = null;
-    byte[] serialized = null;
+    ServerStats serverStatsToBeEncoded;
+    ServerStats serverStatsDecoded;
+    byte[] serialized;
 
     serverStatsToBeEncoded = ServerStats.create(31, 22, (byte) 1);
     serialized = ServerStatsEncoding.toBytes(serverStatsToBeEncoded);
@@ -79,9 +79,8 @@
 
   @Test
   public void negativeLbLatencyValueTest() throws ServerStatsDeserializationException {
-    ServerStats serverStatsToBeEncoded = null;
-    ServerStats serverStatsDecoded = null;
-    byte[] serialized = null;
+    ServerStats serverStatsToBeEncoded;
+    byte[] serialized;
 
     serverStatsToBeEncoded = ServerStats.create(31, 22, (byte) 1);
     serialized = ServerStatsEncoding.toBytes(serverStatsToBeEncoded);
@@ -100,9 +99,8 @@
 
   @Test
   public void negativeServerLatencyValueTest() throws ServerStatsDeserializationException {
-    ServerStats serverStatsToBeEncoded = null;
-    ServerStats serverStatsDecoded = null;
-    byte[] serialized = null;
+    ServerStats serverStatsToBeEncoded;
+    byte[] serialized;
 
     serverStatsToBeEncoded = ServerStats.create(31, 22, (byte) 1);
     serialized = ServerStatsEncoding.toBytes(serverStatsToBeEncoded);
diff --git a/api/src/test/java/io/opencensus/common/ServerStatsTest.java b/api/src/test/java/io/opencensus/common/ServerStatsTest.java
index 620bbb4..3b395ca 100644
--- a/api/src/test/java/io/opencensus/common/ServerStatsTest.java
+++ b/api/src/test/java/io/opencensus/common/ServerStatsTest.java
@@ -32,7 +32,7 @@
 
   @Test
   public void serverStatsCreate() {
-    ServerStats serverStats = null;
+    ServerStats serverStats;
 
     serverStats = ServerStats.create(31, 22, (byte) 0);
     assertThat(serverStats.getLbLatencyNs()).isEqualTo(31);
diff --git a/api/src/test/java/io/opencensus/internal/UtilsTest.java b/api/src/test/java/io/opencensus/internal/UtilsTest.java
index 608a8fe..86a3420 100644
--- a/api/src/test/java/io/opencensus/internal/UtilsTest.java
+++ b/api/src/test/java/io/opencensus/internal/UtilsTest.java
@@ -32,8 +32,8 @@
   private static final String TEST_MESSAGE = "test message";
   private static final String TEST_MESSAGE_TEMPLATE = "I ate %s eggs.";
   private static final int TEST_MESSAGE_VALUE = 2;
-  private static final String FORMATED_SIMPLE_TEST_MESSAGE = "I ate 2 eggs.";
-  private static final String FORMATED_COMPLEX_TEST_MESSAGE = "I ate 2 eggs. [2]";
+  private static final String FORMATTED_SIMPLE_TEST_MESSAGE = "I ate 2 eggs.";
+  private static final String FORMATTED_COMPLEX_TEST_MESSAGE = "I ate 2 eggs. [2]";
 
   @Rule public ExpectedException thrown = ExpectedException.none();
 
@@ -55,14 +55,14 @@
   @Test
   public void checkArgument_WithSimpleFormat() {
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage(FORMATED_SIMPLE_TEST_MESSAGE);
+    thrown.expectMessage(FORMATTED_SIMPLE_TEST_MESSAGE);
     Utils.checkArgument(false, TEST_MESSAGE_TEMPLATE, TEST_MESSAGE_VALUE);
   }
 
   @Test
   public void checkArgument_WithComplexFormat() {
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage(FORMATED_COMPLEX_TEST_MESSAGE);
+    thrown.expectMessage(FORMATTED_COMPLEX_TEST_MESSAGE);
     Utils.checkArgument(false, TEST_MESSAGE_TEMPLATE, TEST_MESSAGE_VALUE, TEST_MESSAGE_VALUE);
   }
 
diff --git a/api/src/test/java/io/opencensus/metrics/DerivedDoubleCumulativeTest.java b/api/src/test/java/io/opencensus/metrics/DerivedDoubleCumulativeTest.java
new file mode 100644
index 0000000..5a3f119
--- /dev/null
+++ b/api/src/test/java/io/opencensus/metrics/DerivedDoubleCumulativeTest.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.metrics;
+
+import io.opencensus.common.ToDoubleFunction;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link DerivedDoubleCumulative}. */
+@RunWith(JUnit4.class)
+public class DerivedDoubleCumulativeTest {
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  private static final String NAME = "name";
+  private static final String DESCRIPTION = "description";
+  private static final String UNIT = "1";
+  private static final List<LabelKey> LABEL_KEY =
+      Collections.singletonList(LabelKey.create("key", "key description"));
+  private static final List<LabelValue> LABEL_VALUES =
+      Collections.singletonList(LabelValue.create("value"));
+  private static final List<LabelValue> EMPTY_LABEL_VALUES = new ArrayList<LabelValue>();
+
+  private final DerivedDoubleCumulative derivedDoubleCumulative =
+      DerivedDoubleCumulative.newNoopDerivedDoubleCumulative(NAME, DESCRIPTION, UNIT, LABEL_KEY);
+  private static final ToDoubleFunction<Object> doubleFunction =
+      new ToDoubleFunction<Object>() {
+        @Override
+        public double applyAsDouble(Object value) {
+          return 5.0;
+        }
+      };
+
+  @Test
+  public void noopCreateTimeSeries_WithNullLabelValues() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    derivedDoubleCumulative.createTimeSeries(null, null, doubleFunction);
+  }
+
+  @Test
+  public void noopCreateTimeSeries_WithNullElement() {
+    List<LabelValue> labelValues = Collections.singletonList(null);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValue");
+    derivedDoubleCumulative.createTimeSeries(labelValues, null, doubleFunction);
+  }
+
+  @Test
+  public void noopCreateTimeSeries_WithInvalidLabelSize() {
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
+    derivedDoubleCumulative.createTimeSeries(EMPTY_LABEL_VALUES, null, doubleFunction);
+  }
+
+  @Test
+  public void createTimeSeries_WithNullFunction() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("function");
+    derivedDoubleCumulative.createTimeSeries(LABEL_VALUES, null, null);
+  }
+
+  @Test
+  public void noopRemoveTimeSeries_WithNullLabelValues() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    derivedDoubleCumulative.removeTimeSeries(null);
+  }
+}
diff --git a/api/src/test/java/io/opencensus/metrics/DerivedDoubleGaugeTest.java b/api/src/test/java/io/opencensus/metrics/DerivedDoubleGaugeTest.java
index dbae3c4..a49f76c 100644
--- a/api/src/test/java/io/opencensus/metrics/DerivedDoubleGaugeTest.java
+++ b/api/src/test/java/io/opencensus/metrics/DerivedDoubleGaugeTest.java
@@ -27,7 +27,6 @@
 import org.junit.runners.JUnit4;
 
 /** Unit tests for {@link DerivedDoubleGauge}. */
-// TODO(mayurkale): Add more tests, once DerivedDoubleGauge plugs-in into the registry.
 @RunWith(JUnit4.class)
 public class DerivedDoubleGaugeTest {
   @Rule public ExpectedException thrown = ExpectedException.none();
@@ -62,14 +61,14 @@
   public void noopCreateTimeSeries_WithNullElement() {
     List<LabelValue> labelValues = Collections.singletonList(null);
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelValue element should not be null.");
+    thrown.expectMessage("labelValue");
     derivedDoubleGauge.createTimeSeries(labelValues, null, doubleFunction);
   }
 
   @Test
   public void noopCreateTimeSeries_WithInvalidLabelSize() {
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Incorrect number of labels.");
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
     derivedDoubleGauge.createTimeSeries(EMPTY_LABEL_VALUES, null, doubleFunction);
   }
 
diff --git a/api/src/test/java/io/opencensus/metrics/DerivedLongCumulativeTest.java b/api/src/test/java/io/opencensus/metrics/DerivedLongCumulativeTest.java
new file mode 100644
index 0000000..08d757e
--- /dev/null
+++ b/api/src/test/java/io/opencensus/metrics/DerivedLongCumulativeTest.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.metrics;
+
+import io.opencensus.common.ToLongFunction;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link DerivedLongCumulative}. */
+@RunWith(JUnit4.class)
+public class DerivedLongCumulativeTest {
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  private static final String NAME = "name";
+  private static final String DESCRIPTION = "description";
+  private static final String UNIT = "1";
+  private static final List<LabelKey> LABEL_KEY =
+      Collections.singletonList(LabelKey.create("key", "key description"));
+  private static final List<LabelValue> LABEL_VALUES =
+      Collections.singletonList(LabelValue.create("value"));
+  private static final List<LabelValue> EMPTY_LABEL_VALUES = new ArrayList<LabelValue>();
+
+  private final DerivedLongCumulative derivedLongCumulative =
+      DerivedLongCumulative.newNoopDerivedLongCumulative(NAME, DESCRIPTION, UNIT, LABEL_KEY);
+  private static final ToLongFunction<Object> longFunction =
+      new ToLongFunction<Object>() {
+        @Override
+        public long applyAsLong(Object value) {
+          return 5;
+        }
+      };
+
+  @Test
+  public void noopCreateTimeSeries_WithNullLabelValues() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    derivedLongCumulative.createTimeSeries(null, null, longFunction);
+  }
+
+  @Test
+  public void noopCreateTimeSeries_WithNullElement() {
+    List<LabelValue> labelValues = Collections.singletonList(null);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValue");
+    derivedLongCumulative.createTimeSeries(labelValues, null, longFunction);
+  }
+
+  @Test
+  public void noopCreateTimeSeries_WithInvalidLabelSize() {
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
+    derivedLongCumulative.createTimeSeries(EMPTY_LABEL_VALUES, null, longFunction);
+  }
+
+  @Test
+  public void createTimeSeries_WithNullFunction() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("function");
+    derivedLongCumulative.createTimeSeries(LABEL_VALUES, null, null);
+  }
+
+  @Test
+  public void noopRemoveTimeSeries_WithNullLabelValues() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    derivedLongCumulative.removeTimeSeries(null);
+  }
+}
diff --git a/api/src/test/java/io/opencensus/metrics/DerivedLongGaugeTest.java b/api/src/test/java/io/opencensus/metrics/DerivedLongGaugeTest.java
index 6a46288..c7a674a 100644
--- a/api/src/test/java/io/opencensus/metrics/DerivedLongGaugeTest.java
+++ b/api/src/test/java/io/opencensus/metrics/DerivedLongGaugeTest.java
@@ -27,7 +27,6 @@
 import org.junit.runners.JUnit4;
 
 /** Unit tests for {@link DerivedLongGauge}. */
-// TODO(mayurkale): Add more tests, once DerivedLongGauge plugs-in into the registry.
 @RunWith(JUnit4.class)
 public class DerivedLongGaugeTest {
   @Rule public ExpectedException thrown = ExpectedException.none();
@@ -62,14 +61,14 @@
   public void noopCreateTimeSeries_WithNullElement() {
     List<LabelValue> labelValues = Collections.singletonList(null);
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelValue element should not be null.");
+    thrown.expectMessage("labelValue");
     derivedLongGauge.createTimeSeries(labelValues, null, longFunction);
   }
 
   @Test
   public void noopCreateTimeSeries_WithInvalidLabelSize() {
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Incorrect number of labels.");
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
     derivedLongGauge.createTimeSeries(EMPTY_LABEL_VALUES, null, longFunction);
   }
 
diff --git a/api/src/test/java/io/opencensus/metrics/DoubleCumulativeTest.java b/api/src/test/java/io/opencensus/metrics/DoubleCumulativeTest.java
new file mode 100644
index 0000000..ff1a6c2
--- /dev/null
+++ b/api/src/test/java/io/opencensus/metrics/DoubleCumulativeTest.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.metrics;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link DoubleCumulative}. */
+@RunWith(JUnit4.class)
+public class DoubleCumulativeTest {
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  private static final String NAME = "name";
+  private static final String DESCRIPTION = "description";
+  private static final String UNIT = "1";
+  private static final List<LabelKey> LABEL_KEY =
+      Collections.singletonList(LabelKey.create("key", "key description"));
+  private static final List<LabelValue> LABEL_VALUES =
+      Collections.singletonList(LabelValue.create("value"));
+  private static final List<LabelKey> EMPTY_LABEL_KEYS = new ArrayList<LabelKey>();
+  private static final List<LabelValue> EMPTY_LABEL_VALUES = new ArrayList<LabelValue>();
+
+  @Test
+  public void noopGetOrCreateTimeSeries_WithNullLabelValues() {
+    DoubleCumulative doubleCumulative =
+        DoubleCumulative.newNoopDoubleCumulative(NAME, DESCRIPTION, UNIT, EMPTY_LABEL_KEYS);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    doubleCumulative.getOrCreateTimeSeries(null);
+  }
+
+  @Test
+  public void noopGetOrCreateTimeSeries_WithNullElement() {
+    List<LabelValue> labelValues = Collections.singletonList(null);
+    DoubleCumulative doubleCumulative =
+        DoubleCumulative.newNoopDoubleCumulative(NAME, DESCRIPTION, UNIT, LABEL_KEY);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValue");
+    doubleCumulative.getOrCreateTimeSeries(labelValues);
+  }
+
+  @Test
+  public void noopGetOrCreateTimeSeries_WithInvalidLabelSize() {
+    DoubleCumulative doubleCumulative =
+        DoubleCumulative.newNoopDoubleCumulative(NAME, DESCRIPTION, UNIT, LABEL_KEY);
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
+    doubleCumulative.getOrCreateTimeSeries(EMPTY_LABEL_VALUES);
+  }
+
+  @Test
+  public void noopRemoveTimeSeries_WithNullLabelValues() {
+    DoubleCumulative doubleCumulative =
+        DoubleCumulative.newNoopDoubleCumulative(NAME, DESCRIPTION, UNIT, LABEL_KEY);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    doubleCumulative.removeTimeSeries(null);
+  }
+
+  @Test
+  public void noopSameAs() {
+    DoubleCumulative doubleCumulative =
+        DoubleCumulative.newNoopDoubleCumulative(NAME, DESCRIPTION, UNIT, LABEL_KEY);
+    assertThat(doubleCumulative.getDefaultTimeSeries())
+        .isSameInstanceAs(doubleCumulative.getDefaultTimeSeries());
+    assertThat(doubleCumulative.getDefaultTimeSeries())
+        .isSameInstanceAs(doubleCumulative.getOrCreateTimeSeries(LABEL_VALUES));
+  }
+}
diff --git a/api/src/test/java/io/opencensus/metrics/DoubleGaugeTest.java b/api/src/test/java/io/opencensus/metrics/DoubleGaugeTest.java
index b0cdea7..058c509 100644
--- a/api/src/test/java/io/opencensus/metrics/DoubleGaugeTest.java
+++ b/api/src/test/java/io/opencensus/metrics/DoubleGaugeTest.java
@@ -42,8 +42,6 @@
   private static final List<LabelKey> EMPTY_LABEL_KEYS = new ArrayList<LabelKey>();
   private static final List<LabelValue> EMPTY_LABEL_VALUES = new ArrayList<LabelValue>();
 
-  // TODO(mayurkale): Add more tests, once DoubleGauge plugs-in into the registry.
-
   @Test
   public void noopGetOrCreateTimeSeries_WithNullLabelValues() {
     DoubleGauge doubleGauge =
@@ -58,7 +56,7 @@
     List<LabelValue> labelValues = Collections.singletonList(null);
     DoubleGauge doubleGauge = DoubleGauge.newNoopDoubleGauge(NAME, DESCRIPTION, UNIT, LABEL_KEY);
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelValue element should not be null.");
+    thrown.expectMessage("labelValue");
     doubleGauge.getOrCreateTimeSeries(labelValues);
   }
 
@@ -66,7 +64,7 @@
   public void noopGetOrCreateTimeSeries_WithInvalidLabelSize() {
     DoubleGauge doubleGauge = DoubleGauge.newNoopDoubleGauge(NAME, DESCRIPTION, UNIT, LABEL_KEY);
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Incorrect number of labels.");
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
     doubleGauge.getOrCreateTimeSeries(EMPTY_LABEL_VALUES);
   }
 
@@ -81,8 +79,9 @@
   @Test
   public void noopSameAs() {
     DoubleGauge doubleGauge = DoubleGauge.newNoopDoubleGauge(NAME, DESCRIPTION, UNIT, LABEL_KEY);
-    assertThat(doubleGauge.getDefaultTimeSeries()).isSameAs(doubleGauge.getDefaultTimeSeries());
     assertThat(doubleGauge.getDefaultTimeSeries())
-        .isSameAs(doubleGauge.getOrCreateTimeSeries(LABEL_VALUES));
+        .isSameInstanceAs(doubleGauge.getDefaultTimeSeries());
+    assertThat(doubleGauge.getDefaultTimeSeries())
+        .isSameInstanceAs(doubleGauge.getOrCreateTimeSeries(LABEL_VALUES));
   }
 }
diff --git a/api/src/test/java/io/opencensus/metrics/LongCumulativeTest.java b/api/src/test/java/io/opencensus/metrics/LongCumulativeTest.java
new file mode 100644
index 0000000..955378a
--- /dev/null
+++ b/api/src/test/java/io/opencensus/metrics/LongCumulativeTest.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.metrics;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link LongCumulative}. */
+@RunWith(JUnit4.class)
+public class LongCumulativeTest {
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  private static final String NAME = "name";
+  private static final String DESCRIPTION = "description";
+  private static final String UNIT = "1";
+  private static final List<LabelKey> LABEL_KEY =
+      Collections.singletonList(LabelKey.create("key", "key description"));
+  private static final List<LabelValue> LABEL_VALUES =
+      Collections.singletonList(LabelValue.create("value"));
+  private static final List<LabelKey> EMPTY_LABEL_KEYS = new ArrayList<LabelKey>();
+  private static final List<LabelValue> EMPTY_LABEL_VALUES = new ArrayList<LabelValue>();
+
+  @Test
+  public void noopGetOrCreateTimeSeries_WithNullLabelValues() {
+    LongCumulative longCumulative =
+        LongCumulative.newNoopLongCumulative(NAME, DESCRIPTION, UNIT, EMPTY_LABEL_KEYS);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    longCumulative.getOrCreateTimeSeries(null);
+  }
+
+  @Test
+  public void noopGetOrCreateTimeSeries_WithNullElement() {
+    List<LabelValue> labelValues = Collections.singletonList(null);
+    LongCumulative longCumulative =
+        LongCumulative.newNoopLongCumulative(NAME, DESCRIPTION, UNIT, LABEL_KEY);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValue");
+    longCumulative.getOrCreateTimeSeries(labelValues);
+  }
+
+  @Test
+  public void noopGetOrCreateTimeSeries_WithInvalidLabelSize() {
+    LongCumulative longCumulative =
+        LongCumulative.newNoopLongCumulative(NAME, DESCRIPTION, UNIT, LABEL_KEY);
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
+    longCumulative.getOrCreateTimeSeries(EMPTY_LABEL_VALUES);
+  }
+
+  @Test
+  public void noopRemoveTimeSeries_WithNullLabelValues() {
+    LongCumulative longCumulative =
+        LongCumulative.newNoopLongCumulative(NAME, DESCRIPTION, UNIT, LABEL_KEY);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    longCumulative.removeTimeSeries(null);
+  }
+
+  @Test
+  public void noopSameAs() {
+    LongCumulative longCumulative =
+        LongCumulative.newNoopLongCumulative(NAME, DESCRIPTION, UNIT, LABEL_KEY);
+    assertThat(longCumulative.getDefaultTimeSeries())
+        .isSameInstanceAs(longCumulative.getDefaultTimeSeries());
+    assertThat(longCumulative.getDefaultTimeSeries())
+        .isSameInstanceAs(longCumulative.getOrCreateTimeSeries(LABEL_VALUES));
+  }
+}
diff --git a/api/src/test/java/io/opencensus/metrics/LongGaugeTest.java b/api/src/test/java/io/opencensus/metrics/LongGaugeTest.java
index eedb287..a006e36 100644
--- a/api/src/test/java/io/opencensus/metrics/LongGaugeTest.java
+++ b/api/src/test/java/io/opencensus/metrics/LongGaugeTest.java
@@ -42,8 +42,6 @@
   private static final List<LabelKey> EMPTY_LABEL_KEYS = new ArrayList<LabelKey>();
   private static final List<LabelValue> EMPTY_LABEL_VALUES = new ArrayList<LabelValue>();
 
-  // TODO(mayurkale): Add more tests, once LongGauge plugs-in into the registry.
-
   @Test
   public void noopGetOrCreateTimeSeries_WithNullLabelValues() {
     LongGauge longGauge = LongGauge.newNoopLongGauge(NAME, DESCRIPTION, UNIT, EMPTY_LABEL_KEYS);
@@ -57,7 +55,7 @@
     List<LabelValue> labelValues = Collections.singletonList(null);
     LongGauge longGauge = LongGauge.newNoopLongGauge(NAME, DESCRIPTION, UNIT, LABEL_KEY);
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelValue element should not be null.");
+    thrown.expectMessage("labelValue");
     longGauge.getOrCreateTimeSeries(labelValues);
   }
 
@@ -65,7 +63,7 @@
   public void noopGetOrCreateTimeSeries_WithInvalidLabelSize() {
     LongGauge longGauge = LongGauge.newNoopLongGauge(NAME, DESCRIPTION, UNIT, LABEL_KEY);
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Incorrect number of labels.");
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
     longGauge.getOrCreateTimeSeries(EMPTY_LABEL_VALUES);
   }
 
@@ -80,8 +78,8 @@
   @Test
   public void noopSameAs() {
     LongGauge longGauge = LongGauge.newNoopLongGauge(NAME, DESCRIPTION, UNIT, LABEL_KEY);
-    assertThat(longGauge.getDefaultTimeSeries()).isSameAs(longGauge.getDefaultTimeSeries());
+    assertThat(longGauge.getDefaultTimeSeries()).isSameInstanceAs(longGauge.getDefaultTimeSeries());
     assertThat(longGauge.getDefaultTimeSeries())
-        .isSameAs(longGauge.getOrCreateTimeSeries(LABEL_VALUES));
+        .isSameInstanceAs(longGauge.getOrCreateTimeSeries(LABEL_VALUES));
   }
 }
diff --git a/api/src/test/java/io/opencensus/metrics/MetricOptionsTest.java b/api/src/test/java/io/opencensus/metrics/MetricOptionsTest.java
new file mode 100644
index 0000000..91ab524
--- /dev/null
+++ b/api/src/test/java/io/opencensus/metrics/MetricOptionsTest.java
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.metrics;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link MetricOptions}. */
+@RunWith(JUnit4.class)
+public class MetricOptionsTest {
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  private static final String DESCRIPTION = "test_description";
+  private static final String UNIT = "1";
+  private static final LabelKey LABEL_KEY = LabelKey.create("test_key", "test key description");
+  private static final List<LabelKey> LABEL_KEYS = Collections.singletonList(LABEL_KEY);
+  private static final LabelValue LABEL_VALUE = LabelValue.create("test_value");
+  private static final Map<LabelKey, LabelValue> CONSTANT_LABELS =
+      Collections.singletonMap(LabelKey.create("test_key_1", "test key description"), LABEL_VALUE);
+
+  @Test
+  public void nullDescription() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("description");
+    MetricOptions.builder().setDescription(null).build();
+  }
+
+  @Test
+  public void nullUnit() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("unit");
+    MetricOptions.builder().setUnit(null).build();
+  }
+
+  @Test
+  public void nullLabelKeys() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelKeys");
+    MetricOptions.builder().setLabelKeys(null).build();
+  }
+
+  @Test
+  public void labelKeys_WithNullElement() {
+    List<LabelKey> labelKeys = Collections.singletonList(null);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelKeys elements");
+    MetricOptions.builder().setLabelKeys(labelKeys).build();
+  }
+
+  @Test
+  public void sameLabelKeyInLabelsKey() {
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("Invalid LabelKey in labelKeys");
+    MetricOptions.builder()
+        .setLabelKeys(Arrays.asList(LABEL_KEY, LABEL_KEY))
+        .setConstantLabels(Collections.singletonMap(LABEL_KEY, LABEL_VALUE))
+        .build();
+  }
+
+  @Test
+  public void nullConstantLabels() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("constantLabels");
+    MetricOptions.builder().setConstantLabels(null).build();
+  }
+
+  @Test
+  public void constantLabels_WithNullKey() {
+    Map<LabelKey, LabelValue> constantLabels = Collections.singletonMap(null, LABEL_VALUE);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("constantLabels elements");
+    MetricOptions.builder().setConstantLabels(constantLabels).build();
+  }
+
+  @Test
+  public void constantLabels_WithNullValue() {
+    Map<LabelKey, LabelValue> constantLabels = Collections.singletonMap(LABEL_KEY, null);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("constantLabels elements");
+    MetricOptions.builder().setConstantLabels(constantLabels).build();
+  }
+
+  @Test
+  public void sameLabelKeyInConstantLabelsAndLabelsKey() {
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("Invalid LabelKey in constantLabels");
+    MetricOptions.builder()
+        .setLabelKeys(LABEL_KEYS)
+        .setConstantLabels(Collections.singletonMap(LABEL_KEY, LABEL_VALUE))
+        .build();
+  }
+
+  @Test
+  public void setAndGet() {
+    MetricOptions metricOptions =
+        MetricOptions.builder()
+            .setDescription(DESCRIPTION)
+            .setUnit(UNIT)
+            .setLabelKeys(LABEL_KEYS)
+            .setConstantLabels(CONSTANT_LABELS)
+            .build();
+    assertThat(metricOptions.getDescription()).isEqualTo(DESCRIPTION);
+    assertThat(metricOptions.getUnit()).isEqualTo(UNIT);
+    assertThat(metricOptions.getLabelKeys()).isEqualTo(LABEL_KEYS);
+    assertThat(metricOptions.getConstantLabels()).isEqualTo(CONSTANT_LABELS);
+  }
+}
diff --git a/api/src/test/java/io/opencensus/metrics/MetricRegistryTest.java b/api/src/test/java/io/opencensus/metrics/MetricRegistryTest.java
index d8a26cc..3a265c8 100644
--- a/api/src/test/java/io/opencensus/metrics/MetricRegistryTest.java
+++ b/api/src/test/java/io/opencensus/metrics/MetricRegistryTest.java
@@ -20,6 +20,7 @@
 
 import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -31,16 +32,27 @@
 public class MetricRegistryTest {
   @Rule public ExpectedException thrown = ExpectedException.none();
 
-  private static final String NAME = "name";
-  private static final String NAME_2 = "name2";
-  private static final String NAME_3 = "name3";
-  private static final String NAME_4 = "name4";
-  private static final String DESCRIPTION = "description";
+  private static final String NAME = "test_name";
+  private static final String NAME_2 = "test_name2";
+  private static final String NAME_3 = "test_name3";
+  private static final String NAME_4 = "test_name4";
+  private static final String DESCRIPTION = "test_description";
   private static final String UNIT = "1";
-  private static final List<LabelKey> LABEL_KEY =
-      Collections.singletonList(LabelKey.create("key", "key description"));
-  private static final List<LabelValue> LABEL_VALUES =
-      Collections.singletonList(LabelValue.create("value"));
+  private static final LabelKey LABEL_KEY = LabelKey.create("test_key", "test key description");
+  private static final List<LabelKey> LABEL_KEYS = Collections.singletonList(LABEL_KEY);
+  private static final LabelValue LABEL_VALUE = LabelValue.create("test_value");
+  private static final LabelValue LABEL_VALUE_2 = LabelValue.create("test_value_2");
+  private static final List<LabelValue> LABEL_VALUES = Collections.singletonList(LABEL_VALUE);
+  private static final Map<LabelKey, LabelValue> CONSTANT_LABELS =
+      Collections.singletonMap(
+          LabelKey.create("test_key_1", "test key description"), LABEL_VALUE_2);
+  private static final MetricOptions METRIC_OPTIONS =
+      MetricOptions.builder()
+          .setDescription(DESCRIPTION)
+          .setUnit(UNIT)
+          .setLabelKeys(LABEL_KEYS)
+          .setConstantLabels(CONSTANT_LABELS)
+          .build();
   private final MetricRegistry metricRegistry =
       MetricsComponent.newNoopMetricsComponent().getMetricRegistry();
 
@@ -48,173 +60,116 @@
   public void noopAddLongGauge_NullName() {
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("name");
-    metricRegistry.addLongGauge(null, DESCRIPTION, UNIT, LABEL_KEY);
-  }
-
-  @Test
-  public void noopAddLongGauge_NullDescription() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("description");
-    metricRegistry.addLongGauge(NAME, null, UNIT, LABEL_KEY);
-  }
-
-  @Test
-  public void noopAddLongGauge_NullUnit() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("unit");
-    metricRegistry.addLongGauge(NAME, DESCRIPTION, null, LABEL_KEY);
-  }
-
-  @Test
-  public void noopAddLongGauge_NullLabels() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKeys");
-    metricRegistry.addLongGauge(NAME, DESCRIPTION, UNIT, null);
-  }
-
-  @Test
-  public void noopAddLongGauge_WithNullElement() {
-    List<LabelKey> labelKeys = Collections.singletonList(null);
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKey element should not be null.");
-    metricRegistry.addLongGauge(NAME, DESCRIPTION, UNIT, labelKeys);
+    metricRegistry.addLongGauge(null, METRIC_OPTIONS);
   }
 
   @Test
   public void noopAddDoubleGauge_NullName() {
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("name");
-    metricRegistry.addDoubleGauge(null, DESCRIPTION, UNIT, LABEL_KEY);
-  }
-
-  @Test
-  public void noopAddDoubleGauge_NullDescription() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("description");
-    metricRegistry.addDoubleGauge(NAME_2, null, UNIT, LABEL_KEY);
-  }
-
-  @Test
-  public void noopAddDoubleGauge_NullUnit() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("unit");
-    metricRegistry.addDoubleGauge(NAME_2, DESCRIPTION, null, LABEL_KEY);
-  }
-
-  @Test
-  public void noopAddDoubleGauge_NullLabels() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKeys");
-    metricRegistry.addDoubleGauge(NAME_2, DESCRIPTION, UNIT, null);
-  }
-
-  @Test
-  public void noopAddDoubleGauge_WithNullElement() {
-    List<LabelKey> labelKeys = Collections.singletonList(null);
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKey element should not be null.");
-    metricRegistry.addDoubleGauge(NAME_2, DESCRIPTION, UNIT, labelKeys);
+    metricRegistry.addDoubleGauge(null, METRIC_OPTIONS);
   }
 
   @Test
   public void noopAddDerivedLongGauge_NullName() {
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("name");
-    metricRegistry.addDerivedLongGauge(null, DESCRIPTION, UNIT, LABEL_KEY);
-  }
-
-  @Test
-  public void noopAddDerivedLongGauge_NullDescription() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("description");
-    metricRegistry.addDerivedLongGauge(NAME_3, null, UNIT, LABEL_KEY);
-  }
-
-  @Test
-  public void noopAddDerivedLongGauge_NullUnit() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("unit");
-    metricRegistry.addDerivedLongGauge(NAME_3, DESCRIPTION, null, LABEL_KEY);
-  }
-
-  @Test
-  public void noopAddDerivedLongGauge_NullLabels() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKeys");
-    metricRegistry.addDerivedLongGauge(NAME_3, DESCRIPTION, UNIT, null);
-  }
-
-  @Test
-  public void noopAddDerivedLongGauge_WithNullElement() {
-    List<LabelKey> labelKeys = Collections.singletonList(null);
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKey element should not be null.");
-    metricRegistry.addDerivedLongGauge(NAME_3, DESCRIPTION, UNIT, labelKeys);
+    metricRegistry.addDerivedLongGauge(null, METRIC_OPTIONS);
   }
 
   @Test
   public void noopAddDerivedDoubleGauge_NullName() {
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("name");
-    metricRegistry.addDerivedDoubleGauge(null, DESCRIPTION, UNIT, LABEL_KEY);
+    metricRegistry.addDerivedDoubleGauge(null, METRIC_OPTIONS);
   }
 
   @Test
-  public void noopAddDerivedDoubleGauge_NullDescription() {
+  public void noopAddLongCumulative_NullName() {
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("description");
-    metricRegistry.addDerivedDoubleGauge(NAME_4, null, UNIT, LABEL_KEY);
+    thrown.expectMessage("name");
+    metricRegistry.addLongCumulative(null, METRIC_OPTIONS);
   }
 
   @Test
-  public void noopAddDerivedDoubleGauge_NullUnit() {
+  public void noopAddDoubleCumulative_NullName() {
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("unit");
-    metricRegistry.addDerivedDoubleGauge(NAME_4, DESCRIPTION, null, LABEL_KEY);
+    thrown.expectMessage("name");
+    metricRegistry.addDoubleCumulative(null, METRIC_OPTIONS);
   }
 
   @Test
-  public void noopAddDerivedDoubleGauge_NullLabels() {
+  public void noopAddDerivedLongCumulative_NullName() {
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKeys");
-    metricRegistry.addDerivedDoubleGauge(NAME_4, DESCRIPTION, UNIT, null);
+    thrown.expectMessage("name");
+    metricRegistry.addDerivedLongCumulative(null, METRIC_OPTIONS);
   }
 
   @Test
-  public void noopAddDerivedDoubleGauge_WithNullElement() {
-    List<LabelKey> labelKeys = Collections.singletonList(null);
+  public void noopAddDerivedDoubleCumulative_NullName() {
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKey element should not be null.");
-    metricRegistry.addDerivedDoubleGauge(NAME_4, DESCRIPTION, UNIT, labelKeys);
+    thrown.expectMessage("name");
+    metricRegistry.addDerivedDoubleCumulative(null, METRIC_OPTIONS);
   }
 
   @Test
   public void noopSameAs() {
-    LongGauge longGauge = metricRegistry.addLongGauge(NAME, DESCRIPTION, UNIT, LABEL_KEY);
-    assertThat(longGauge.getDefaultTimeSeries()).isSameAs(longGauge.getDefaultTimeSeries());
+    LongGauge longGauge = metricRegistry.addLongGauge(NAME, METRIC_OPTIONS);
+    assertThat(longGauge.getDefaultTimeSeries()).isSameInstanceAs(longGauge.getDefaultTimeSeries());
     assertThat(longGauge.getDefaultTimeSeries())
-        .isSameAs(longGauge.getOrCreateTimeSeries(LABEL_VALUES));
+        .isSameInstanceAs(longGauge.getOrCreateTimeSeries(LABEL_VALUES));
 
-    DoubleGauge doubleGauge = metricRegistry.addDoubleGauge(NAME_2, DESCRIPTION, UNIT, LABEL_KEY);
-    assertThat(doubleGauge.getDefaultTimeSeries()).isSameAs(doubleGauge.getDefaultTimeSeries());
+    DoubleGauge doubleGauge = metricRegistry.addDoubleGauge(NAME_2, METRIC_OPTIONS);
     assertThat(doubleGauge.getDefaultTimeSeries())
-        .isSameAs(doubleGauge.getOrCreateTimeSeries(LABEL_VALUES));
+        .isSameInstanceAs(doubleGauge.getDefaultTimeSeries());
+    assertThat(doubleGauge.getDefaultTimeSeries())
+        .isSameInstanceAs(doubleGauge.getOrCreateTimeSeries(LABEL_VALUES));
+
+    LongCumulative longCumulative = metricRegistry.addLongCumulative(NAME, METRIC_OPTIONS);
+    assertThat(longCumulative.getDefaultTimeSeries())
+        .isSameInstanceAs(longCumulative.getDefaultTimeSeries());
+    assertThat(longCumulative.getDefaultTimeSeries())
+        .isSameInstanceAs(longCumulative.getOrCreateTimeSeries(LABEL_VALUES));
+
+    DoubleCumulative doubleCumulative = metricRegistry.addDoubleCumulative(NAME_2, METRIC_OPTIONS);
+    assertThat(doubleCumulative.getDefaultTimeSeries())
+        .isSameInstanceAs(doubleCumulative.getDefaultTimeSeries());
+    assertThat(doubleCumulative.getDefaultTimeSeries())
+        .isSameInstanceAs(doubleCumulative.getOrCreateTimeSeries(LABEL_VALUES));
   }
 
   @Test
   public void noopInstanceOf() {
-    assertThat(metricRegistry.addLongGauge(NAME, DESCRIPTION, UNIT, LABEL_KEY))
-        .isInstanceOf(LongGauge.newNoopLongGauge(NAME, DESCRIPTION, UNIT, LABEL_KEY).getClass());
-    assertThat(metricRegistry.addDoubleGauge(NAME_2, DESCRIPTION, UNIT, LABEL_KEY))
+    assertThat(metricRegistry.addLongGauge(NAME, METRIC_OPTIONS))
+        .isInstanceOf(LongGauge.newNoopLongGauge(NAME, DESCRIPTION, UNIT, LABEL_KEYS).getClass());
+    assertThat(metricRegistry.addDoubleGauge(NAME_2, METRIC_OPTIONS))
         .isInstanceOf(
-            DoubleGauge.newNoopDoubleGauge(NAME_2, DESCRIPTION, UNIT, LABEL_KEY).getClass());
-    assertThat(metricRegistry.addDerivedLongGauge(NAME_3, DESCRIPTION, UNIT, LABEL_KEY))
+            DoubleGauge.newNoopDoubleGauge(NAME_2, DESCRIPTION, UNIT, LABEL_KEYS).getClass());
+    assertThat(metricRegistry.addDerivedLongGauge(NAME_3, METRIC_OPTIONS))
         .isInstanceOf(
-            DerivedLongGauge.newNoopDerivedLongGauge(NAME_3, DESCRIPTION, UNIT, LABEL_KEY)
+            DerivedLongGauge.newNoopDerivedLongGauge(NAME_3, DESCRIPTION, UNIT, LABEL_KEYS)
                 .getClass());
-    assertThat(metricRegistry.addDerivedDoubleGauge(NAME_4, DESCRIPTION, UNIT, LABEL_KEY))
+    assertThat(metricRegistry.addDerivedDoubleGauge(NAME_4, METRIC_OPTIONS))
         .isInstanceOf(
-            DerivedDoubleGauge.newNoopDerivedDoubleGauge(NAME_4, DESCRIPTION, UNIT, LABEL_KEY)
+            DerivedDoubleGauge.newNoopDerivedDoubleGauge(NAME_4, DESCRIPTION, UNIT, LABEL_KEYS)
+                .getClass());
+
+    assertThat(metricRegistry.addLongCumulative(NAME, METRIC_OPTIONS))
+        .isInstanceOf(
+            LongCumulative.newNoopLongCumulative(NAME, DESCRIPTION, UNIT, LABEL_KEYS).getClass());
+    assertThat(metricRegistry.addDoubleCumulative(NAME_2, METRIC_OPTIONS))
+        .isInstanceOf(
+            DoubleCumulative.newNoopDoubleCumulative(NAME_2, DESCRIPTION, UNIT, LABEL_KEYS)
+                .getClass());
+    assertThat(metricRegistry.addDerivedLongCumulative(NAME_3, METRIC_OPTIONS))
+        .isInstanceOf(
+            DerivedLongCumulative.newNoopDerivedLongCumulative(
+                    NAME_3, DESCRIPTION, UNIT, LABEL_KEYS)
+                .getClass());
+    assertThat(metricRegistry.addDerivedDoubleCumulative(NAME_4, METRIC_OPTIONS))
+        .isInstanceOf(
+            DerivedDoubleCumulative.newNoopDerivedDoubleCumulative(
+                    NAME_4, DESCRIPTION, UNIT, LABEL_KEYS)
                 .getClass());
   }
 }
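
The MetricRegistryTest changes above reflect the API shift in this upgrade: MetricRegistry.add*Gauge and the new add*Cumulative methods now take a metric name plus a single MetricOptions bundle rather than separate description/unit/labelKeys arguments, which keeps the registry surface from growing a new overload for every metadata field. A minimal usage sketch under that API follows; the metric, queue, and service names are illustrative only, and it goes through the public Metrics.getMetricRegistry() entry point instead of the noop component the test constructs.

import io.opencensus.metrics.LabelKey;
import io.opencensus.metrics.LabelValue;
import io.opencensus.metrics.LongGauge;
import io.opencensus.metrics.LongGauge.LongPoint;
import io.opencensus.metrics.MetricOptions;
import io.opencensus.metrics.MetricRegistry;
import io.opencensus.metrics.Metrics;
import java.util.Collections;

public final class MetricOptionsSketch {
  public static void main(String[] args) {
    MetricRegistry registry = Metrics.getMetricRegistry();

    // Description, unit, label keys and constant labels all travel in MetricOptions now.
    MetricOptions options =
        MetricOptions.builder()
            .setDescription("Jobs currently queued")
            .setUnit("1")
            .setLabelKeys(Collections.singletonList(LabelKey.create("queue", "queue name")))
            .setConstantLabels(
                Collections.singletonMap(
                    LabelKey.create("service", "service name"), LabelValue.create("worker")))
            .build();

    LongGauge gauge = registry.addLongGauge("queued_jobs", options);
    LongPoint point =
        gauge.getOrCreateTimeSeries(Collections.singletonList(LabelValue.create("default")));
    point.set(42); // no-op unless a metrics implementation is registered
  }
}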
diff --git a/api/src/test/java/io/opencensus/metrics/data/AttachmentValueTest.java b/api/src/test/java/io/opencensus/metrics/data/AttachmentValueTest.java
new file mode 100644
index 0000000..9c5efe6
--- /dev/null
+++ b/api/src/test/java/io/opencensus/metrics/data/AttachmentValueTest.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.metrics.data;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link io.opencensus.metrics.data.AttachmentValue}. */
+@RunWith(JUnit4.class)
+public class AttachmentValueTest {
+
+  @Rule public final ExpectedException thrown = ExpectedException.none();
+
+  @Test
+  public void getValue() {
+    AttachmentValueString attachmentValue = AttachmentValueString.create("value");
+    assertThat(attachmentValue.getValue()).isEqualTo("value");
+  }
+
+  @Test
+  public void preventNullString() {
+    thrown.expect(NullPointerException.class);
+    AttachmentValueString.create(null);
+  }
+}
diff --git a/api/src/test/java/io/opencensus/metrics/data/ExemplarTest.java b/api/src/test/java/io/opencensus/metrics/data/ExemplarTest.java
new file mode 100644
index 0000000..3735f92
--- /dev/null
+++ b/api/src/test/java/io/opencensus/metrics/data/ExemplarTest.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.metrics.data;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
+import java.util.Collections;
+import java.util.Map;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link io.opencensus.metrics.data.Exemplar}. */
+@RunWith(JUnit4.class)
+public class ExemplarTest {
+
+  private static final double TOLERANCE = 1e-6;
+  private static final Timestamp TIMESTAMP_1 = Timestamp.create(1, 0);
+  private static final AttachmentValue ATTACHMENT_VALUE = AttachmentValueString.create("value");
+  private static final Map<String, AttachmentValue> ATTACHMENTS =
+      Collections.singletonMap("key", ATTACHMENT_VALUE);
+
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  @Test
+  public void testExemplar() {
+    Exemplar exemplar = Exemplar.create(15.0, TIMESTAMP_1, ATTACHMENTS);
+    assertThat(exemplar.getValue()).isWithin(TOLERANCE).of(15.0);
+    assertThat(exemplar.getTimestamp()).isEqualTo(TIMESTAMP_1);
+    assertThat(exemplar.getAttachments()).isEqualTo(ATTACHMENTS);
+  }
+
+  @Test
+  public void testExemplar_PreventNullTimestamp() {
+    thrown.expect(NullPointerException.class);
+    Exemplar.create(15, null, ATTACHMENTS);
+  }
+
+  @Test
+  public void testExemplar_PreventNullAttachments() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("attachments");
+    Exemplar.create(15, TIMESTAMP_1, null);
+  }
+
+  @Test
+  public void testExemplar_PreventNullAttachmentKey() {
+    Map<String, AttachmentValue> attachments = Collections.singletonMap(null, ATTACHMENT_VALUE);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("key of attachment");
+    Exemplar.create(15, TIMESTAMP_1, attachments);
+  }
+
+  @Test
+  public void testExemplar_PreventNullAttachmentValue() {
+    Map<String, AttachmentValue> attachments = Collections.singletonMap("key", null);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("value of attachment");
+    Exemplar.create(15, TIMESTAMP_1, attachments);
+  }
+}
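
ExemplarTest exercises the Exemplar type that, as the DistributionTest and AggregationDataTest hunks below show, has moved from the nested Distribution.Exemplar / DistributionData.Exemplar classes into the shared io.opencensus.metrics.data package, with attachments now keyed to typed AttachmentValue objects instead of raw strings. A small sketch of the new shape; the attachment key and value here are illustrative, not taken from the patch.

import io.opencensus.common.Timestamp;
import io.opencensus.metrics.data.AttachmentValue;
import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
import io.opencensus.metrics.data.Exemplar;
import java.util.Collections;
import java.util.Map;

public final class ExemplarSketch {
  public static void main(String[] args) {
    // Attachment values are wrapped in AttachmentValueString rather than passed as plain Strings.
    Map<String, AttachmentValue> attachments =
        Collections.<String, AttachmentValue>singletonMap(
            "trace_id", AttachmentValueString.create("example-trace-id"));
    Exemplar exemplar = Exemplar.create(15.0, Timestamp.create(1, 0), attachments);
    System.out.println(exemplar.getValue() + " @ " + exemplar.getTimestamp());
  }
}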
diff --git a/api/src/test/java/io/opencensus/metrics/export/DistributionTest.java b/api/src/test/java/io/opencensus/metrics/export/DistributionTest.java
index 85b3149..07dc01e 100644
--- a/api/src/test/java/io/opencensus/metrics/export/DistributionTest.java
+++ b/api/src/test/java/io/opencensus/metrics/export/DistributionTest.java
@@ -22,10 +22,12 @@
 import io.opencensus.common.Function;
 import io.opencensus.common.Functions;
 import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.data.AttachmentValue;
+import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
+import io.opencensus.metrics.data.Exemplar;
 import io.opencensus.metrics.export.Distribution.Bucket;
 import io.opencensus.metrics.export.Distribution.BucketOptions;
 import io.opencensus.metrics.export.Distribution.BucketOptions.ExplicitOptions;
-import io.opencensus.metrics.export.Distribution.Exemplar;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -44,7 +46,9 @@
   @Rule public final ExpectedException thrown = ExpectedException.none();
 
   private static final Timestamp TIMESTAMP = Timestamp.create(1, 0);
-  private static final Map<String, String> ATTACHMENTS = Collections.singletonMap("key", "value");
+  private static final AttachmentValue VALUE = AttachmentValueString.create("value");
+  private static final Map<String, AttachmentValue> ATTACHMENTS =
+      Collections.<String, AttachmentValue>singletonMap("key", VALUE);
   private static final double TOLERANCE = 1e-6;
 
   @Test
@@ -190,7 +194,7 @@
 
   @Test
   public void createExemplar_PreventNullAttachmentKey() {
-    Map<String, String> attachments = Collections.singletonMap(null, "value");
+    Map<String, AttachmentValue> attachments = Collections.singletonMap(null, VALUE);
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("key of attachment");
     Exemplar.create(15, TIMESTAMP, attachments);
@@ -198,7 +202,7 @@
 
   @Test
   public void createExemplar_PreventNullAttachmentValue() {
-    Map<String, String> attachments = Collections.singletonMap("key", null);
+    Map<String, AttachmentValue> attachments = Collections.singletonMap("key", null);
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("value of attachment");
     Exemplar.create(15, TIMESTAMP, attachments);
diff --git a/api/src/test/java/io/opencensus/metrics/export/SummaryTest.java b/api/src/test/java/io/opencensus/metrics/export/SummaryTest.java
index c10df04..0253e27 100644
--- a/api/src/test/java/io/opencensus/metrics/export/SummaryTest.java
+++ b/api/src/test/java/io/opencensus/metrics/export/SummaryTest.java
@@ -109,7 +109,7 @@
   @Test
   public void createSnapshot_OneNullValueAtPercentile() {
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("value in valueAtPercentiles");
+    thrown.expectMessage("valueAtPercentile");
     Snapshot.create(10L, 87.07, Collections.<ValueAtPercentile>singletonList(null));
   }
 
diff --git a/api/src/test/java/io/opencensus/metrics/export/TimeSeriesTest.java b/api/src/test/java/io/opencensus/metrics/export/TimeSeriesTest.java
index 92a2c8c..630cdb3 100644
--- a/api/src/test/java/io/opencensus/metrics/export/TimeSeriesTest.java
+++ b/api/src/test/java/io/opencensus/metrics/export/TimeSeriesTest.java
@@ -60,6 +60,16 @@
   }
 
   @Test
+  public void setPoint_TimeSeries() {
+    TimeSeries timeSeries = TimeSeries.create(Arrays.asList(LABEL_VALUE_1, LABEL_VALUE_2));
+    TimeSeries timeSeries1 = timeSeries.setPoint(POINT_1);
+    assertThat(timeSeries1.getLabelValues())
+        .containsExactly(LABEL_VALUE_1, LABEL_VALUE_2)
+        .inOrder();
+    assertThat(timeSeries1.getPoints()).containsExactly(POINT_1).inOrder();
+  }
+
+  @Test
   public void create_WithNullLabelValueList() {
     thrown.expect(NullPointerException.class);
     thrown.expectMessage(CoreMatchers.equalTo("labelValues"));
diff --git a/api/src/test/java/io/opencensus/resource/ResourceTest.java b/api/src/test/java/io/opencensus/resource/ResourceTest.java
new file mode 100644
index 0000000..3d5d6e2
--- /dev/null
+++ b/api/src/test/java/io/opencensus/resource/ResourceTest.java
@@ -0,0 +1,267 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.resource;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.junit.Assert.assertEquals;
+
+import com.google.common.testing.EqualsTester;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link Resource}. */
+@RunWith(JUnit4.class)
+public class ResourceTest {
+  @Rule public final ExpectedException thrown = ExpectedException.none();
+  private static final Resource DEFAULT_RESOURCE =
+      Resource.create(null, Collections.<String, String>emptyMap());
+  private static final Resource DEFAULT_RESOURCE_1 =
+      Resource.create("default", Collections.singletonMap("a", "100"));
+  private Resource resource1;
+  private Resource resource2;
+
+  @Before
+  public void setUp() {
+    Map<String, String> labelMap1 = new HashMap<String, String>();
+    labelMap1.put("a", "1");
+    labelMap1.put("b", "2");
+    Map<String, String> labelMap2 = new HashMap<String, String>();
+    labelMap2.put("a", "1");
+    labelMap2.put("b", "3");
+    labelMap2.put("c", "4");
+    resource1 = Resource.create("t1", labelMap1);
+    resource2 = Resource.create("t2", labelMap2);
+  }
+
+  @Test
+  public void testMaxLength() {
+    assertThat(Resource.MAX_LENGTH).isEqualTo(255);
+  }
+
+  @Test
+  public void testParseResourceType() {
+    String rawEnvType = "k8s.io/container";
+    String envType = Resource.parseResourceType(rawEnvType);
+    assertThat(envType).isNotNull();
+    assertEquals(rawEnvType, envType);
+  }
+
+  @Test
+  public void testParseResourceType_Null() {
+    String envType = Resource.parseResourceType(null);
+    assertThat(envType).isNull();
+  }
+
+  @Test
+  public void testParseResourceType_DisallowUnprintableChars() {
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage(
+        "Type should be a ASCII string with a length greater than 0 and not exceed "
+            + "255 characters.");
+    Resource.parseResourceType("\2ab\3cd");
+  }
+
+  @Test
+  public void testParseResourceType_DisallowTypeNameOverMaxLength() {
+    char[] chars = new char[Resource.MAX_LENGTH + 1];
+    Arrays.fill(chars, 'k');
+    String type = new String(chars);
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage(
+        "Type should be a ASCII string with a length greater than 0 and not exceed "
+            + "255 characters.");
+    Resource.parseResourceType(type);
+  }
+
+  @Test
+  public void testParseResourceLabels() {
+    Map<String, String> expectedLabelsMap = new HashMap<String, String>();
+    expectedLabelsMap.put("k8s.io/pod/name", "pod-xyz-123");
+    expectedLabelsMap.put("k8s.io/container/name", "c1");
+    expectedLabelsMap.put("k8s.io/namespace/name", "default");
+
+    String rawEnvLabels =
+        "k8s.io/pod/name=\"pod-xyz-123\",k8s.io/container/name=\"c1\","
+            + "k8s.io/namespace/name=\"default\"";
+    Map<String, String> labelsMap = Resource.parseResourceLabels(rawEnvLabels);
+    assertEquals(expectedLabelsMap, labelsMap);
+    assertEquals(3, labelsMap.size());
+  }
+
+  @Test
+  public void testParseResourceLabels_WithSpaces() {
+    Map<String, String> expectedLabelsMap = new HashMap<String, String>();
+    expectedLabelsMap.put("example.org/test-1", "test $ \\\"");
+    expectedLabelsMap.put("Abc", "Def");
+
+    String rawEnvLabels = "example.org/test-1=\"test $ \\\"\" ,  Abc=\"Def\"";
+    Map<String, String> labelsMap = Resource.parseResourceLabels(rawEnvLabels);
+    assertEquals(expectedLabelsMap, labelsMap);
+    assertEquals(2, labelsMap.size());
+  }
+
+  @Test
+  public void testParseResourceLabels_SingleKey() {
+    Map<String, String> expectedLabelsMap = new HashMap<String, String>();
+    expectedLabelsMap.put("single", "key");
+
+    String rawEnvLabels = "single=\"key\"";
+    Map<String, String> labelsMap = Resource.parseResourceLabels(rawEnvLabels);
+    assertEquals(1, labelsMap.size());
+    assertEquals(expectedLabelsMap, labelsMap);
+  }
+
+  @Test
+  public void testParseResourceLabels_Null() {
+    Map<String, String> labelsMap = Resource.parseResourceLabels(null);
+    assertThat(labelsMap).isNotNull();
+    assertThat(labelsMap).isEmpty();
+  }
+
+  @Test
+  public void testParseResourceLabels_DisallowUnprintableChars() {
+    String rawEnvLabels = "example.org/test-1=\2ab\3cd";
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage(
+        "Label value should be a ASCII string with a length not exceed 255 characters.");
+    Resource.parseResourceLabels(rawEnvLabels);
+  }
+
+  @Test
+  public void testParseResourceLabels_DisallowLabelKeyOverMaxLength() {
+    char[] chars = new char[Resource.MAX_LENGTH + 1];
+    Arrays.fill(chars, 'k');
+    String rawEnvLabels = new String(chars) + "=test-1";
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage(
+        "Label key should be a ASCII string with a length greater than 0 and not exceed "
+            + "255 characters.");
+    Resource.parseResourceLabels(rawEnvLabels);
+  }
+
+  @Test
+  public void testParseResourceLabels_DisallowLabelValueOverMaxLength() {
+    char[] chars = new char[Resource.MAX_LENGTH + 1];
+    Arrays.fill(chars, 'k');
+    String rawEnvLabels = "example.org/test-1=" + new String(chars);
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage(
+        "Label value should be a ASCII string with a length not exceed 255 characters.");
+    Resource.parseResourceLabels(rawEnvLabels);
+  }
+
+  @Test
+  public void create() {
+    Map<String, String> labelMap = new HashMap<String, String>();
+    labelMap.put("a", "1");
+    labelMap.put("b", "2");
+    Resource resource = Resource.create("t1", labelMap);
+    assertThat(resource.getType()).isNotNull();
+    assertThat(resource.getType()).isEqualTo("t1");
+    assertThat(resource.getLabels()).isNotNull();
+    assertThat(resource.getLabels().size()).isEqualTo(2);
+    assertThat(resource.getLabels()).isEqualTo(labelMap);
+
+    Resource resource1 = Resource.create(null, Collections.<String, String>emptyMap());
+    assertThat(resource1.getType()).isNull();
+    assertThat(resource1.getLabels()).isNotNull();
+    assertThat(resource1.getLabels()).isEmpty();
+  }
+
+  @Test
+  public void testResourceEquals() {
+    Map<String, String> labelMap1 = new HashMap<String, String>();
+    labelMap1.put("a", "1");
+    labelMap1.put("b", "2");
+    Map<String, String> labelMap2 = new HashMap<String, String>();
+    labelMap2.put("a", "1");
+    labelMap2.put("b", "3");
+    labelMap2.put("c", "4");
+    new EqualsTester()
+        .addEqualityGroup(Resource.create("t1", labelMap1), Resource.create("t1", labelMap1))
+        .addEqualityGroup(Resource.create("t2", labelMap2))
+        .testEquals();
+  }
+
+  @Test
+  public void testMergeResources() {
+    Map<String, String> expectedLabelMap = new HashMap<String, String>();
+    expectedLabelMap.put("a", "1");
+    expectedLabelMap.put("b", "2");
+    expectedLabelMap.put("c", "4");
+
+    Resource resource =
+        Resource.mergeResources(Arrays.asList(DEFAULT_RESOURCE, resource1, resource2));
+    assertThat(resource.getType()).isEqualTo("t1");
+    assertThat(resource.getLabels()).isEqualTo(expectedLabelMap);
+  }
+
+  @Test
+  public void testMergeResources_Resource1() {
+    Map<String, String> expectedLabelMap = new HashMap<String, String>();
+    expectedLabelMap.put("a", "1");
+    expectedLabelMap.put("b", "2");
+
+    Resource resource = Resource.mergeResources(Arrays.asList(DEFAULT_RESOURCE, resource1));
+    assertThat(resource.getType()).isEqualTo("t1");
+    assertThat(resource.getLabels()).isEqualTo(expectedLabelMap);
+  }
+
+  @Test
+  public void testMergeResources_Resource1_Null() {
+    Map<String, String> expectedLabelMap = new HashMap<String, String>();
+    expectedLabelMap.put("a", "1");
+    expectedLabelMap.put("b", "3");
+    expectedLabelMap.put("c", "4");
+
+    Resource resource = Resource.mergeResources(Arrays.asList(DEFAULT_RESOURCE, null, resource2));
+    assertThat(resource.getType()).isEqualTo("t2");
+    assertThat(resource.getLabels()).isEqualTo(expectedLabelMap);
+  }
+
+  @Test
+  public void testMergeResources_Resource2_Null() {
+    Map<String, String> expectedLabelMap = new HashMap<String, String>();
+    expectedLabelMap.put("a", "1");
+    expectedLabelMap.put("b", "2");
+
+    Resource resource = Resource.mergeResources(Arrays.asList(DEFAULT_RESOURCE, resource1, null));
+    assertThat(resource.getType()).isEqualTo("t1");
+    assertThat(resource.getLabels()).isEqualTo(expectedLabelMap);
+  }
+
+  @Test
+  public void testMergeResources_DefaultResource() {
+    Map<String, String> expectedLabelMap = new HashMap<String, String>();
+    expectedLabelMap.put("a", "100");
+    expectedLabelMap.put("b", "2");
+    expectedLabelMap.put("c", "4");
+
+    Resource resource =
+        Resource.mergeResources(Arrays.asList(DEFAULT_RESOURCE_1, resource1, resource2));
+    assertThat(resource.getType()).isEqualTo("default");
+    assertThat(resource.getLabels()).isEqualTo(expectedLabelMap);
+  }
+}
diff --git a/api/src/test/java/io/opencensus/stats/AggregationDataTest.java b/api/src/test/java/io/opencensus/stats/AggregationDataTest.java
index a6d6d1d..cfdb897 100644
--- a/api/src/test/java/io/opencensus/stats/AggregationDataTest.java
+++ b/api/src/test/java/io/opencensus/stats/AggregationDataTest.java
@@ -22,9 +22,11 @@
 import io.opencensus.common.Function;
 import io.opencensus.common.Functions;
 import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.data.AttachmentValue;
+import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
+import io.opencensus.metrics.data.Exemplar;
 import io.opencensus.stats.AggregationData.CountData;
 import io.opencensus.stats.AggregationData.DistributionData;
-import io.opencensus.stats.AggregationData.DistributionData.Exemplar;
 import io.opencensus.stats.AggregationData.LastValueDataDouble;
 import io.opencensus.stats.AggregationData.LastValueDataLong;
 import io.opencensus.stats.AggregationData.MeanData;
@@ -48,18 +50,18 @@
   private static final double TOLERANCE = 1e-6;
   private static final Timestamp TIMESTAMP_1 = Timestamp.create(1, 0);
   private static final Timestamp TIMESTAMP_2 = Timestamp.create(2, 0);
-  private static final Map<String, String> ATTACHMENTS = Collections.singletonMap("key", "value");
+  private static final AttachmentValue ATTACHMENT_VALUE = AttachmentValueString.create("value");
+  private static final Map<String, AttachmentValue> ATTACHMENTS =
+      Collections.singletonMap("key", ATTACHMENT_VALUE);
 
   @Rule public ExpectedException thrown = ExpectedException.none();
 
   @Test
   public void testCreateDistributionData() {
     DistributionData distributionData =
-        DistributionData.create(7.7, 10, 1.1, 9.9, 32.2, Arrays.asList(4L, 1L, 5L));
+        DistributionData.create(7.7, 10, 32.2, Arrays.asList(4L, 1L, 5L));
     assertThat(distributionData.getMean()).isWithin(TOLERANCE).of(7.7);
     assertThat(distributionData.getCount()).isEqualTo(10);
-    assertThat(distributionData.getMin()).isWithin(TOLERANCE).of(1.1);
-    assertThat(distributionData.getMax()).isWithin(TOLERANCE).of(9.9);
     assertThat(distributionData.getSumOfSquaredDeviations()).isWithin(TOLERANCE).of(32.2);
     assertThat(distributionData.getBucketCounts()).containsExactly(4L, 1L, 5L).inOrder();
   }
@@ -70,60 +72,29 @@
     Exemplar exemplar2 = Exemplar.create(1, TIMESTAMP_1, ATTACHMENTS);
     DistributionData distributionData =
         DistributionData.create(
-            7.7, 10, 1.1, 9.9, 32.2, Arrays.asList(4L, 1L), Arrays.asList(exemplar1, exemplar2));
+            7.7, 10, 32.2, Arrays.asList(4L, 1L), Arrays.asList(exemplar1, exemplar2));
     assertThat(distributionData.getExemplars()).containsExactly(exemplar1, exemplar2).inOrder();
   }
 
   @Test
-  public void testExemplar() {
-    Exemplar exemplar = Exemplar.create(15.0, TIMESTAMP_1, ATTACHMENTS);
-    assertThat(exemplar.getValue()).isEqualTo(15.0);
-    assertThat(exemplar.getTimestamp()).isEqualTo(TIMESTAMP_1);
-    assertThat(exemplar.getAttachments()).isEqualTo(ATTACHMENTS);
-  }
-
-  @Test
-  public void testExemplar_PreventNullAttachments() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("attachments");
-    Exemplar.create(15, TIMESTAMP_1, null);
-  }
-
-  @Test
-  public void testExemplar_PreventNullAttachmentKey() {
-    Map<String, String> attachments = Collections.singletonMap(null, "value");
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("key of attachment");
-    Exemplar.create(15, TIMESTAMP_1, attachments);
-  }
-
-  @Test
-  public void testExemplar_PreventNullAttachmentValue() {
-    Map<String, String> attachments = Collections.singletonMap("key", null);
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("value of attachment");
-    Exemplar.create(15, TIMESTAMP_1, attachments);
-  }
-
-  @Test
   public void preventNullBucketCountList() {
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("bucketCounts");
-    DistributionData.create(1, 1, 1, 1, 0, null);
+    DistributionData.create(1, 1, 0, null);
   }
 
   @Test
   public void preventNullBucket() {
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("bucket");
-    DistributionData.create(1, 1, 1, 1, 0, Arrays.asList(0L, 1L, null));
+    thrown.expectMessage("bucketCount");
+    DistributionData.create(1, 1, 0, Arrays.asList(0L, 1L, null));
   }
 
   @Test
   public void preventNullExemplarList() {
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("exemplar list should not be null.");
-    DistributionData.create(1, 1, 1, 1, 0, Arrays.asList(0L, 1L, 1L), null);
+    thrown.expectMessage("exemplars");
+    DistributionData.create(1, 1, 0, Arrays.asList(0L, 1L, 1L), null);
   }
 
   @Test
@@ -131,14 +102,7 @@
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("exemplar");
     DistributionData.create(
-        1, 1, 1, 1, 0, Arrays.asList(0L, 1L, 1L), Collections.<Exemplar>singletonList(null));
-  }
-
-  @Test
-  public void preventMinIsGreaterThanMax() {
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("max should be greater or equal to min.");
-    DistributionData.create(1, 1, 10, 1, 0, Arrays.asList(0L, 1L, 0L));
+        1, 1, 0, Arrays.asList(0L, 1L, 1L), Collections.<Exemplar>singletonList(null));
   }
 
   @Test
@@ -150,14 +114,13 @@
         .addEqualityGroup(CountData.create(40), CountData.create(40))
         .addEqualityGroup(CountData.create(80), CountData.create(80))
         .addEqualityGroup(
-            DistributionData.create(10, 10, 1, 1, 0, Arrays.asList(0L, 10L, 0L)),
-            DistributionData.create(10, 10, 1, 1, 0, Arrays.asList(0L, 10L, 0L)))
-        .addEqualityGroup(DistributionData.create(10, 10, 1, 1, 0, Arrays.asList(0L, 10L, 100L)))
-        .addEqualityGroup(DistributionData.create(110, 10, 1, 1, 0, Arrays.asList(0L, 10L, 0L)))
-        .addEqualityGroup(DistributionData.create(10, 110, 1, 1, 0, Arrays.asList(0L, 10L, 0L)))
-        .addEqualityGroup(DistributionData.create(10, 10, -1, 1, 0, Arrays.asList(0L, 10L, 0L)))
-        .addEqualityGroup(DistributionData.create(10, 10, 1, 5, 0, Arrays.asList(0L, 10L, 0L)))
-        .addEqualityGroup(DistributionData.create(10, 10, 1, 1, 55.5, Arrays.asList(0L, 10L, 0L)))
+            DistributionData.create(10, 10, 0, Arrays.asList(0L, 10L, 0L)),
+            DistributionData.create(10, 10, 0, Arrays.asList(0L, 10L, 0L)))
+        .addEqualityGroup(DistributionData.create(10, 10, 0, Arrays.asList(0L, 10L, 100L)))
+        .addEqualityGroup(DistributionData.create(110, 10, 0, Arrays.asList(0L, 10L, 0L)))
+        .addEqualityGroup(DistributionData.create(10, 110, 0, Arrays.asList(0L, 10L, 0L)))
+        .addEqualityGroup(DistributionData.create(10, 10, 10, Arrays.asList(0L, 10L, 0L)))
+        .addEqualityGroup(DistributionData.create(10, 10, 0, Arrays.asList(0L, 110L, 0L)))
         .addEqualityGroup(MeanData.create(5.0, 1), MeanData.create(5.0, 1))
         .addEqualityGroup(MeanData.create(-5.0, 1), MeanData.create(-5.0, 1))
         .addEqualityGroup(LastValueDataDouble.create(20.0), LastValueDataDouble.create(20.0))
@@ -172,7 +135,7 @@
             SumDataDouble.create(10.0),
             SumDataLong.create(100000000),
             CountData.create(40),
-            DistributionData.create(1, 1, 1, 1, 0, Arrays.asList(0L, 10L, 0L)),
+            DistributionData.create(1, 1, 0, Arrays.asList(0L, 10L, 0L)),
             LastValueDataDouble.create(20.0),
             LastValueDataLong.create(200000000L));
 
diff --git a/api/src/test/java/io/opencensus/stats/AggregationTest.java b/api/src/test/java/io/opencensus/stats/AggregationTest.java
index cf33703..11606da 100644
--- a/api/src/test/java/io/opencensus/stats/AggregationTest.java
+++ b/api/src/test/java/io/opencensus/stats/AggregationTest.java
@@ -61,10 +61,11 @@
         .addEqualityGroup(Count.create(), Count.create())
         .addEqualityGroup(
             Distribution.create(BucketBoundaries.create(Arrays.asList(-10.0, 1.0, 5.0))),
-            Distribution.create(BucketBoundaries.create(Arrays.asList(-10.0, 1.0, 5.0))))
-        .addEqualityGroup(
-            Distribution.create(BucketBoundaries.create(Arrays.asList(0.0, 1.0, 5.0))),
+            Distribution.create(BucketBoundaries.create(Arrays.asList(-10.0, 1.0, 5.0))),
             Distribution.create(BucketBoundaries.create(Arrays.asList(0.0, 1.0, 5.0))))
+        .addEqualityGroup(
+            Distribution.create(BucketBoundaries.create(Arrays.asList(1.0, 2.0, 5.0))),
+            Distribution.create(BucketBoundaries.create(Arrays.asList(1.0, 2.0, 5.0))))
         .addEqualityGroup(Mean.create(), Mean.create())
         .addEqualityGroup(LastValue.create(), LastValue.create())
         .testEquals();
diff --git a/api/src/test/java/io/opencensus/stats/BucketBoundariesTest.java b/api/src/test/java/io/opencensus/stats/BucketBoundariesTest.java
index 36f2edb..f5b0f29 100644
--- a/api/src/test/java/io/opencensus/stats/BucketBoundariesTest.java
+++ b/api/src/test/java/io/opencensus/stats/BucketBoundariesTest.java
@@ -37,8 +37,24 @@
   @Test
   public void testConstructBoundaries() {
     List<Double> buckets = Arrays.asList(0.0, 1.0, 2.0);
+    List<Double> expectedBuckets = Arrays.asList(1.0, 2.0);
     BucketBoundaries bucketBoundaries = BucketBoundaries.create(buckets);
-    assertThat(bucketBoundaries.getBoundaries()).isEqualTo(buckets);
+    assertThat(bucketBoundaries.getBoundaries()).isEqualTo(expectedBuckets);
+  }
+
+  @Test
+  public void testConstructBoundaries_IgnoreNegativeBounds() {
+    List<Double> buckets = Arrays.asList(-5.0, -1.0, 1.0, 2.0);
+    List<Double> expectedBuckets = Arrays.asList(1.0, 2.0);
+    BucketBoundaries bucketBoundaries = BucketBoundaries.create(buckets);
+    assertThat(bucketBoundaries.getBoundaries()).isEqualTo(expectedBuckets);
+  }
+
+  @Test
+  public void testConstructBoundaries_IgnoreZeroAndNegativeBounds() {
+    List<Double> buckets = Arrays.asList(-5.0, -2.0, -1.0, 0.0);
+    BucketBoundaries bucketBoundaries = BucketBoundaries.create(buckets);
+    assertThat(bucketBoundaries.getBoundaries()).isEmpty();
   }
 
   @Test
@@ -50,7 +66,7 @@
     BucketBoundaries bucketBoundaries = BucketBoundaries.create(original);
     original.set(2, 3.0);
     original.add(4.0);
-    List<Double> expected = Arrays.asList(0.0, 1.0, 2.0);
+    List<Double> expected = Arrays.asList(1.0, 2.0);
     assertThat(bucketBoundaries.getBoundaries()).isNotEqualTo(original);
     assertThat(bucketBoundaries.getBoundaries()).isEqualTo(expected);
   }
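
The new BucketBoundariesTest cases document a behavioral change in this upgrade: BucketBoundaries.create now discards bucket bounds that are zero or negative instead of keeping them. A short sketch of the effect, with illustrative values:

import io.opencensus.stats.BucketBoundaries;
import java.util.Arrays;

public final class BucketBoundariesSketch {
  public static void main(String[] args) {
    BucketBoundaries boundaries = BucketBoundaries.create(Arrays.asList(-5.0, 0.0, 1.0, 2.0));
    // Non-positive bounds are dropped, so only [1.0, 2.0] remain.
    System.out.println(boundaries.getBoundaries());
  }
}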
diff --git a/api/src/test/java/io/opencensus/stats/NoopStatsTest.java b/api/src/test/java/io/opencensus/stats/NoopStatsTest.java
index 4bae14a..6bf7448 100644
--- a/api/src/test/java/io/opencensus/stats/NoopStatsTest.java
+++ b/api/src/test/java/io/opencensus/stats/NoopStatsTest.java
@@ -18,6 +18,8 @@
 
 import static com.google.common.truth.Truth.assertThat;
 
+import io.opencensus.metrics.data.AttachmentValue;
+import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
 import io.opencensus.stats.Measure.MeasureDouble;
 import io.opencensus.tags.Tag;
 import io.opencensus.tags.TagContext;
@@ -40,6 +42,7 @@
   private static final Tag TAG = Tag.create(TagKey.create("key"), TagValue.create("value"));
   private static final MeasureDouble MEASURE =
       Measure.MeasureDouble.create("my measure", "description", "s");
+  private static final AttachmentValue ATTACHMENT_VALUE = AttachmentValueString.create("value");
 
   private final TagContext tagContext =
       new TagContext() {
@@ -55,7 +58,7 @@
   @Test
   public void noopStatsComponent() {
     assertThat(NoopStats.newNoopStatsComponent().getStatsRecorder())
-        .isSameAs(NoopStats.getNoopStatsRecorder());
+        .isSameInstanceAs(NoopStats.getNoopStatsRecorder());
     assertThat(NoopStats.newNoopStatsComponent().getViewManager())
         .isInstanceOf(NoopStats.newNoopViewManager().getClass());
   }
@@ -98,7 +101,7 @@
     MeasureMap measureMap = NoopStats.getNoopStatsRecorder().newMeasureMap();
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("key");
-    measureMap.putAttachment(null, "value");
+    measureMap.putAttachment(null, ATTACHMENT_VALUE);
   }
 
   @Test
@@ -106,7 +109,12 @@
     MeasureMap measureMap = NoopStats.getNoopStatsRecorder().newMeasureMap();
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("value");
-    measureMap.putAttachment("key", null);
+    measureMap.putAttachment("key", (AttachmentValue) null);
+  }
+
+  @Test
+  public void noopStatsRecorder_PutNegativeValue() {
+    NoopStats.getNoopStatsRecorder().newMeasureMap().put(MEASURE, -5).record(tagContext);
   }
 
   // The NoopStatsRecorder should do nothing, so this test just checks that record doesn't throw an
diff --git a/api/src/test/java/io/opencensus/stats/ViewDataTest.java b/api/src/test/java/io/opencensus/stats/ViewDataTest.java
index 0120ffe..ccd12a6 100644
--- a/api/src/test/java/io/opencensus/stats/ViewDataTest.java
+++ b/api/src/test/java/io/opencensus/stats/ViewDataTest.java
@@ -285,9 +285,9 @@
   private static final ImmutableMap<List<TagValue>, DistributionData> ENTRIES =
       ImmutableMap.of(
           Arrays.asList(V1, V2),
-          DistributionData.create(1, 1, 1, 1, 0, Arrays.asList(0L, 1L, 0L)),
+          DistributionData.create(1, 1, 0, Arrays.asList(0L, 1L, 0L)),
           Arrays.asList(V10, V20),
-          DistributionData.create(-5, 6, -20, 5, 100.1, Arrays.asList(5L, 0L, 1L)));
+          DistributionData.create(-5, 6, 100.1, Arrays.asList(5L, 0L, 1L)));
 
   // name
   private static final View.Name NAME = View.Name.create("test-view");
diff --git a/api/src/test/java/io/opencensus/stats/ViewTest.java b/api/src/test/java/io/opencensus/stats/ViewTest.java
index afba1bc..91748d8 100644
--- a/api/src/test/java/io/opencensus/stats/ViewTest.java
+++ b/api/src/test/java/io/opencensus/stats/ViewTest.java
@@ -56,7 +56,7 @@
                 TagKey.create("ab"), TagKey.create("a"), TagKey.create("A"), TagKey.create("b")));
     assertThat(view.getColumns())
         .containsExactly(
-            TagKey.create("A"), TagKey.create("a"), TagKey.create("ab"), TagKey.create("b"))
+            TagKey.create("a"), TagKey.create("A"), TagKey.create("ab"), TagKey.create("b"))
         .inOrder();
   }
 
diff --git a/api/src/test/java/io/opencensus/tags/InternalUtilsTest.java b/api/src/test/java/io/opencensus/tags/InternalUtilsTest.java
index 65482de..41790e2 100644
--- a/api/src/test/java/io/opencensus/tags/InternalUtilsTest.java
+++ b/api/src/test/java/io/opencensus/tags/InternalUtilsTest.java
@@ -39,6 +39,6 @@
             return iterator;
           }
         };
-    assertThat(InternalUtils.getTags(ctx)).isSameAs(iterator);
+    assertThat(InternalUtils.getTags(ctx)).isSameInstanceAs(iterator);
   }
 }
diff --git a/api/src/test/java/io/opencensus/tags/NoopTagsTest.java b/api/src/test/java/io/opencensus/tags/NoopTagsTest.java
index db07520..851e265 100644
--- a/api/src/test/java/io/opencensus/tags/NoopTagsTest.java
+++ b/api/src/test/java/io/opencensus/tags/NoopTagsTest.java
@@ -23,8 +23,12 @@
 import io.opencensus.tags.propagation.TagContextBinarySerializer;
 import io.opencensus.tags.propagation.TagContextDeserializationException;
 import io.opencensus.tags.propagation.TagContextSerializationException;
+import io.opencensus.tags.propagation.TagContextTextFormat;
+import io.opencensus.tags.propagation.TagContextTextFormat.Getter;
+import io.opencensus.tags.propagation.TagContextTextFormat.Setter;
 import java.util.Arrays;
 import java.util.Iterator;
+import javax.annotation.Nullable;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -46,13 +50,29 @@
         }
       };
 
+  private static final Setter<Object> NOOP_SETTER =
+      new Setter<Object>() {
+        @Override
+        public void put(Object carrier, String key, String value) {}
+      };
+
+  private static final Getter<Object> NOOP_GETTER =
+      new Getter<Object>() {
+        @Nullable
+        @Override
+        public String get(Object carrier, String key) {
+          return null;
+        }
+      };
+
   @Rule public final ExpectedException thrown = ExpectedException.none();
 
   @Test
   public void noopTagsComponent() {
-    assertThat(NoopTags.newNoopTagsComponent().getTagger()).isSameAs(NoopTags.getNoopTagger());
+    assertThat(NoopTags.newNoopTagsComponent().getTagger())
+        .isSameInstanceAs(NoopTags.getNoopTagger());
     assertThat(NoopTags.newNoopTagsComponent().getTagPropagationComponent())
-        .isSameAs(NoopTags.getNoopTagPropagationComponent());
+        .isSameInstanceAs(NoopTags.getNoopTagPropagationComponent());
   }
 
   @Test
@@ -88,12 +108,13 @@
   @Test
   public void noopTagger() {
     Tagger noopTagger = NoopTags.getNoopTagger();
-    assertThat(noopTagger.empty()).isSameAs(NoopTags.getNoopTagContext());
-    assertThat(noopTagger.getCurrentTagContext()).isSameAs(NoopTags.getNoopTagContext());
-    assertThat(noopTagger.emptyBuilder()).isSameAs(NoopTags.getNoopTagContextBuilder());
-    assertThat(noopTagger.toBuilder(TAG_CONTEXT)).isSameAs(NoopTags.getNoopTagContextBuilder());
-    assertThat(noopTagger.currentBuilder()).isSameAs(NoopTags.getNoopTagContextBuilder());
-    assertThat(noopTagger.withTagContext(TAG_CONTEXT)).isSameAs(NoopScope.getInstance());
+    assertThat(noopTagger.empty()).isSameInstanceAs(NoopTags.getNoopTagContext());
+    assertThat(noopTagger.getCurrentTagContext()).isSameInstanceAs(NoopTags.getNoopTagContext());
+    assertThat(noopTagger.emptyBuilder()).isSameInstanceAs(NoopTags.getNoopTagContextBuilder());
+    assertThat(noopTagger.toBuilder(TAG_CONTEXT))
+        .isSameInstanceAs(NoopTags.getNoopTagContextBuilder());
+    assertThat(noopTagger.currentBuilder()).isSameInstanceAs(NoopTags.getNoopTagContextBuilder());
+    assertThat(noopTagger.withTagContext(TAG_CONTEXT)).isSameInstanceAs(NoopScope.getInstance());
   }
 
   @Test
@@ -112,12 +133,14 @@
 
   @Test
   public void noopTagContextBuilder() {
-    assertThat(NoopTags.getNoopTagContextBuilder().build()).isSameAs(NoopTags.getNoopTagContext());
+    assertThat(NoopTags.getNoopTagContextBuilder().build())
+        .isSameInstanceAs(NoopTags.getNoopTagContext());
     assertThat(NoopTags.getNoopTagContextBuilder().put(KEY, VALUE).build())
-        .isSameAs(NoopTags.getNoopTagContext());
-    assertThat(NoopTags.getNoopTagContextBuilder().buildScoped()).isSameAs(NoopScope.getInstance());
+        .isSameInstanceAs(NoopTags.getNoopTagContext());
+    assertThat(NoopTags.getNoopTagContextBuilder().buildScoped())
+        .isSameInstanceAs(NoopScope.getInstance());
     assertThat(NoopTags.getNoopTagContextBuilder().put(KEY, VALUE).buildScoped())
-        .isSameAs(NoopScope.getInstance());
+        .isSameInstanceAs(NoopScope.getInstance());
   }
 
   @Test
@@ -135,6 +158,13 @@
   }
 
   @Test
+  public void noopTagContextBuilder_Put_DisallowsNullTagMetadata() {
+    TagContextBuilder noopBuilder = NoopTags.getNoopTagContextBuilder();
+    thrown.expect(NullPointerException.class);
+    noopBuilder.put(KEY, VALUE, null);
+  }
+
+  @Test
   public void noopTagContextBuilder_Remove_DisallowsNullKey() {
     TagContextBuilder noopBuilder = NoopTags.getNoopTagContextBuilder();
     thrown.expect(NullPointerException.class);
@@ -149,7 +179,9 @@
   @Test
   public void noopTagPropagationComponent() {
     assertThat(NoopTags.getNoopTagPropagationComponent().getBinarySerializer())
-        .isSameAs(NoopTags.getNoopTagContextBinarySerializer());
+        .isSameInstanceAs(NoopTags.getNoopTagContextBinarySerializer());
+    assertThat(NoopTags.getNoopTagPropagationComponent().getCorrelationContextFormat())
+        .isSameInstanceAs(NoopTags.getNoopTagContextTextSerializer());
   }
 
   @Test
@@ -176,4 +208,52 @@
     thrown.expect(NullPointerException.class);
     noopSerializer.fromByteArray(null);
   }
+
+  @Test
+  public void noopTagContextTextFormat()
+      throws TagContextDeserializationException, TagContextSerializationException {
+    NoopTags.getNoopTagContextTextSerializer().inject(TAG_CONTEXT, new Object(), NOOP_SETTER);
+    assertThat(NoopTags.getNoopTagContextTextSerializer().extract(new Object(), NOOP_GETTER))
+        .isEqualTo(NoopTags.getNoopTagContext());
+  }
+
+  @Test
+  public void noopTagContextTextFormat_inject_DisallowsNullTagContext()
+      throws TagContextSerializationException {
+    TagContextTextFormat noopSerializer = NoopTags.getNoopTagContextTextSerializer();
+    thrown.expect(NullPointerException.class);
+    noopSerializer.inject(null, new Object(), NOOP_SETTER);
+  }
+
+  @Test
+  public void noopTagContextTextFormat_inject_DisallowsNullCarrier()
+      throws TagContextSerializationException {
+    TagContextTextFormat noopSerializer = NoopTags.getNoopTagContextTextSerializer();
+    thrown.expect(NullPointerException.class);
+    noopSerializer.inject(TAG_CONTEXT, null, NOOP_SETTER);
+  }
+
+  @Test
+  public void noopTagContextTextFormat_inject_DisallowsNullSetter()
+      throws TagContextSerializationException {
+    TagContextTextFormat noopSerializer = NoopTags.getNoopTagContextTextSerializer();
+    thrown.expect(NullPointerException.class);
+    noopSerializer.inject(TAG_CONTEXT, new Object(), null);
+  }
+
+  @Test
+  public void noopTagContextTextFormat_extract_DisallowsNullCarrier()
+      throws TagContextDeserializationException {
+    TagContextTextFormat noopSerializer = NoopTags.getNoopTagContextTextSerializer();
+    thrown.expect(NullPointerException.class);
+    noopSerializer.extract(null, NOOP_GETTER);
+  }
+
+  @Test
+  public void noopTagContextTextFormat_extract_DisallowsNullGetter()
+      throws TagContextDeserializationException {
+    TagContextTextFormat noopSerializer = NoopTags.getNoopTagContextTextSerializer();
+    thrown.expect(NullPointerException.class);
+    noopSerializer.extract(new Object(), null);
+  }
 }
diff --git a/api/src/test/java/io/opencensus/tags/TagMetadataTest.java b/api/src/test/java/io/opencensus/tags/TagMetadataTest.java
new file mode 100644
index 0000000..d8e2814
--- /dev/null
+++ b/api/src/test/java/io/opencensus/tags/TagMetadataTest.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.tags;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import com.google.common.testing.EqualsTester;
+import io.opencensus.tags.TagMetadata.TagTtl;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Tests for {@link TagMetadata}. */
+@RunWith(JUnit4.class)
+public class TagMetadataTest {
+
+  @Test
+  public void testGetTagTtl() {
+    TagMetadata tagMetadata = TagMetadata.create(TagTtl.NO_PROPAGATION);
+    assertThat(tagMetadata.getTagTtl()).isEqualTo(TagTtl.NO_PROPAGATION);
+  }
+
+  @Test
+  public void testEquals() {
+    new EqualsTester()
+        .addEqualityGroup(
+            TagMetadata.create(TagTtl.NO_PROPAGATION), TagMetadata.create(TagTtl.NO_PROPAGATION))
+        .addEqualityGroup(TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION))
+        .testEquals();
+  }
+}
diff --git a/api/src/test/java/io/opencensus/tags/TagTest.java b/api/src/test/java/io/opencensus/tags/TagTest.java
index 3c899e6..428ff5a 100644
--- a/api/src/test/java/io/opencensus/tags/TagTest.java
+++ b/api/src/test/java/io/opencensus/tags/TagTest.java
@@ -19,6 +19,7 @@
 import static com.google.common.truth.Truth.assertThat;
 
 import com.google.common.testing.EqualsTester;
+import io.opencensus.tags.TagMetadata.TagTtl;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
@@ -27,20 +28,41 @@
 @RunWith(JUnit4.class)
 public final class TagTest {
 
+  private static final TagKey KEY = TagKey.create("KEY");
+  private static final TagKey KEY_2 = TagKey.create("KEY2");
+  private static final TagValue VALUE = TagValue.create("VALUE");
+  private static final TagValue VALUE_2 = TagValue.create("VALUE2");
+  private static final TagMetadata METADATA_UNLIMITED_PROPAGATION =
+      TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION);
+  private static final TagMetadata METADATA_NO_PROPAGATION =
+      TagMetadata.create(TagTtl.NO_PROPAGATION);
+
   @Test
   public void testGetKey() {
-    assertThat(Tag.create(TagKey.create("k"), TagValue.create("v")).getKey())
-        .isEqualTo(TagKey.create("k"));
+    assertThat(Tag.create(KEY, VALUE).getKey()).isEqualTo(KEY);
+  }
+
+  @Test
+  public void testGetTagMetadata() {
+    assertThat(Tag.create(KEY, VALUE, METADATA_NO_PROPAGATION).getTagMetadata())
+        .isEqualTo(METADATA_NO_PROPAGATION);
+  }
+
+  @Test
+  public void testGetTagMetadata_default() {
+    assertThat(Tag.create(KEY, VALUE).getTagMetadata()).isEqualTo(METADATA_UNLIMITED_PROPAGATION);
   }
 
   @Test
   public void testTagEquals() {
     new EqualsTester()
         .addEqualityGroup(
-            Tag.create(TagKey.create("Key"), TagValue.create("foo")),
-            Tag.create(TagKey.create("Key"), TagValue.create("foo")))
-        .addEqualityGroup(Tag.create(TagKey.create("Key"), TagValue.create("bar")))
-        .addEqualityGroup(Tag.create(TagKey.create("Key2"), TagValue.create("foo")))
+            Tag.create(KEY, VALUE),
+            Tag.create(KEY, VALUE),
+            Tag.create(KEY, VALUE, METADATA_UNLIMITED_PROPAGATION))
+        .addEqualityGroup(Tag.create(KEY, VALUE_2))
+        .addEqualityGroup(Tag.create(KEY_2, VALUE))
+        .addEqualityGroup(Tag.create(KEY, VALUE, METADATA_NO_PROPAGATION))
         .testEquals();
   }
 }
diff --git a/api/src/test/java/io/opencensus/tags/unsafe/ContextUtilsTest.java b/api/src/test/java/io/opencensus/tags/unsafe/ContextUtilsTest.java
index c35c5dc..be4777d 100644
--- a/api/src/test/java/io/opencensus/tags/unsafe/ContextUtilsTest.java
+++ b/api/src/test/java/io/opencensus/tags/unsafe/ContextUtilsTest.java
@@ -31,24 +31,19 @@
 /** Unit tests for {@link ContextUtils}. */
 @RunWith(JUnit4.class)
 public final class ContextUtilsTest {
-  @Test
-  public void testContextKeyName() {
-    // Context.Key.toString() returns the name.
-    assertThat(ContextUtils.TAG_CONTEXT_KEY.toString()).isEqualTo("opencensus-tag-context-key");
-  }
 
   @Test
   public void testGetCurrentTagContext_DefaultContext() {
-    TagContext tags = ContextUtils.TAG_CONTEXT_KEY.get();
+    TagContext tags = ContextUtils.getValue(Context.current());
     assertThat(tags).isNotNull();
     assertThat(asList(tags)).isEmpty();
   }
 
   @Test
   public void testGetCurrentTagContext_ContextSetToNull() {
-    Context orig = Context.current().withValue(ContextUtils.TAG_CONTEXT_KEY, null).attach();
+    Context orig = ContextUtils.withValue(Context.current(), null).attach();
     try {
-      TagContext tags = ContextUtils.TAG_CONTEXT_KEY.get();
+      TagContext tags = ContextUtils.getValue(Context.current());
       assertThat(tags).isNotNull();
       assertThat(asList(tags)).isEmpty();
     } finally {
diff --git a/api/src/test/java/io/opencensus/trace/BigendianEncodingTest.java b/api/src/test/java/io/opencensus/trace/BigendianEncodingTest.java
new file mode 100644
index 0000000..77476a7
--- /dev/null
+++ b/api/src/test/java/io/opencensus/trace/BigendianEncodingTest.java
@@ -0,0 +1,194 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.trace;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import java.nio.CharBuffer;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link BigendianEncoding}. */
+@RunWith(JUnit4.class)
+public class BigendianEncodingTest {
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  private static final long FIRST_LONG = 0x1213141516171819L;
+  private static final byte[] FIRST_BYTE_ARRAY =
+      new byte[] {0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19};
+  private static final char[] FIRST_CHAR_ARRAY =
+      new char[] {'1', '2', '1', '3', '1', '4', '1', '5', '1', '6', '1', '7', '1', '8', '1', '9'};
+  private static final long SECOND_LONG = 0xFFEEDDCCBBAA9988L;
+  private static final byte[] SECOND_BYTE_ARRAY =
+      new byte[] {
+        (byte) 0xFF, (byte) 0xEE, (byte) 0xDD, (byte) 0xCC,
+        (byte) 0xBB, (byte) 0xAA, (byte) 0x99, (byte) 0x88
+      };
+  private static final char[] SECOND_CHAR_ARRAY =
+      new char[] {'f', 'f', 'e', 'e', 'd', 'd', 'c', 'c', 'b', 'b', 'a', 'a', '9', '9', '8', '8'};
+  private static final byte[] BOTH_BYTE_ARRAY =
+      new byte[] {
+        0x12,
+        0x13,
+        0x14,
+        0x15,
+        0x16,
+        0x17,
+        0x18,
+        0x19,
+        (byte) 0xFF,
+        (byte) 0xEE,
+        (byte) 0xDD,
+        (byte) 0xCC,
+        (byte) 0xBB,
+        (byte) 0xAA,
+        (byte) 0x99,
+        (byte) 0x88
+      };
+  private static final char[] BOTH_CHAR_ARRAY =
+      new char[] {
+        '1', '2', '1', '3', '1', '4', '1', '5', '1', '6', '1', '7', '1', '8', '1', '9', 'f', 'f',
+        'e', 'e', 'd', 'd', 'c', 'c', 'b', 'b', 'a', 'a', '9', '9', '8', '8'
+      };
+
+  @Test
+  public void longToByteArray_Fails() {
+    // The destination array is too small to hold a long at the given offset.
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("array too small");
+    BigendianEncoding.longToByteArray(123, new byte[BigendianEncoding.LONG_BYTES], 1);
+  }
+
+  @Test
+  public void longToByteArray() {
+    byte[] result1 = new byte[BigendianEncoding.LONG_BYTES];
+    BigendianEncoding.longToByteArray(FIRST_LONG, result1, 0);
+    assertThat(result1).isEqualTo(FIRST_BYTE_ARRAY);
+
+    byte[] result2 = new byte[BigendianEncoding.LONG_BYTES];
+    BigendianEncoding.longToByteArray(SECOND_LONG, result2, 0);
+    assertThat(result2).isEqualTo(SECOND_BYTE_ARRAY);
+
+    byte[] result3 = new byte[2 * BigendianEncoding.LONG_BYTES];
+    BigendianEncoding.longToByteArray(FIRST_LONG, result3, 0);
+    BigendianEncoding.longToByteArray(SECOND_LONG, result3, BigendianEncoding.LONG_BYTES);
+    assertThat(result3).isEqualTo(BOTH_BYTE_ARRAY);
+  }
+
+  @Test
+  public void longFromByteArray_ArrayTooSmall() {
+    // The source array is too small to read a long from the given offset.
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("array too small");
+    BigendianEncoding.longFromByteArray(new byte[BigendianEncoding.LONG_BYTES], 1);
+  }
+
+  @Test
+  public void longFromByteArray() {
+    assertThat(BigendianEncoding.longFromByteArray(FIRST_BYTE_ARRAY, 0)).isEqualTo(FIRST_LONG);
+
+    assertThat(BigendianEncoding.longFromByteArray(SECOND_BYTE_ARRAY, 0)).isEqualTo(SECOND_LONG);
+
+    assertThat(BigendianEncoding.longFromByteArray(BOTH_BYTE_ARRAY, 0)).isEqualTo(FIRST_LONG);
+
+    assertThat(BigendianEncoding.longFromByteArray(BOTH_BYTE_ARRAY, BigendianEncoding.LONG_BYTES))
+        .isEqualTo(SECOND_LONG);
+  }
+
+  @Test
+  public void toFromByteArray() {
+    toFromByteArrayValidate(0x8000000000000000L);
+    toFromByteArrayValidate(-1);
+    toFromByteArrayValidate(0);
+    toFromByteArrayValidate(1);
+    toFromByteArrayValidate(0x7FFFFFFFFFFFFFFFL);
+  }
+
+  @Test
+  public void longToBase16String() {
+    char[] chars1 = new char[BigendianEncoding.LONG_BASE16];
+    BigendianEncoding.longToBase16String(FIRST_LONG, chars1, 0);
+    assertThat(chars1).isEqualTo(FIRST_CHAR_ARRAY);
+
+    char[] chars2 = new char[BigendianEncoding.LONG_BASE16];
+    BigendianEncoding.longToBase16String(SECOND_LONG, chars2, 0);
+    assertThat(chars2).isEqualTo(SECOND_CHAR_ARRAY);
+
+    char[] chars3 = new char[2 * BigendianEncoding.LONG_BASE16];
+    BigendianEncoding.longToBase16String(FIRST_LONG, chars3, 0);
+    BigendianEncoding.longToBase16String(SECOND_LONG, chars3, BigendianEncoding.LONG_BASE16);
+    assertThat(chars3).isEqualTo(BOTH_CHAR_ARRAY);
+  }
+
+  @Test
+  public void longFromBase16String_InputTooSmall() {
+    // The buffer is too small to read 16 base16 characters at the given offset.
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("chars too small");
+    BigendianEncoding.longFromBase16String(
+        CharBuffer.wrap(new char[BigendianEncoding.LONG_BASE16]), 1);
+  }
+
+  @Test
+  public void longFromBase16String_UnrecognizedCharacters() {
+    // The input contains a character ('g') that is not in the base16 alphabet.
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("invalid character g");
+    BigendianEncoding.longFromBase16String("0123456789gbcdef", 0);
+  }
+
+  @Test
+  public void longFromBase16String() {
+    assertThat(BigendianEncoding.longFromBase16String(CharBuffer.wrap(FIRST_CHAR_ARRAY), 0))
+        .isEqualTo(FIRST_LONG);
+
+    assertThat(BigendianEncoding.longFromBase16String(CharBuffer.wrap(SECOND_CHAR_ARRAY), 0))
+        .isEqualTo(SECOND_LONG);
+
+    assertThat(BigendianEncoding.longFromBase16String(CharBuffer.wrap(BOTH_CHAR_ARRAY), 0))
+        .isEqualTo(FIRST_LONG);
+
+    assertThat(
+            BigendianEncoding.longFromBase16String(
+                CharBuffer.wrap(BOTH_CHAR_ARRAY), BigendianEncoding.LONG_BASE16))
+        .isEqualTo(SECOND_LONG);
+  }
+
+  @Test
+  public void toFromBase16String() {
+    toFromBase16StringValidate(0x8000000000000000L);
+    toFromBase16StringValidate(-1);
+    toFromBase16StringValidate(0);
+    toFromBase16StringValidate(1);
+    toFromBase16StringValidate(0x7FFFFFFFFFFFFFFFL);
+  }
+
+  private static void toFromByteArrayValidate(long value) {
+    byte[] array = new byte[BigendianEncoding.LONG_BYTES];
+    BigendianEncoding.longToByteArray(value, array, 0);
+    assertThat(BigendianEncoding.longFromByteArray(array, 0)).isEqualTo(value);
+  }
+
+  private static void toFromBase16StringValidate(long value) {
+    char[] dest = new char[BigendianEncoding.LONG_BASE16];
+    BigendianEncoding.longToBase16String(value, dest, 0);
+    assertThat(BigendianEncoding.longFromBase16String(CharBuffer.wrap(dest), 0)).isEqualTo(value);
+  }
+}
diff --git a/api/src/test/java/io/opencensus/trace/CurrentSpanUtilsTest.java b/api/src/test/java/io/opencensus/trace/CurrentSpanUtilsTest.java
index 6b16c3d..d186197 100644
--- a/api/src/test/java/io/opencensus/trace/CurrentSpanUtilsTest.java
+++ b/api/src/test/java/io/opencensus/trace/CurrentSpanUtilsTest.java
@@ -17,13 +17,12 @@
 package io.opencensus.trace;
 
 import static com.google.common.truth.Truth.assertThat;
-import static org.mockito.Matchers.same;
+import static org.mockito.ArgumentMatchers.same;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.verifyZeroInteractions;
 
-import io.grpc.Context;
 import io.opencensus.common.Scope;
-import io.opencensus.trace.unsafe.ContextUtils;
+import io.opencensus.trace.unsafe.ContextHandleUtils;
 import java.util.concurrent.Callable;
 import org.junit.Before;
 import org.junit.Test;
@@ -68,227 +67,228 @@
 
   @Test
   public void getCurrentSpan_WhenNoContext() {
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
   }
 
   @Test
   public void getCurrentSpan() {
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
-    Context origContext = Context.current().withValue(ContextUtils.CONTEXT_SPAN_KEY, span).attach();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
+    ContextHandle origContext =
+        ContextHandleUtils.withValue(ContextHandleUtils.currentContext(), span).attach();
     // Make sure context is detached even if test fails.
     try {
-      assertThat(CurrentSpanUtils.getCurrentSpan()).isSameAs(span);
+      assertThat(CurrentSpanUtils.getCurrentSpan()).isSameInstanceAs(span);
     } finally {
-      Context.current().detach(origContext);
+      ContextHandleUtils.currentContext().detach(origContext);
     }
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
   }
 
   @Test
   public void withSpan_CloseDetaches() {
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
     Scope ws = CurrentSpanUtils.withSpan(span, false);
     try {
-      assertThat(CurrentSpanUtils.getCurrentSpan()).isSameAs(span);
+      assertThat(CurrentSpanUtils.getCurrentSpan()).isSameInstanceAs(span);
     } finally {
       ws.close();
     }
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
     verifyZeroInteractions(span);
   }
 
   @Test
   public void withSpan_CloseDetachesAndEndsSpan() {
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
     Scope ss = CurrentSpanUtils.withSpan(span, true);
     try {
-      assertThat(CurrentSpanUtils.getCurrentSpan()).isSameAs(span);
+      assertThat(CurrentSpanUtils.getCurrentSpan()).isSameInstanceAs(span);
     } finally {
       ss.close();
     }
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
     verify(span).end(same(EndSpanOptions.DEFAULT));
   }
 
   @Test
   public void withSpanRunnable() {
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
     Runnable runnable =
         new Runnable() {
           @Override
           public void run() {
             // When we run the runnable we will have the span in the current Context.
-            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameAs(span);
+            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameInstanceAs(span);
           }
         };
     CurrentSpanUtils.withSpan(span, false, runnable).run();
     verifyZeroInteractions(span);
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
   }
 
   @Test
   public void withSpanRunnable_EndSpan() {
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
     Runnable runnable =
         new Runnable() {
           @Override
           public void run() {
             // When we run the runnable we will have the span in the current Context.
-            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameAs(span);
+            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameInstanceAs(span);
           }
         };
     CurrentSpanUtils.withSpan(span, true, runnable).run();
     verify(span).end(EndSpanOptions.DEFAULT);
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
   }
 
   @Test
   public void withSpanRunnable_WithError() {
     final AssertionError error = new AssertionError("MyError");
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
     Runnable runnable =
         new Runnable() {
           @Override
           public void run() {
             // When we run the runnable we will have the span in the current Context.
-            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameAs(span);
+            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameInstanceAs(span);
             throw error;
           }
         };
     executeRunnableAndExpectError(runnable, error);
     verify(span).setStatus(Status.UNKNOWN.withDescription("MyError"));
     verify(span).end(EndSpanOptions.DEFAULT);
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
   }
 
   @Test
   public void withSpanRunnable_WithErrorNoMessage() {
     final AssertionError error = new AssertionError();
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
     Runnable runnable =
         new Runnable() {
           @Override
           public void run() {
             // When we run the runnable we will have the span in the current Context.
-            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameAs(span);
+            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameInstanceAs(span);
             throw error;
           }
         };
     executeRunnableAndExpectError(runnable, error);
     verify(span).setStatus(Status.UNKNOWN.withDescription("AssertionError"));
     verify(span).end(EndSpanOptions.DEFAULT);
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
   }
 
   @Test
   public void withSpanCallable() throws Exception {
     final Object ret = new Object();
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
     Callable<Object> callable =
         new Callable<Object>() {
           @Override
           public Object call() throws Exception {
             // When we run the runnable we will have the span in the current Context.
-            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameAs(span);
+            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameInstanceAs(span);
             return ret;
           }
         };
     assertThat(CurrentSpanUtils.withSpan(span, false, callable).call()).isEqualTo(ret);
     verifyZeroInteractions(span);
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
   }
 
   @Test
   public void withSpanCallable_EndSpan() throws Exception {
     final Object ret = new Object();
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
     Callable<Object> callable =
         new Callable<Object>() {
           @Override
           public Object call() throws Exception {
             // When we run the runnable we will have the span in the current Context.
-            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameAs(span);
+            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameInstanceAs(span);
             return ret;
           }
         };
     assertThat(CurrentSpanUtils.withSpan(span, true, callable).call()).isEqualTo(ret);
     verify(span).end(EndSpanOptions.DEFAULT);
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
   }
 
   @Test
   public void withSpanCallable_WithException() {
     final Exception exception = new Exception("MyException");
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
     Callable<Object> callable =
         new Callable<Object>() {
           @Override
           public Object call() throws Exception {
             // When we run the runnable we will have the span in the current Context.
-            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameAs(span);
+            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameInstanceAs(span);
             throw exception;
           }
         };
     executeCallableAndExpectError(callable, exception);
     verify(span).setStatus(Status.UNKNOWN.withDescription("MyException"));
     verify(span).end(EndSpanOptions.DEFAULT);
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
   }
 
   @Test
   public void withSpanCallable_WithExceptionNoMessage() {
     final Exception exception = new Exception();
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
     Callable<Object> callable =
         new Callable<Object>() {
           @Override
           public Object call() throws Exception {
             // When we run the runnable we will have the span in the current Context.
-            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameAs(span);
+            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameInstanceAs(span);
             throw exception;
           }
         };
     executeCallableAndExpectError(callable, exception);
     verify(span).setStatus(Status.UNKNOWN.withDescription("Exception"));
     verify(span).end(EndSpanOptions.DEFAULT);
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
   }
 
   @Test
   public void withSpanCallable_WithError() {
     final AssertionError error = new AssertionError("MyError");
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
     Callable<Object> callable =
         new Callable<Object>() {
           @Override
           public Object call() throws Exception {
             // When we run the runnable we will have the span in the current Context.
-            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameAs(span);
+            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameInstanceAs(span);
             throw error;
           }
         };
     executeCallableAndExpectError(callable, error);
     verify(span).setStatus(Status.UNKNOWN.withDescription("MyError"));
     verify(span).end(EndSpanOptions.DEFAULT);
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
   }
 
   @Test
   public void withSpanCallable_WithErrorNoMessage() {
     final AssertionError error = new AssertionError();
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
     Callable<Object> callable =
         new Callable<Object>() {
           @Override
           public Object call() throws Exception {
             // When we run the runnable we will have the span in the current Context.
-            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameAs(span);
+            assertThat(CurrentSpanUtils.getCurrentSpan()).isSameInstanceAs(span);
             throw error;
           }
         };
     executeCallableAndExpectError(callable, error);
     verify(span).setStatus(Status.UNKNOWN.withDescription("AssertionError"));
     verify(span).end(EndSpanOptions.DEFAULT);
-    assertThat(CurrentSpanUtils.getCurrentSpan()).isNull();
+    assertThat(CurrentSpanUtils.getCurrentSpan()).isEqualTo(BlankSpan.INSTANCE);
   }
 }
diff --git a/api/src/test/java/io/opencensus/trace/LinkTest.java b/api/src/test/java/io/opencensus/trace/LinkTest.java
index 5c1ebf5..a2038d8 100644
--- a/api/src/test/java/io/opencensus/trace/LinkTest.java
+++ b/api/src/test/java/io/opencensus/trace/LinkTest.java
@@ -102,11 +102,18 @@
     assertThat(link.toString()).contains(spanContext.getTraceId().toString());
     assertThat(link.toString()).contains(spanContext.getSpanId().toString());
     assertThat(link.toString()).contains("CHILD_LINKED_SPAN");
-    assertThat(link.toString()).contains(attributesMap.toString());
+    for (Map.Entry<String, AttributeValue> entry : attributesMap.entrySet()) {
+      // This depends on HashMap#toString(), via AbstractMap#toString(), having a specified format.
+      // In particular, each entry is formatted as `key=value`, with no spaces around the `=`.
+      // If Link is changed to use something other than a HashMap, this may no longer pass.
+      assertThat(link.toString()).contains(entry.getKey() + "=" + entry.getValue());
+    }
     link = Link.fromSpanContext(spanContext, Type.PARENT_LINKED_SPAN, attributesMap);
     assertThat(link.toString()).contains(spanContext.getTraceId().toString());
     assertThat(link.toString()).contains(spanContext.getSpanId().toString());
     assertThat(link.toString()).contains("PARENT_LINKED_SPAN");
-    assertThat(link.toString()).contains(attributesMap.toString());
+    for (Map.Entry<String, AttributeValue> entry : attributesMap.entrySet()) {
+      assertThat(link.toString()).contains(entry.getKey() + "=" + entry.getValue());
+    }
   }
 }
diff --git a/api/src/test/java/io/opencensus/trace/LowerCaseBase16EncodingTest.java b/api/src/test/java/io/opencensus/trace/LowerCaseBase16EncodingTest.java
deleted file mode 100644
index 3444d2b..0000000
--- a/api/src/test/java/io/opencensus/trace/LowerCaseBase16EncodingTest.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright 2018, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.trace;
-
-import static com.google.common.truth.Truth.assertThat;
-
-import java.nio.charset.Charset;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-/** Unit tests for {@link io.opencensus.trace.LowerCaseBase16Encoding}. */
-@RunWith(JUnit4.class)
-public class LowerCaseBase16EncodingTest {
-  private static final Charset CHARSET = Charset.forName("UTF-8");
-
-  @Rule public ExpectedException thrown = ExpectedException.none();
-
-  @Test
-  public void valid_EncodeDecode() {
-    testEncoding("", "");
-    testEncoding("f", "66");
-    testEncoding("fo", "666f");
-    testEncoding("foo", "666f6f");
-    testEncoding("foob", "666f6f62");
-    testEncoding("fooba", "666f6f6261");
-    testEncoding("foobar", "666f6f626172");
-  }
-
-  @Test
-  public void invalidDecodings_UnrecongnizedCharacters() {
-    // These contain bytes not in the decoding.
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Invalid character g");
-    LowerCaseBase16Encoding.decodeToBytes("efhg");
-  }
-
-  @Test
-  public void invalidDecodings_InvalidInputLength() {
-    // Valid base16 strings always have an even length.
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Invalid input length 3");
-    LowerCaseBase16Encoding.decodeToBytes("abc");
-  }
-
-  @Test
-  public void invalidDecodings_InvalidInputLengthAndCharacter() {
-    // These have a combination of invalid length and unrecognized characters.
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Invalid input length 1");
-    LowerCaseBase16Encoding.decodeToBytes("?");
-  }
-
-  private static void testEncoding(String decoded, String encoded) {
-    testEncodes(decoded, encoded);
-    testDecodes(encoded, decoded);
-  }
-
-  private static void testEncodes(String decoded, String encoded) {
-    assertThat(LowerCaseBase16Encoding.encodeToString(decoded.getBytes(CHARSET)))
-        .isEqualTo(encoded);
-  }
-
-  private static void testDecodes(String encoded, String decoded) {
-    assertThat(LowerCaseBase16Encoding.decodeToBytes(encoded)).isEqualTo(decoded.getBytes(CHARSET));
-  }
-}
diff --git a/api/src/test/java/io/opencensus/trace/SpanBuilderTest.java b/api/src/test/java/io/opencensus/trace/SpanBuilderTest.java
index 839c894..4445aba 100644
--- a/api/src/test/java/io/opencensus/trace/SpanBuilderTest.java
+++ b/api/src/test/java/io/opencensus/trace/SpanBuilderTest.java
@@ -49,47 +49,47 @@
 
   @Test
   public void startScopedSpan() {
-    assertThat(tracer.getCurrentSpan()).isSameAs(BlankSpan.INSTANCE);
+    assertThat(tracer.getCurrentSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
     Scope scope = spanBuilder.startScopedSpan();
     try {
-      assertThat(tracer.getCurrentSpan()).isSameAs(span);
+      assertThat(tracer.getCurrentSpan()).isSameInstanceAs(span);
     } finally {
       scope.close();
     }
     verify(span).end(EndSpanOptions.DEFAULT);
-    assertThat(tracer.getCurrentSpan()).isSameAs(BlankSpan.INSTANCE);
+    assertThat(tracer.getCurrentSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
   }
 
   @Test
   public void startSpanAndRun() {
-    assertThat(tracer.getCurrentSpan()).isSameAs(BlankSpan.INSTANCE);
+    assertThat(tracer.getCurrentSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
     spanBuilder.startSpanAndRun(
         new Runnable() {
           @Override
           public void run() {
-            assertThat(tracer.getCurrentSpan()).isSameAs(span);
+            assertThat(tracer.getCurrentSpan()).isSameInstanceAs(span);
           }
         });
     verify(span).end(EndSpanOptions.DEFAULT);
-    assertThat(tracer.getCurrentSpan()).isSameAs(BlankSpan.INSTANCE);
+    assertThat(tracer.getCurrentSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
   }
 
   @Test
   public void startSpanAndCall() throws Exception {
     final Object ret = new Object();
-    assertThat(tracer.getCurrentSpan()).isSameAs(BlankSpan.INSTANCE);
+    assertThat(tracer.getCurrentSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
     assertThat(
             spanBuilder.startSpanAndCall(
                 new Callable<Object>() {
                   @Override
                   public Object call() throws Exception {
-                    assertThat(tracer.getCurrentSpan()).isSameAs(span);
+                    assertThat(tracer.getCurrentSpan()).isSameInstanceAs(span);
                     return ret;
                   }
                 }))
         .isEqualTo(ret);
     verify(span).end(EndSpanOptions.DEFAULT);
-    assertThat(tracer.getCurrentSpan()).isSameAs(BlankSpan.INSTANCE);
+    assertThat(tracer.getCurrentSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
   }
 
   @Test
@@ -99,6 +99,6 @@
     spanBuilder.setRecordEvents(true);
     spanBuilder.setSampler(Samplers.alwaysSample());
     spanBuilder.setSpanKind(Kind.SERVER);
-    assertThat(spanBuilder.startSpan()).isSameAs(BlankSpan.INSTANCE);
+    assertThat(spanBuilder.startSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
   }
 }
diff --git a/api/src/test/java/io/opencensus/trace/SpanIdTest.java b/api/src/test/java/io/opencensus/trace/SpanIdTest.java
index 4a5bc2a..5a00406 100644
--- a/api/src/test/java/io/opencensus/trace/SpanIdTest.java
+++ b/api/src/test/java/io/opencensus/trace/SpanIdTest.java
@@ -52,6 +52,13 @@
   }
 
   @Test
+  public void fromLowerBase16_WithOffset() {
+    assertThat(SpanId.fromLowerBase16("XX0000000000000000AA", 2)).isEqualTo(SpanId.INVALID);
+    assertThat(SpanId.fromLowerBase16("YY0000000000000061BB", 2)).isEqualTo(first);
+    assertThat(SpanId.fromLowerBase16("ZZff00000000000041CC", 2)).isEqualTo(second);
+  }
+
+  @Test
   public void toLowerBase16() {
     assertThat(SpanId.INVALID.toLowerBase16()).isEqualTo("0000000000000000");
     assertThat(first.toLowerBase16()).isEqualTo("0000000000000061");
@@ -65,14 +72,14 @@
   }
 
   @Test
-  public void traceId_CompareTo() {
+  public void spanId_CompareTo() {
     assertThat(first.compareTo(second)).isGreaterThan(0);
     assertThat(second.compareTo(first)).isLessThan(0);
     assertThat(first.compareTo(SpanId.fromBytes(firstBytes))).isEqualTo(0);
   }
 
   @Test
-  public void traceId_EqualsAndHashCode() {
+  public void spanId_EqualsAndHashCode() {
     EqualsTester tester = new EqualsTester();
     tester.addEqualityGroup(SpanId.INVALID, SpanId.INVALID);
     tester.addEqualityGroup(first, SpanId.fromBytes(Arrays.copyOf(firstBytes, firstBytes.length)));
@@ -82,7 +89,7 @@
   }
 
   @Test
-  public void traceId_ToString() {
+  public void spanId_ToString() {
     assertThat(SpanId.INVALID.toString()).contains("0000000000000000");
     assertThat(first.toString()).contains("0000000000000061");
     assertThat(second.toString()).contains("ff00000000000041");
diff --git a/api/src/test/java/io/opencensus/trace/SpanTest.java b/api/src/test/java/io/opencensus/trace/SpanTest.java
index f7546ca..0b9b862 100644
--- a/api/src/test/java/io/opencensus/trace/SpanTest.java
+++ b/api/src/test/java/io/opencensus/trace/SpanTest.java
@@ -17,8 +17,8 @@
 package io.opencensus.trace;
 
 import static com.google.common.truth.Truth.assertThat;
-import static org.mockito.Matchers.eq;
-import static org.mockito.Matchers.same;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.ArgumentMatchers.same;
 import static org.mockito.Mockito.verify;
 
 import java.util.Collections;
diff --git a/api/src/test/java/io/opencensus/trace/TraceComponentTest.java b/api/src/test/java/io/opencensus/trace/TraceComponentTest.java
index 1c3f07d..f10478e 100644
--- a/api/src/test/java/io/opencensus/trace/TraceComponentTest.java
+++ b/api/src/test/java/io/opencensus/trace/TraceComponentTest.java
@@ -31,13 +31,14 @@
 public class TraceComponentTest {
   @Test
   public void defaultTracer() {
-    assertThat(TraceComponent.newNoopTraceComponent().getTracer()).isSameAs(Tracer.getNoopTracer());
+    assertThat(TraceComponent.newNoopTraceComponent().getTracer())
+        .isSameInstanceAs(Tracer.getNoopTracer());
   }
 
   @Test
   public void defaultBinaryPropagationHandler() {
     assertThat(TraceComponent.newNoopTraceComponent().getPropagationComponent())
-        .isSameAs(PropagationComponent.getNoopPropagationComponent());
+        .isSameInstanceAs(PropagationComponent.getNoopPropagationComponent());
   }
 
   @Test
@@ -54,6 +55,6 @@
   @Test
   public void defaultTraceConfig() {
     assertThat(TraceComponent.newNoopTraceComponent().getTraceConfig())
-        .isSameAs(TraceConfig.getNoopTraceConfig());
+        .isSameInstanceAs(TraceConfig.getNoopTraceConfig());
   }
 }
diff --git a/api/src/test/java/io/opencensus/trace/TraceIdTest.java b/api/src/test/java/io/opencensus/trace/TraceIdTest.java
index c8b5dc8..f292c82 100644
--- a/api/src/test/java/io/opencensus/trace/TraceIdTest.java
+++ b/api/src/test/java/io/opencensus/trace/TraceIdTest.java
@@ -53,6 +53,12 @@
   }
 
   @Test
+  public void getLowerLong() {
+    assertThat(first.getLowerLong()).isEqualTo(0);
+    assertThat(second.getLowerLong()).isEqualTo(-0xFF00000000000000L);
+  }
+
+  @Test
   public void fromLowerBase16() {
     assertThat(TraceId.fromLowerBase16("00000000000000000000000000000000"))
         .isEqualTo(TraceId.INVALID);
@@ -61,6 +67,15 @@
   }
 
   @Test
+  public void fromLowerBase16_WithOffset() {
+    assertThat(TraceId.fromLowerBase16("XX00000000000000000000000000000000CC", 2))
+        .isEqualTo(TraceId.INVALID);
+    assertThat(TraceId.fromLowerBase16("YY00000000000000000000000000000061AA", 2)).isEqualTo(first);
+    assertThat(TraceId.fromLowerBase16("ZZff000000000000000000000000000041BB", 2))
+        .isEqualTo(second);
+  }
+
+  @Test
   public void toLowerBase16() {
     assertThat(TraceId.INVALID.toLowerBase16()).isEqualTo("00000000000000000000000000000000");
     assertThat(first.toLowerBase16()).isEqualTo("00000000000000000000000000000061");
diff --git a/api/src/test/java/io/opencensus/trace/TraceOptionsTest.java b/api/src/test/java/io/opencensus/trace/TraceOptionsTest.java
index 3c46d09..1367bc9 100644
--- a/api/src/test/java/io/opencensus/trace/TraceOptionsTest.java
+++ b/api/src/test/java/io/opencensus/trace/TraceOptionsTest.java
@@ -56,6 +56,13 @@
   }
 
   @Test
+  public void toFromBase16() {
+    assertThat(TraceOptions.fromLowerBase16("ff", 0).toLowerBase16()).isEqualTo("ff");
+    assertThat(TraceOptions.fromLowerBase16("01", 0).toLowerBase16()).isEqualTo("01");
+    assertThat(TraceOptions.fromLowerBase16("06", 0).toLowerBase16()).isEqualTo("06");
+  }
+
+  @Test
   @SuppressWarnings("deprecation")
   public void deprecated_fromBytes() {
     assertThat(TraceOptions.fromBytes(new byte[] {FIRST_BYTE}).getByte()).isEqualTo(FIRST_BYTE);
diff --git a/api/src/test/java/io/opencensus/trace/TracerTest.java b/api/src/test/java/io/opencensus/trace/TracerTest.java
index 58dd4bb..b931953 100644
--- a/api/src/test/java/io/opencensus/trace/TracerTest.java
+++ b/api/src/test/java/io/opencensus/trace/TracerTest.java
@@ -17,7 +17,7 @@
 package io.opencensus.trace;
 
 import static com.google.common.truth.Truth.assertThat;
-import static org.mockito.Matchers.same;
+import static org.mockito.ArgumentMatchers.same;
 import static org.mockito.Mockito.verifyZeroInteractions;
 import static org.mockito.Mockito.when;
 
@@ -58,54 +58,54 @@
 
   @Test
   public void getCurrentSpan_WithSpan() {
-    assertThat(noopTracer.getCurrentSpan()).isSameAs(BlankSpan.INSTANCE);
+    assertThat(noopTracer.getCurrentSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
     Scope ws = noopTracer.withSpan(span);
     try {
-      assertThat(noopTracer.getCurrentSpan()).isSameAs(span);
+      assertThat(noopTracer.getCurrentSpan()).isSameInstanceAs(span);
     } finally {
       ws.close();
     }
-    assertThat(noopTracer.getCurrentSpan()).isSameAs(BlankSpan.INSTANCE);
+    assertThat(noopTracer.getCurrentSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
   }
 
   @Test
   public void wrapRunnable() {
     Runnable runnable;
-    assertThat(noopTracer.getCurrentSpan()).isSameAs(BlankSpan.INSTANCE);
+    assertThat(noopTracer.getCurrentSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
     runnable =
         tracer.withSpan(
             span,
             new Runnable() {
               @Override
               public void run() {
-                assertThat(noopTracer.getCurrentSpan()).isSameAs(span);
+                assertThat(noopTracer.getCurrentSpan()).isSameInstanceAs(span);
               }
             });
     // When we run the runnable we will have the span in the current Context.
     runnable.run();
     verifyZeroInteractions(span);
-    assertThat(noopTracer.getCurrentSpan()).isSameAs(BlankSpan.INSTANCE);
+    assertThat(noopTracer.getCurrentSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
   }
 
   @Test
   public void wrapCallable() throws Exception {
     final Object ret = new Object();
     Callable<Object> callable;
-    assertThat(noopTracer.getCurrentSpan()).isSameAs(BlankSpan.INSTANCE);
+    assertThat(noopTracer.getCurrentSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
     callable =
         tracer.withSpan(
             span,
             new Callable<Object>() {
               @Override
               public Object call() throws Exception {
-                assertThat(noopTracer.getCurrentSpan()).isSameAs(span);
+                assertThat(noopTracer.getCurrentSpan()).isSameInstanceAs(span);
                 return ret;
               }
             });
     // When we call the callable we will have the span in the current Context.
     assertThat(callable.call()).isEqualTo(ret);
     verifyZeroInteractions(span);
-    assertThat(noopTracer.getCurrentSpan()).isSameAs(BlankSpan.INSTANCE);
+    assertThat(noopTracer.getCurrentSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
   }
 
   @Test(expected = NullPointerException.class)
@@ -115,7 +115,7 @@
 
   @Test
   public void defaultSpanBuilderWithName() {
-    assertThat(noopTracer.spanBuilder(SPAN_NAME).startSpan()).isSameAs(BlankSpan.INSTANCE);
+    assertThat(noopTracer.spanBuilder(SPAN_NAME).startSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
   }
 
   @Test(expected = NullPointerException.class)
@@ -126,7 +126,7 @@
   @Test
   public void defaultSpanBuilderWithParentAndName() {
     assertThat(noopTracer.spanBuilderWithExplicitParent(SPAN_NAME, null).startSpan())
-        .isSameAs(BlankSpan.INSTANCE);
+        .isSameInstanceAs(BlankSpan.INSTANCE);
   }
 
   @Test(expected = NullPointerException.class)
@@ -137,23 +137,23 @@
   @Test
   public void defaultSpanBuilderWithRemoteParent_NullParent() {
     assertThat(noopTracer.spanBuilderWithRemoteParent(SPAN_NAME, null).startSpan())
-        .isSameAs(BlankSpan.INSTANCE);
+        .isSameInstanceAs(BlankSpan.INSTANCE);
   }
 
   @Test
   public void defaultSpanBuilderWithRemoteParent() {
     assertThat(noopTracer.spanBuilderWithRemoteParent(SPAN_NAME, SpanContext.INVALID).startSpan())
-        .isSameAs(BlankSpan.INSTANCE);
+        .isSameInstanceAs(BlankSpan.INSTANCE);
   }
 
   @Test
   public void startSpanWithParentFromContext() {
     Scope ws = tracer.withSpan(span);
     try {
-      assertThat(tracer.getCurrentSpan()).isSameAs(span);
+      assertThat(tracer.getCurrentSpan()).isSameInstanceAs(span);
       when(tracer.spanBuilderWithExplicitParent(same(SPAN_NAME), same(span)))
           .thenReturn(spanBuilder);
-      assertThat(tracer.spanBuilder(SPAN_NAME)).isSameAs(spanBuilder);
+      assertThat(tracer.spanBuilder(SPAN_NAME)).isSameInstanceAs(spanBuilder);
     } finally {
       ws.close();
     }
@@ -163,10 +163,10 @@
   public void startSpanWithInvalidParentFromContext() {
     Scope ws = tracer.withSpan(BlankSpan.INSTANCE);
     try {
-      assertThat(tracer.getCurrentSpan()).isSameAs(BlankSpan.INSTANCE);
+      assertThat(tracer.getCurrentSpan()).isSameInstanceAs(BlankSpan.INSTANCE);
       when(tracer.spanBuilderWithExplicitParent(same(SPAN_NAME), same(BlankSpan.INSTANCE)))
           .thenReturn(spanBuilder);
-      assertThat(tracer.spanBuilder(SPAN_NAME)).isSameAs(spanBuilder);
+      assertThat(tracer.spanBuilder(SPAN_NAME)).isSameInstanceAs(spanBuilder);
     } finally {
       ws.close();
     }
diff --git a/api/src/test/java/io/opencensus/trace/TracingTest.java b/api/src/test/java/io/opencensus/trace/TracingTest.java
index e7c93a9..233d5cc 100644
--- a/api/src/test/java/io/opencensus/trace/TracingTest.java
+++ b/api/src/test/java/io/opencensus/trace/TracingTest.java
@@ -61,13 +61,13 @@
 
   @Test
   public void defaultTracer() {
-    assertThat(Tracing.getTracer()).isSameAs(Tracer.getNoopTracer());
+    assertThat(Tracing.getTracer()).isSameInstanceAs(Tracer.getNoopTracer());
   }
 
   @Test
   public void defaultBinaryPropagationHandler() {
     assertThat(Tracing.getPropagationComponent())
-        .isSameAs(PropagationComponent.getNoopPropagationComponent());
+        .isSameInstanceAs(PropagationComponent.getNoopPropagationComponent());
   }
 
   @Test
@@ -78,6 +78,6 @@
 
   @Test
   public void defaultTraceConfig() {
-    assertThat(Tracing.getTraceConfig()).isSameAs(TraceConfig.getNoopTraceConfig());
+    assertThat(Tracing.getTraceConfig()).isSameInstanceAs(TraceConfig.getNoopTraceConfig());
   }
 }
diff --git a/api/src/test/java/io/opencensus/trace/propagation/PropagationComponentTest.java b/api/src/test/java/io/opencensus/trace/propagation/PropagationComponentTest.java
index ba64e98..81cabef 100644
--- a/api/src/test/java/io/opencensus/trace/propagation/PropagationComponentTest.java
+++ b/api/src/test/java/io/opencensus/trace/propagation/PropagationComponentTest.java
@@ -31,6 +31,17 @@
   @Test
   public void implementationOfBinaryFormat() {
     assertThat(propagationComponent.getBinaryFormat())
-        .isEqualTo(BinaryFormat.getNoopBinaryFormat());
+        .isSameInstanceAs(BinaryFormat.getNoopBinaryFormat());
+  }
+
+  @Test
+  public void implementationOfB3Format() {
+    assertThat(propagationComponent.getB3Format()).isSameInstanceAs(TextFormat.getNoopTextFormat());
+  }
+
+  @Test
+  public void implementationOfTraceContextFormat() {
+    assertThat(propagationComponent.getTraceContextFormat())
+        .isSameInstanceAs(TextFormat.getNoopTextFormat());
   }
 }
diff --git a/api/src/test/java/io/opencensus/trace/propagation/TextFormatTest.java b/api/src/test/java/io/opencensus/trace/propagation/TextFormatTest.java
index c2e6e12..e4e05ad 100644
--- a/api/src/test/java/io/opencensus/trace/propagation/TextFormatTest.java
+++ b/api/src/test/java/io/opencensus/trace/propagation/TextFormatTest.java
@@ -70,6 +70,6 @@
                     return null;
                   }
                 }))
-        .isSameAs(SpanContext.INVALID);
+        .isSameInstanceAs(SpanContext.INVALID);
   }
 }
diff --git a/api/src/test/java/io/opencensus/trace/samplers/SamplersTest.java b/api/src/test/java/io/opencensus/trace/samplers/SamplersTest.java
index 7a46e97..243119d 100644
--- a/api/src/test/java/io/opencensus/trace/samplers/SamplersTest.java
+++ b/api/src/test/java/io/opencensus/trace/samplers/SamplersTest.java
@@ -270,12 +270,12 @@
 
   @Test
   public void probabilitySampler_getDescription() {
-    assertThat((Samplers.probabilitySampler(0.5)).getDescription())
+    assertThat(Samplers.probabilitySampler(0.5).getDescription())
         .isEqualTo(String.format("ProbabilitySampler{%.6f}", 0.5));
   }
 
   @Test
   public void probabilitySampler_ToString() {
-    assertThat((Samplers.probabilitySampler(0.5)).toString()).contains("0.5");
+    assertThat(Samplers.probabilitySampler(0.5).toString()).contains("0.5");
   }
 }
diff --git a/api/src/test/java/io/opencensus/trace/unsafe/ContextUtilsTest.java b/api/src/test/java/io/opencensus/trace/unsafe/ContextUtilsTest.java
new file mode 100644
index 0000000..97a9cfc
--- /dev/null
+++ b/api/src/test/java/io/opencensus/trace/unsafe/ContextUtilsTest.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.trace.unsafe;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.trace.BlankSpan;
+import io.opencensus.trace.ContextHandle;
+import io.opencensus.trace.Span;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link ContextUtils}. */
+@RunWith(JUnit4.class)
+public class ContextUtilsTest {
+
+  @Test
+  public void testGetCurrentSpan_DefaultContext() {
+    Span span = ContextHandleUtils.getValue(ContextHandleUtils.currentContext());
+    assertThat(span).isEqualTo(BlankSpan.INSTANCE);
+  }
+
+  @Test
+  public void testGetCurrentSpan_ContextSetToNull() {
+    ContextHandle orig =
+        ContextHandleUtils.withValue(ContextHandleUtils.currentContext(), null).attach();
+    try {
+      Span span = ContextHandleUtils.getValue(ContextHandleUtils.currentContext());
+      // ContextUtils.getValue always returns non-null.
+      assertThat(span).isEqualTo(BlankSpan.INSTANCE);
+    } finally {
+      ContextHandleUtils.currentContext().detach(orig);
+    }
+  }
+
+  @Test
+  public void testTryExtractGrpcContext_WillNotThrow() {
+    assertThat(ContextHandleUtils.tryExtractGrpcContext(ContextHandleUtils.currentContext()))
+        .isNotNull();
+  }
+}
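
Reviewer note: CurrentSpanUtilsTest and the new trace ContextUtilsTest above switch from manipulating io.grpc.Context directly to the ContextHandle/ContextHandleUtils abstraction. A minimal before/after sketch of the attach/detach pattern, using only calls that appear in the hunks above (surrounding test setup omitted, so this is illustrative rather than a complete test):

    // Before: gRPC context with the unsafe span key.
    Context orig = Context.current().withValue(ContextUtils.CONTEXT_SPAN_KEY, span).attach();
    try {
      // ... assertions on CurrentSpanUtils.getCurrentSpan() ...
    } finally {
      Context.current().detach(orig);
    }

    // After: the context-handle abstraction, which no longer exposes the key.
    ContextHandle origHandle =
        ContextHandleUtils.withValue(ContextHandleUtils.currentContext(), span).attach();
    try {
      // ... same assertions ...
    } finally {
      ContextHandleUtils.currentContext().detach(origHandle);
    }
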
diff --git a/appveyor.yml b/appveyor.yml
index 34493a9..6d34d16 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -5,6 +5,6 @@
   # The Gradle build script runs the integration tests of contrib/agent using different Java
   # versions. %JAVA_HOMES% lists the home directories of the JDK installations used for
   # integration testing. Also see https://www.appveyor.com/docs/build-environment/#java.
-  - set JAVA_HOMES=C:\Program Files\Java\jdk1.6.0\jre;C:\Program Files\Java\jdk1.7.0\jre;C:\Program Files\Java\jdk1.8.0\jre
+  - set JAVA_HOMES=C:\Program Files\Java\jdk1.8.0\jre
   - gradlew.bat clean assemble check --stacktrace
   - pushd examples && gradlew.bat clean assemble check --stacktrace && popd
diff --git a/benchmarks/src/jmh/java/io/opencensus/benchmarks/stats/RecordBatchedBenchmark.java b/benchmarks/src/jmh/java/io/opencensus/benchmarks/stats/RecordBatchedBenchmark.java
new file mode 100644
index 0000000..db24ab7
--- /dev/null
+++ b/benchmarks/src/jmh/java/io/opencensus/benchmarks/stats/RecordBatchedBenchmark.java
@@ -0,0 +1,170 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.benchmarks.stats;
+
+import io.opencensus.benchmarks.tags.TagsBenchmarksUtil;
+import io.opencensus.stats.MeasureMap;
+import io.opencensus.stats.StatsRecorder;
+import io.opencensus.stats.ViewManager;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.Tagger;
+import java.util.concurrent.TimeUnit;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+
+/** Benchmarks for {@link io.opencensus.stats.StatsRecorder}. */
+public class RecordBatchedBenchmark {
+  @State(org.openjdk.jmh.annotations.Scope.Benchmark)
+  public static class Data {
+    @Param({"0", "1", "2", "3", "6", "8"})
+    int numValues;
+
+    @Param({"impl", "impl-lite"})
+    String implementation;
+
+    private StatsRecorder recorder;
+    private Tagger tagger;
+    private TagContext tags;
+
+    @Setup
+    public void setup() throws Exception {
+      ViewManager manager = StatsBenchmarksUtil.getViewManager(implementation);
+      recorder = StatsBenchmarksUtil.getStatsRecorder(implementation);
+      tagger = TagsBenchmarksUtil.getTagger(implementation);
+      tags = TagsBenchmarksUtil.createTagContext(tagger.emptyBuilder(), 1);
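+      // Register double and long views for each aggregation type per recorded value.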
+      for (int i = 0; i < numValues; i++) {
+        manager.registerView(StatsBenchmarksUtil.DOUBLE_COUNT_VIEWS[i]);
+        manager.registerView(StatsBenchmarksUtil.LONG_COUNT_VIEWS[i]);
+        manager.registerView(StatsBenchmarksUtil.DOUBLE_SUM_VIEWS[i]);
+        manager.registerView(StatsBenchmarksUtil.LONG_SUM_VIEWS[i]);
+        manager.registerView(StatsBenchmarksUtil.DOUBLE_DISTRIBUTION_VIEWS[i]);
+        manager.registerView(StatsBenchmarksUtil.LONG_DISTRIBUTION_VIEWS[i]);
+        manager.registerView(StatsBenchmarksUtil.DOUBLE_LASTVALUE_VIEWS[i]);
+        manager.registerView(StatsBenchmarksUtil.LONG_LASTVALUE_VIEWS[i]);
+      }
+    }
+  }
+
+  /** Record batched double count measures. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordBatchedDoubleCount(Data data) {
+    MeasureMap map = data.recorder.newMeasureMap();
+    for (int i = 0; i < data.numValues; i++) {
+      map.put(StatsBenchmarksUtil.DOUBLE_COUNT_MEASURES[i], (double) i);
+    }
+    map.record(data.tags);
+    return map;
+  }
+
+  /** Record batched long count measures. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordBatchedLongCount(Data data) {
+    MeasureMap map = data.recorder.newMeasureMap();
+    for (int i = 0; i < data.numValues; i++) {
+      map.put(StatsBenchmarksUtil.LONG_COUNT_MEASURES[i], i);
+    }
+    map.record(data.tags);
+    return map;
+  }
+
+  /** Record batched double sum measures. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordBatchedDoubleSum(Data data) {
+    MeasureMap map = data.recorder.newMeasureMap();
+    for (int i = 0; i < data.numValues; i++) {
+      map.put(StatsBenchmarksUtil.DOUBLE_SUM_MEASURES[i], (double) i);
+    }
+    map.record(data.tags);
+    return map;
+  }
+
+  /** Record batched long sum measures. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordBatchedLongSum(Data data) {
+    MeasureMap map = data.recorder.newMeasureMap();
+    for (int i = 0; i < data.numValues; i++) {
+      map.put(StatsBenchmarksUtil.LONG_SUM_MEASURES[i], i);
+    }
+    map.record(data.tags);
+    return map;
+  }
+
+  /** Record batched double distribution measures. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordBatchedDoubleDistribution(Data data) {
+    MeasureMap map = data.recorder.newMeasureMap();
+    for (int i = 0; i < data.numValues; i++) {
+      map.put(StatsBenchmarksUtil.DOUBLE_DISTRIBUTION_MEASURES[i], (double) i);
+    }
+    map.record(data.tags);
+    return map;
+  }
+
+  /** Record batched long distribution measures. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordBatchedLongDistribution(Data data) {
+    MeasureMap map = data.recorder.newMeasureMap();
+    for (int i = 0; i < data.numValues; i++) {
+      map.put(StatsBenchmarksUtil.LONG_DISTRIBUTION_MEASURES[i], i);
+    }
+    map.record(data.tags);
+    return map;
+  }
+
+  /** Record batched double last value measures. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordBatchedDoubleLastValue(Data data) {
+    MeasureMap map = data.recorder.newMeasureMap();
+    for (int i = 0; i < data.numValues; i++) {
+      map.put(StatsBenchmarksUtil.DOUBLE_LASTVALUE_MEASURES[i], (double) i);
+    }
+    map.record(data.tags);
+    return map;
+  }
+
+  /** Record batched long last value measures. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordBatchedLongLastValue(Data data) {
+    MeasureMap map = data.recorder.newMeasureMap();
+    for (int i = 0; i < data.numValues; i++) {
+      map.put(StatsBenchmarksUtil.LONG_LASTVALUE_MEASURES[i], i);
+    }
+    map.record(data.tags);
+    return map;
+  }
+}
diff --git a/benchmarks/src/jmh/java/io/opencensus/benchmarks/stats/RecordDifferentTagValuesBenchmark.java b/benchmarks/src/jmh/java/io/opencensus/benchmarks/stats/RecordDifferentTagValuesBenchmark.java
new file mode 100644
index 0000000..947a8a6
--- /dev/null
+++ b/benchmarks/src/jmh/java/io/opencensus/benchmarks/stats/RecordDifferentTagValuesBenchmark.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.benchmarks.stats;
+
+import io.opencensus.benchmarks.tags.TagsBenchmarksUtil;
+import io.opencensus.stats.Measure;
+import io.opencensus.stats.MeasureMap;
+import io.opencensus.stats.StatsRecorder;
+import io.opencensus.stats.ViewManager;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.Tagger;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+
+/** Benchmarks for {@link io.opencensus.stats.StatsRecorder}. */
+public class RecordDifferentTagValuesBenchmark {
+  @State(org.openjdk.jmh.annotations.Scope.Benchmark)
+  public static class Data {
+    @Param({"0", "1", "2", "3", "6", "8"})
+    int numTags;
+
+    @Param({"impl", "impl-lite"})
+    String implementation;
+
+    private StatsRecorder recorder;
+    private ViewManager manager;
+    private Tagger tagger;
+    private List<TagContext> contexts;
+
+    @Setup
+    public void setup() throws Exception {
+      manager = StatsBenchmarksUtil.getViewManager(implementation);
+      recorder = StatsBenchmarksUtil.getStatsRecorder(implementation);
+      tagger = TagsBenchmarksUtil.getTagger(implementation);
+      contexts = createContexts(numTags);
+      manager.registerView(StatsBenchmarksUtil.DOUBLE_COUNT_VIEWS[0]);
+      manager.registerView(StatsBenchmarksUtil.LONG_COUNT_VIEWS[0]);
+      manager.registerView(StatsBenchmarksUtil.DOUBLE_SUM_VIEWS[0]);
+      manager.registerView(StatsBenchmarksUtil.LONG_SUM_VIEWS[0]);
+      manager.registerView(StatsBenchmarksUtil.DOUBLE_DISTRIBUTION_VIEWS[0]);
+      manager.registerView(StatsBenchmarksUtil.LONG_DISTRIBUTION_VIEWS[0]);
+      manager.registerView(StatsBenchmarksUtil.DOUBLE_LASTVALUE_VIEWS[0]);
+      manager.registerView(StatsBenchmarksUtil.LONG_LASTVALUE_VIEWS[0]);
+    }
+
+    // creates 'size' tag contexts mapping "key0" -> "valueN"
+    private List<TagContext> createContexts(int size) {
+      TagContext[] contexts = new TagContext[size];
+      for (int i = 0; i < size; i++) {
+        contexts[i] =
+            tagger
+                .emptyBuilder()
+                .put(
+                    TagsBenchmarksUtil.TAG_KEYS.get(0),
+                    TagsBenchmarksUtil.TAG_VALUES.get(i),
+                    TagsBenchmarksUtil.UNLIMITED_PROPAGATION)
+                .build();
+      }
+      return Arrays.asList(contexts);
+    }
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordDoubleCount(Data data) {
+    return record(data, StatsBenchmarksUtil.DOUBLE_COUNT_MEASURES[0], (double) 11);
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordLongCount(Data data) {
+    return record(data, StatsBenchmarksUtil.LONG_COUNT_MEASURES[0], 11);
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordDoubleSum(Data data) {
+    return record(data, StatsBenchmarksUtil.DOUBLE_SUM_MEASURES[0], (double) 11);
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordLongSum(Data data) {
+    return record(data, StatsBenchmarksUtil.LONG_SUM_MEASURES[0], 11);
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordDoubleDistribution(Data data) {
+    return record(data, StatsBenchmarksUtil.DOUBLE_DISTRIBUTION_MEASURES[0], (double) 11);
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordLongDistribution(Data data) {
+    return record(data, StatsBenchmarksUtil.LONG_DISTRIBUTION_MEASURES[0], 11);
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordDoubleLastValue(Data data) {
+    return record(data, StatsBenchmarksUtil.DOUBLE_LASTVALUE_MEASURES[0], (double) 11);
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordLongLastValue(Data data) {
+    return record(data, StatsBenchmarksUtil.LONG_LASTVALUE_MEASURES[0], 11);
+  }
+
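+  // Puts a single measurement and records it once per pre-built tag context.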
+  private static MeasureMap record(Data data, Measure.MeasureLong measure, int value) {
+    MeasureMap map = data.recorder.newMeasureMap();
+    map.put(measure, value);
+    for (TagContext tags : data.contexts) {
+      map.record(tags);
+    }
+    return map;
+  }
+
+  private static MeasureMap record(Data data, Measure.MeasureDouble measure, double value) {
+    MeasureMap map = data.recorder.newMeasureMap();
+    map.put(measure, value);
+    for (TagContext tags : data.contexts) {
+      map.record(tags);
+    }
+    return map;
+  }
+}
diff --git a/benchmarks/src/jmh/java/io/opencensus/benchmarks/stats/RecordMultipleViewsBenchmark.java b/benchmarks/src/jmh/java/io/opencensus/benchmarks/stats/RecordMultipleViewsBenchmark.java
new file mode 100644
index 0000000..1345a8b
--- /dev/null
+++ b/benchmarks/src/jmh/java/io/opencensus/benchmarks/stats/RecordMultipleViewsBenchmark.java
@@ -0,0 +1,194 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.benchmarks.stats;
+
+import io.opencensus.benchmarks.tags.TagsBenchmarksUtil;
+import io.opencensus.stats.Aggregation;
+import io.opencensus.stats.Measure;
+import io.opencensus.stats.MeasureMap;
+import io.opencensus.stats.StatsRecorder;
+import io.opencensus.stats.ViewManager;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.TagContextBuilder;
+import io.opencensus.tags.Tagger;
+import java.util.concurrent.TimeUnit;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+
+/** Benchmarks for {@link io.opencensus.stats.StatsRecorder}. */
+public class RecordMultipleViewsBenchmark {
+  @State(org.openjdk.jmh.annotations.Scope.Benchmark)
+  public static class Data {
+    @Param({"0", "1", "2", "3", "6", "8"})
+    int numViews;
+
+    @Param({"impl", "impl-lite"})
+    String implementation;
+
+    private StatsRecorder recorder;
+    private Tagger tagger;
+    private TagContext tagContext;
+
+    @Setup
+    public void setup() throws Exception {
+      ViewManager manager = StatsBenchmarksUtil.getViewManager(implementation);
+      recorder = StatsBenchmarksUtil.getStatsRecorder(implementation);
+      tagger = TagsBenchmarksUtil.getTagger(implementation);
+      tagContext = createContext(numViews);
+
+      for (int i = 0; i < numViews; i++) {
+        // count
+        manager.registerView(
+            StatsBenchmarksUtil.createView(
+                "DC" + i,
+                StatsBenchmarksUtil.DOUBLE_COUNT_MEASURES[0],
+                Aggregation.Count.create(),
+                TagsBenchmarksUtil.TAG_KEYS.get(i)));
+        manager.registerView(
+            StatsBenchmarksUtil.createView(
+                "LC" + i,
+                StatsBenchmarksUtil.LONG_COUNT_MEASURES[0],
+                Aggregation.Count.create(),
+                TagsBenchmarksUtil.TAG_KEYS.get(i)));
+        // sum
+        manager.registerView(
+            StatsBenchmarksUtil.createView(
+                "DS" + i,
+                StatsBenchmarksUtil.DOUBLE_SUM_MEASURES[0],
+                Aggregation.Sum.create(),
+                TagsBenchmarksUtil.TAG_KEYS.get(i)));
+        manager.registerView(
+            StatsBenchmarksUtil.createView(
+                "LS" + i,
+                StatsBenchmarksUtil.LONG_SUM_MEASURES[0],
+                Aggregation.Sum.create(),
+                TagsBenchmarksUtil.TAG_KEYS.get(i)));
+        // distribution
+        manager.registerView(
+            StatsBenchmarksUtil.createView(
+                "DD" + i,
+                StatsBenchmarksUtil.DOUBLE_DISTRIBUTION_MEASURES[0],
+                StatsBenchmarksUtil.DISTRIBUTION,
+                TagsBenchmarksUtil.TAG_KEYS.get(i)));
+        manager.registerView(
+            StatsBenchmarksUtil.createView(
+                "LD" + i,
+                StatsBenchmarksUtil.LONG_DISTRIBUTION_MEASURES[0],
+                StatsBenchmarksUtil.DISTRIBUTION,
+                TagsBenchmarksUtil.TAG_KEYS.get(i)));
+        // last value
+        manager.registerView(
+            StatsBenchmarksUtil.createView(
+                "DL" + i,
+                StatsBenchmarksUtil.DOUBLE_LASTVALUE_MEASURES[0],
+                Aggregation.LastValue.create(),
+                TagsBenchmarksUtil.TAG_KEYS.get(i)));
+        manager.registerView(
+            StatsBenchmarksUtil.createView(
+                "LL" + i,
+                StatsBenchmarksUtil.LONG_LASTVALUE_MEASURES[0],
+                Aggregation.LastValue.create(),
+                TagsBenchmarksUtil.TAG_KEYS.get(i)));
+      }
+    }
+
+    // creates tag context with n tags mapping "keyN" -> "value0"
+    private TagContext createContext(int size) {
+      TagContextBuilder builder = tagger.emptyBuilder();
+      for (int i = 0; i < size; i++) {
+        builder.put(
+            TagsBenchmarksUtil.TAG_KEYS.get(i),
+            TagsBenchmarksUtil.TAG_VALUES.get(0),
+            TagsBenchmarksUtil.UNLIMITED_PROPAGATION);
+      }
+      return builder.build();
+    }
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordDoubleCount(Data data) {
+    return record(data, StatsBenchmarksUtil.DOUBLE_COUNT_MEASURES[0], (double) 11);
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordLongCount(Data data) {
+    return record(data, StatsBenchmarksUtil.LONG_COUNT_MEASURES[0], 11);
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordDoubleSum(Data data) {
+    return record(data, StatsBenchmarksUtil.DOUBLE_SUM_MEASURES[0], (double) 11);
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordLongSum(Data data) {
+    return record(data, StatsBenchmarksUtil.LONG_SUM_MEASURES[0], 11);
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordDoubleDistribution(Data data) {
+    return record(data, StatsBenchmarksUtil.DOUBLE_DISTRIBUTION_MEASURES[0], (double) 11);
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordLongDistribution(Data data) {
+    return record(data, StatsBenchmarksUtil.LONG_DISTRIBUTION_MEASURES[0], 11);
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordDoubleLastValue(Data data) {
+    return record(data, StatsBenchmarksUtil.DOUBLE_LASTVALUE_MEASURES[0], (double) 11);
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MeasureMap recordLongLastValue(Data data) {
+    return record(data, StatsBenchmarksUtil.LONG_LASTVALUE_MEASURES[0], 11);
+  }
+
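+  // Puts a single measurement and records it against the shared tag context, updating every
+  // view registered on that measure.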
+  private static MeasureMap record(Data data, Measure.MeasureLong measure, int value) {
+    MeasureMap map = data.recorder.newMeasureMap();
+    map.put(measure, value).record(data.tagContext);
+    return map;
+  }
+
+  private static MeasureMap record(Data data, Measure.MeasureDouble measure, double value) {
+    MeasureMap map = data.recorder.newMeasureMap();
+    map.put(measure, value).record(data.tagContext);
+    return map;
+  }
+}
diff --git a/benchmarks/src/jmh/java/io/opencensus/benchmarks/stats/StatsBenchmarksUtil.java b/benchmarks/src/jmh/java/io/opencensus/benchmarks/stats/StatsBenchmarksUtil.java
new file mode 100644
index 0000000..ee0c52f
--- /dev/null
+++ b/benchmarks/src/jmh/java/io/opencensus/benchmarks/stats/StatsBenchmarksUtil.java
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.benchmarks.stats;
+
+import static io.opencensus.benchmarks.tags.TagsBenchmarksUtil.TAG_KEYS;
+
+import io.opencensus.impl.stats.StatsComponentImpl;
+import io.opencensus.impllite.stats.StatsComponentImplLite;
+import io.opencensus.stats.Aggregation;
+import io.opencensus.stats.BucketBoundaries;
+import io.opencensus.stats.Measure;
+import io.opencensus.stats.StatsRecorder;
+import io.opencensus.stats.View;
+import io.opencensus.stats.ViewManager;
+import io.opencensus.tags.TagKey;
+import java.util.Arrays;
+
+/** Util class for Benchmarks. */
+final class StatsBenchmarksUtil {
+  private static final StatsComponentImpl statsComponentImpl = new StatsComponentImpl();
+  private static final StatsComponentImplLite statsComponentImplLite = new StatsComponentImplLite();
+
+  private static final int MEASURES = 8;
+  private static final int VIEWS = 8;
+
+  static final Aggregation.Distribution DISTRIBUTION =
+      Aggregation.Distribution.create(
+          BucketBoundaries.create(Arrays.asList(0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0)));
+
+  static final Measure.MeasureDouble[] DOUBLE_COUNT_MEASURES =
+      createMeasureDoubles(MEASURES, "Count");
+  static final Measure.MeasureLong[] LONG_COUNT_MEASURES = createMeasureLongs(MEASURES, "Count");
+
+  static final Measure.MeasureDouble[] DOUBLE_SUM_MEASURES = createMeasureDoubles(MEASURES, "Sum");
+  static final Measure.MeasureLong[] LONG_SUM_MEASURES = createMeasureLongs(MEASURES, "Sum");
+
+  static final Measure.MeasureDouble[] DOUBLE_DISTRIBUTION_MEASURES =
+      createMeasureDoubles(MEASURES, "Distribution");
+  static final Measure.MeasureLong[] LONG_DISTRIBUTION_MEASURES =
+      createMeasureLongs(MEASURES, "Distribution");
+
+  static final Measure.MeasureDouble[] DOUBLE_LASTVALUE_MEASURES =
+      createMeasureDoubles(MEASURES, "LastValue");
+  static final Measure.MeasureLong[] LONG_LASTVALUE_MEASURES =
+      createMeasureLongs(MEASURES, "LastValue");
+
+  static final View[] DOUBLE_COUNT_VIEWS =
+      createViews(VIEWS, DOUBLE_COUNT_MEASURES, Aggregation.Count.create(), TAG_KEYS.get(0));
+  static final View[] LONG_COUNT_VIEWS =
+      createViews(VIEWS, LONG_COUNT_MEASURES, Aggregation.Count.create(), TAG_KEYS.get(0));
+
+  static final View[] DOUBLE_SUM_VIEWS =
+      createViews(VIEWS, DOUBLE_SUM_MEASURES, Aggregation.Sum.create(), TAG_KEYS.get(0));
+  static final View[] LONG_SUM_VIEWS =
+      createViews(VIEWS, LONG_SUM_MEASURES, Aggregation.Sum.create(), TAG_KEYS.get(0));
+
+  static final View[] DOUBLE_DISTRIBUTION_VIEWS =
+      createViews(VIEWS, DOUBLE_DISTRIBUTION_MEASURES, DISTRIBUTION, TAG_KEYS.get(0));
+  static final View[] LONG_DISTRIBUTION_VIEWS =
+      createViews(VIEWS, LONG_DISTRIBUTION_MEASURES, DISTRIBUTION, TAG_KEYS.get(0));
+
+  static final View[] DOUBLE_LASTVALUE_VIEWS =
+      createViews(
+          VIEWS, DOUBLE_LASTVALUE_MEASURES, Aggregation.LastValue.create(), TAG_KEYS.get(0));
+  static final View[] LONG_LASTVALUE_VIEWS =
+      createViews(VIEWS, LONG_LASTVALUE_MEASURES, Aggregation.LastValue.create(), TAG_KEYS.get(0));
+
+  static StatsRecorder getStatsRecorder(String implementation) {
+    if (implementation.equals("impl")) {
+      // A dedicated StatsComponentImpl is used here; when the impl artifact is linked, this is
+      // the same implementation that backs the global stats component.
+      // TODO(bdrutu): Make everything not be a singleton (disruptor, etc.) and use a new
+      // TraceComponentImpl similar to TraceComponentImplLite.
+      return statsComponentImpl.getStatsRecorder();
+    } else if (implementation.equals("impl-lite")) {
+      return statsComponentImplLite.getStatsRecorder();
+    } else {
+      throw new RuntimeException("Invalid stats recorder implementation specified.");
+    }
+  }
+
+  static ViewManager getViewManager(String implementation) {
+    if (implementation.equals("impl")) {
+      // A dedicated StatsComponentImpl is used here; when the impl artifact is linked, this is
+      // the same implementation that backs the global stats component.
+      // TODO(bdrutu): Make everything not be a singleton (disruptor, etc.) and use a new
+      // TraceComponentImpl similar to TraceComponentImplLite.
+      return statsComponentImpl.getViewManager();
+    } else if (implementation.equals("impl-lite")) {
+      return statsComponentImplLite.getViewManager();
+    } else {
+      throw new RuntimeException("Invalid view manager implementation specified.");
+    }
+  }
+
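+  // Creates one view per measure, all sharing the given aggregation and tag keys.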
+  private static View[] createViews(
+      int size, Measure[] measures, Aggregation aggregation, TagKey... keys) {
+    View[] views = new View[size];
+    for (int i = 0; i < size; i++) {
+      views[i] = createView(measures[i].getName(), measures[i], aggregation, keys);
+    }
+    return views;
+  }
+
+  static View createView(String name, Measure measure, Aggregation aggregation, TagKey... keys) {
+    return View.create(View.Name.create(name), "", measure, aggregation, Arrays.asList(keys));
+  }
+
+  private static Measure.MeasureDouble[] createMeasureDoubles(int size, String name) {
+    Measure.MeasureDouble[] measures = new Measure.MeasureDouble[size];
+    for (int i = 0; i < size; i++) {
+      measures[i] = Measure.MeasureDouble.create(name + "_MD" + i, "", "ns");
+    }
+    return measures;
+  }
+
+  private static Measure.MeasureLong[] createMeasureLongs(int size, String name) {
+    Measure.MeasureLong[] measures = new Measure.MeasureLong[size];
+    for (int i = 0; i < size; i++) {
+      measures[i] = Measure.MeasureLong.create(name + "_ML" + i, "", "ns");
+    }
+    return measures;
+  }
+
+  // Avoid instances of this class.
+  private StatsBenchmarksUtil() {}
+}
diff --git a/benchmarks/src/jmh/java/io/opencensus/benchmarks/tags/NestedTagContextCreationBenchmark.java b/benchmarks/src/jmh/java/io/opencensus/benchmarks/tags/NestedTagContextCreationBenchmark.java
new file mode 100644
index 0000000..cfe5df0
--- /dev/null
+++ b/benchmarks/src/jmh/java/io/opencensus/benchmarks/tags/NestedTagContextCreationBenchmark.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.benchmarks.tags;
+
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.Tagger;
+import java.util.concurrent.TimeUnit;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+
+/** Benchmarks for {@link io.opencensus.tags.TagContextBuilder}. */
+public class NestedTagContextCreationBenchmark {
+
+  @State(org.openjdk.jmh.annotations.Scope.Benchmark)
+  public static class Data {
+    private Tagger tagger;
+    private TagContext baseTagContext;
+
+    @Param({"impl", "impl-lite"})
+    String implementation;
+
+    @Param({"0", "1", "2", "4", "8", "16"})
+    int numTags;
+
+    @Param({"0", "1", "2", "4", "8", "16"})
+    int numBaseTags;
+
+    @Setup
+    public void setup() {
+      tagger = TagsBenchmarksUtil.getTagger(implementation);
+      baseTagContext = TagsBenchmarksUtil.createTagContext(tagger.emptyBuilder(), numBaseTags);
+    }
+  }
+
+  /** Build nested tag context. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public TagContext timeNestedTagContext(Data data) {
+    return TagsBenchmarksUtil.createTagContext(
+        data.tagger.toBuilder(data.baseTagContext), data.numTags);
+  }
+}
diff --git a/benchmarks/src/jmh/java/io/opencensus/benchmarks/tags/TagContextBenchmark.java b/benchmarks/src/jmh/java/io/opencensus/benchmarks/tags/TagContextBenchmark.java
new file mode 100644
index 0000000..382b705
--- /dev/null
+++ b/benchmarks/src/jmh/java/io/opencensus/benchmarks/tags/TagContextBenchmark.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.benchmarks.tags;
+
+import io.opencensus.common.Scope;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.Tagger;
+import io.opencensus.tags.propagation.TagContextBinarySerializer;
+import java.util.concurrent.TimeUnit;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
+
+/** Benchmarks for {@link io.opencensus.tags.Tagger}. */
+public class TagContextBenchmark {
+  @State(org.openjdk.jmh.annotations.Scope.Benchmark)
+  public static class Data {
+    @Param({"0", "1", "2", "4", "8", "16"})
+    int numTags;
+
+    @Param({"impl", "impl-lite"})
+    String implementation;
+
+    private Scope scope;
+    private Tagger tagger;
+    private TagContextBinarySerializer serializer;
+    private TagContext tagContext;
+    private byte[] serializedTagContext;
+
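+    // Pre-builds, scopes, and serializes a tag context for the benchmarks to reuse.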
+    @Setup
+    public void setup() throws Exception {
+      tagger = TagsBenchmarksUtil.getTagger(implementation);
+      serializer = TagsBenchmarksUtil.getTagContextBinarySerializer(implementation);
+      tagContext = TagsBenchmarksUtil.createTagContext(tagger.emptyBuilder(), numTags);
+      scope = tagger.withTagContext(tagContext);
+      serializedTagContext = serializer.toByteArray(tagContext);
+    }
+
+    @TearDown
+    public void tearDown() {
+      scope.close();
+    }
+  }
+
+  /** Create a tag context. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public TagContext tagContextCreation(Data data) {
+    return TagsBenchmarksUtil.createTagContext(data.tagger.emptyBuilder(), data.numTags);
+  }
+
+  /** Open and close a tag context scope. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Scope scopeTagContext(Data data) {
+    Scope scope = data.tagger.withTagContext(data.tagContext);
+    scope.close();
+    return scope;
+  }
+
+  /** Get the current tag context. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public TagContext getCurrentTagContext(Data data) {
+    return data.tagger.getCurrentTagContext();
+  }
+
+  /** Serialize a tag context. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public byte[] serializeTagContext(Data data) throws Exception {
+    return data.serializer.toByteArray(data.tagContext);
+  }
+
+  /** Deserialize a tag context. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public TagContext deserializeTagContext(Data data) throws Exception {
+    return data.serializer.fromByteArray(data.serializedTagContext);
+  }
+}
diff --git a/benchmarks/src/jmh/java/io/opencensus/benchmarks/tags/TagsBenchmark.java b/benchmarks/src/jmh/java/io/opencensus/benchmarks/tags/TagsBenchmark.java
new file mode 100644
index 0000000..29d73d2
--- /dev/null
+++ b/benchmarks/src/jmh/java/io/opencensus/benchmarks/tags/TagsBenchmark.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.benchmarks.tags;
+
+import io.opencensus.tags.Tag;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagValue;
+import java.util.concurrent.TimeUnit;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+
+/**
+ * Benchmarks for creating {@link io.opencensus.tags.TagKey}, {@link io.opencensus.tags.TagValue}
+ * and {@link io.opencensus.tags.Tag}.
+ */
+public class TagsBenchmark {
+  @State(org.openjdk.jmh.annotations.Scope.Benchmark)
+  public static class Data {
+    @Param({"impl", "impl-lite"})
+    String implementation;
+
+    @Param({"1", "8", "32", "128", "255"})
+    int size;
+
+    private String input;
+    private TagKey tagKey;
+    private TagValue tagValue;
+
+    @Setup
+    public void setup() throws Exception {
+      StringBuilder builder = new StringBuilder(size);
+      // build a string with characters from 'a' to 'z'
+      for (int i = 0; i < size; i++) {
+        builder.append((char) (97 + i % 26));
+      }
+      input = builder.toString();
+      tagKey = TagKey.create(input);
+      tagValue = TagValue.create(input);
+    }
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public TagKey tagKeyCreation(Data data) {
+    return TagKey.create("key");
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public TagValue tagValueCreation(Data data) {
+    return TagValue.create("val");
+  }
+
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Tag tagCreation(Data data) {
+    return Tag.create(data.tagKey, data.tagValue, TagsBenchmarksUtil.UNLIMITED_PROPAGATION);
+  }
+}
diff --git a/benchmarks/src/jmh/java/io/opencensus/benchmarks/tags/TagsBenchmarksUtil.java b/benchmarks/src/jmh/java/io/opencensus/benchmarks/tags/TagsBenchmarksUtil.java
new file mode 100644
index 0000000..3ed54ad
--- /dev/null
+++ b/benchmarks/src/jmh/java/io/opencensus/benchmarks/tags/TagsBenchmarksUtil.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.benchmarks.tags;
+
+import com.google.common.annotations.VisibleForTesting;
+import io.opencensus.implcore.tags.TagsComponentImplBase;
+import io.opencensus.impllite.tags.TagsComponentImplLite;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.TagContextBuilder;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagMetadata;
+import io.opencensus.tags.TagValue;
+import io.opencensus.tags.Tagger;
+import io.opencensus.tags.propagation.TagContextBinarySerializer;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+/** Util class for Tags Benchmarks. */
+public final class TagsBenchmarksUtil {
+  private static final TagsComponentImplBase tagsComponentImplBase = new TagsComponentImplBase();
+  private static final TagsComponentImplLite tagsComponentImplLite = new TagsComponentImplLite();
+
+  public static final List<TagKey> TAG_KEYS =
+      Collections.unmodifiableList(Arrays.asList(createTagKeys(16, "key")));
+  public static final List<TagValue> TAG_VALUES =
+      Collections.unmodifiableList(Arrays.asList(createTagValues(16, "val")));
+  public static final TagMetadata UNLIMITED_PROPAGATION =
+      TagMetadata.create(TagMetadata.TagTtl.UNLIMITED_PROPAGATION);
+
+  /** Gets the {@link Tagger} for the specified 'implementation'. */
+  @VisibleForTesting
+  public static Tagger getTagger(String implementation) {
+    if (implementation.equals("impl")) {
+      // A dedicated TagsComponentImplBase is used here; when the impl artifact is linked, this
+      // is the same implementation that backs the global tags component.
+      // TODO(bdrutu): Make everything not be a singleton (disruptor, etc.) and use a new
+      // TraceComponentImpl similar to TraceComponentImplLite.
+      return tagsComponentImplBase.getTagger();
+    } else if (implementation.equals("impl-lite")) {
+      return tagsComponentImplLite.getTagger();
+    } else {
+      throw new RuntimeException("Invalid tagger implementation specified.");
+    }
+  }
+
+  /** Gets the {@link TagContextBinarySerializer} for the specified 'implementation'. */
+  @VisibleForTesting
+  public static TagContextBinarySerializer getTagContextBinarySerializer(String implementation) {
+    if (implementation.equals("impl")) {
+      // A dedicated TagsComponentImplBase is used here; when the impl artifact is linked, this
+      // is the same implementation that backs the global tags component.
+      // TODO(bdrutu): Make everything not be a singleton (disruptor, etc.) and use a new
+      // TraceComponentImpl similar to TraceComponentImplLite.
+      return tagsComponentImplBase.getTagPropagationComponent().getBinarySerializer();
+    } else if (implementation.equals("impl-lite")) {
+      return tagsComponentImplLite.getTagPropagationComponent().getBinarySerializer();
+    } else {
+      throw new RuntimeException("Invalid binary serializer implementation specified.");
+    }
+  }
+
+  /** Creates an array of TagKeys of 'size' with 'name' prefix. */
+  @VisibleForTesting
+  public static TagKey[] createTagKeys(int size, String name) {
+    TagKey[] keys = new TagKey[size];
+    for (int i = 0; i < size; i++) {
+      keys[i] = TagKey.create(name + i);
+    }
+    return keys;
+  }
+
+  /** Creates an array of TagValues of 'size' with 'name' prefix. */
+  @VisibleForTesting
+  public static TagValue[] createTagValues(int size, String name) {
+    TagValue[] values = new TagValue[size];
+    for (int i = 0; i < size; i++) {
+      values[i] = TagValue.create(name + i);
+    }
+    return values;
+  }
+
+  /** Adds 'numTags' tags to 'tagsBuilder' and returns the associated tag context. */
+  @VisibleForTesting
+  public static TagContext createTagContext(TagContextBuilder tagsBuilder, int numTags) {
+    for (int i = 0; i < numTags; i++) {
+      tagsBuilder.put(TAG_KEYS.get(i), TAG_VALUES.get(i), UNLIMITED_PROPAGATION);
+    }
+    return tagsBuilder.build();
+  }
+
+  // Avoid instances of this class.
+  private TagsBenchmarksUtil() {}
+}
diff --git a/benchmarks/src/jmh/java/io/opencensus/benchmarks/trace/BasicDataBenchmark.java b/benchmarks/src/jmh/java/io/opencensus/benchmarks/trace/BasicDataBenchmark.java
new file mode 100644
index 0000000..8f8ad22
--- /dev/null
+++ b/benchmarks/src/jmh/java/io/opencensus/benchmarks/trace/BasicDataBenchmark.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * ./gradlew --no-daemon -PjmhIncludeSingleClass=BasicDataBenchmark clean :opencensus-benchmarks:jmh
+ */
+
+package io.opencensus.benchmarks.trace;
+
+import io.opencensus.trace.Annotation;
+import io.opencensus.trace.AttributeValue;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
+
+/** Benchmarks for basic data structures related to trace events. */
+@State(Scope.Benchmark)
+public class BasicDataBenchmark {
+  private static final String ANNOTATION_DESCRIPTION = "MyAnnotation";
+  private static final String ATTRIBUTE_KEY = "MyAttributeKey";
+  private static final String ATTRIBUTE_VALUE_STRING = "MyAttributeValue";
+  private static final long ATTRIBUTE_VALUE_LONG = 90215;
+
+  @State(Scope.Benchmark)
+  public static class Data {
+    private AttributeValue[] attributeValues;
+    private String[] attributeKeys;
+    Map<String, AttributeValue> attributeMap;
+
+    // @Param({"impl", "impl-lite"})
+    @Param({"impl"})
+    String implementation;
+
+    @Param({"0", "1", "4", "8", "16"})
+    int size;
+
+    @Param({"string", "boolean", "long"})
+    String attributeType;
+
+    @Setup
+    public void setup() {
+      attributeValues = getAttributeValues(size, attributeType);
+      attributeKeys = new String[size];
+      attributeMap = new HashMap<>(size);
+      for (int i = 0; i < size; i++) {
+        attributeKeys[i] = ATTRIBUTE_KEY + "-" + i;
+        attributeMap.put(attributeKeys[i], attributeValues[i]);
+      }
+    }
+
+    @TearDown
+    public void doTearDown() {}
+  }
+
+  /** Create attribute values. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public AttributeValue[] createAttributeValues(Data data) {
+    return getAttributeValues(data.size, data.attributeType);
+  }
+
+  /** Create an AttributeMap. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Map<String, AttributeValue> createAttributeMap(Data data) {
+    Map<String, AttributeValue> attributeMap = new HashMap<>(data.size);
+    for (int i = 0; i < data.size; i++) {
+      attributeMap.put(data.attributeKeys[i], data.attributeValues[i]);
+    }
+    return attributeMap;
+  }
+
+  /** Create an Annotation. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Annotation createAnnotation(Data data) {
+    return Annotation.fromDescriptionAndAttributes(ANNOTATION_DESCRIPTION, data.attributeMap);
+  }
+
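+  // Builds 'size' attribute values of the requested type (string, boolean or long).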
+  private static AttributeValue[] getAttributeValues(int size, String attributeType) {
+    AttributeValue[] attributeValues = new AttributeValue[size];
+    switch (attributeType) {
+      case "string":
+        for (int i = 0; i < size; i++) {
+          attributeValues[i] =
+              AttributeValue.stringAttributeValue(ATTRIBUTE_VALUE_STRING + "-" + i);
+        }
+        break;
+      case "boolean":
+        for (int i = 0; i < size; i++) {
+          attributeValues[i] = AttributeValue.booleanAttributeValue(i % 3 == 0);
+        }
+        break;
+      case "long":
+        for (int i = 0; i < size; i++) {
+          attributeValues[i] = AttributeValue.longAttributeValue(ATTRIBUTE_VALUE_LONG + i);
+        }
+        break;
+      default:
+        throw new IllegalArgumentException("Unknown attribute type: " + attributeType);
+    }
+    return attributeValues;
+  }
+}
diff --git a/benchmarks/src/jmh/java/io/opencensus/benchmarks/trace/BasicOperationsBenchmark.java b/benchmarks/src/jmh/java/io/opencensus/benchmarks/trace/BasicOperationsBenchmark.java
new file mode 100644
index 0000000..a4bb711
--- /dev/null
+++ b/benchmarks/src/jmh/java/io/opencensus/benchmarks/trace/BasicOperationsBenchmark.java
@@ -0,0 +1,305 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.benchmarks.trace;
+
+import io.opencensus.common.Scope;
+import io.opencensus.trace.Link;
+import io.opencensus.trace.MessageEvent;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.SpanId;
+import io.opencensus.trace.Status;
+import io.opencensus.trace.TraceId;
+import io.opencensus.trace.TraceOptions;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.Tracestate;
+import io.opencensus.trace.propagation.PropagationComponent;
+import io.opencensus.trace.propagation.SpanContextParseException;
+import io.opencensus.trace.propagation.TextFormat;
+import io.opencensus.trace.propagation.TextFormat.Getter;
+import io.opencensus.trace.propagation.TextFormat.Setter;
+import io.opencensus.trace.samplers.Samplers;
+import java.util.concurrent.TimeUnit;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
+
+/** Benchmarks for basic trace operations. */
+@State(org.openjdk.jmh.annotations.Scope.Benchmark)
+public class BasicOperationsBenchmark {
+  private static final String TRACEPARENT_KEY = "traceparent";
+  private static final Status STATUS_OK = Status.OK;
+  private static final long MESSAGE_ID = 1042;
+  private static final Tracestate TRACESTATE_DEFAULT = Tracestate.builder().build();
+
+  @State(org.openjdk.jmh.annotations.Scope.Benchmark)
+  public static class Data {
+    private Span span;
+    private byte[] spanToDecodeBinary;
+    private String spanToDecodeText;
+    private Span spanToEncode;
+    private Span spanToScope;
+    private Span spanToSet;
+    private Span spanToEnd;
+
+    private Tracer tracer;
+    private PropagationComponent propagation;
+
+    // @Param({"impl", "impl-lite"})
+    @Param({"impl"})
+    String implementation;
+
+    @Param({"true", "false"})
+    boolean recorded;
+
+    @Param({"true", "false"})
+    boolean sampled;
+
+    @Setup
+    public void setup() {
+      tracer = BenchmarksUtil.getTracer(implementation);
+      propagation = BenchmarksUtil.getPropagationComponent(implementation);
+      span =
+          tracer
+              .spanBuilderWithExplicitParent("TopLevelSpan", null)
+              .setRecordEvents(recorded)
+              .setSampler(sampled ? Samplers.alwaysSample() : Samplers.neverSample())
+              .startSpan();
+
+      spanToEncode =
+          tracer
+              .spanBuilderWithExplicitParent("SpanToEncode", span)
+              .setRecordEvents(recorded)
+              .setSampler(sampled ? Samplers.alwaysSample() : Samplers.neverSample())
+              .startSpan();
+
+      spanToScope =
+          tracer
+              .spanBuilderWithExplicitParent("SpanToScope", span)
+              .setRecordEvents(recorded)
+              .setSampler(sampled ? Samplers.alwaysSample() : Samplers.neverSample())
+              .startSpan();
+
+      spanToSet =
+          tracer
+              .spanBuilderWithExplicitParent("SpanToSet", span)
+              .setRecordEvents(recorded)
+              .setSampler(sampled ? Samplers.alwaysSample() : Samplers.neverSample())
+              .startSpan();
+
+      spanToEnd =
+          tracer
+              .spanBuilderWithExplicitParent("SpanToEnd", span)
+              .setRecordEvents(recorded)
+              .setSampler(sampled ? Samplers.alwaysSample() : Samplers.neverSample())
+              .startSpan();
+
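+      // Pre-encode a span context so the decode benchmarks measure parsing only.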
+      spanToDecodeBinary = propagation.getBinaryFormat().toByteArray(spanToEncode.getContext());
+
+      spanToDecodeText =
+          encodeSpanContextText(propagation.getTraceContextFormat(), spanToEncode.getContext());
+    }
+
+    @TearDown
+    public void doTearDown() {
+      span.end();
+      spanToEncode.end();
+      spanToScope.end();
+      spanToSet.end();
+    }
+  }
+
+  /** Create a root span. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Span createRootSpan(Data data) {
+    Span span =
+        data.tracer
+            .spanBuilderWithExplicitParent("RootSpan", null)
+            .setRecordEvents(data.recorded)
+            .setSampler(data.sampled ? Samplers.alwaysSample() : Samplers.neverSample())
+            .startSpan();
+    span.end();
+    return span;
+  }
+
+  /** Create a child span. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Span createSpanWithExplicitParent(Data data) {
+    Span span =
+        data.tracer
+            .spanBuilderWithExplicitParent("ChildSpan", data.span)
+            .setRecordEvents(data.recorded)
+            .setSampler(data.sampled ? Samplers.alwaysSample() : Samplers.neverSample())
+            .startSpan();
+    span.end();
+    return span;
+  }
+
+  /** Create a child span with a remote parent. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Span createSpanWithRemoteParent(Data data) {
+    Span span =
+        data.tracer
+            .spanBuilderWithRemoteParent("ChildSpanFromRemoteParent", data.span.getContext())
+            .setRecordEvents(data.recorded)
+            .setSampler(data.sampled ? Samplers.alwaysSample() : Samplers.neverSample())
+            .startSpan();
+    span.end();
+    return span;
+  }
+
+  /** Create a child span from the current span. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Span createSpanWithCurrentSpan(Data data) {
+    Span span =
+        data.tracer
+            .spanBuilder("ChildSpanFromCurrent")
+            .setRecordEvents(data.recorded)
+            .setSampler(data.sampled ? Samplers.alwaysSample() : Samplers.neverSample())
+            .startSpan();
+    span.end();
+    return span;
+  }
+
+  /** Create a link. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Link createLink(Data data) {
+    return Link.fromSpanContext(
+        SpanContext.create(
+            TraceId.fromBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0}),
+            SpanId.fromBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, 0}),
+            TraceOptions.DEFAULT,
+            TRACESTATE_DEFAULT),
+        Link.Type.PARENT_LINKED_SPAN);
+  }
+
+  /** Create a message event. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public MessageEvent createMessageEvent(Data data) {
+    return MessageEvent.builder(MessageEvent.Type.SENT, MESSAGE_ID).build();
+  }
+
+  /** Scope/Unscope a trace span. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Scope scopeSpan(Data data) {
+    try (Scope scope = data.tracer.withSpan(data.spanToScope)) {
+      return scope;
+    }
+  }
+
+  /** Get current trace span. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Span getCurrentSpan(Data data) {
+    return data.tracer.getCurrentSpan();
+  }
+
+  /** Encode a span using binary format. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public byte[] encodeSpanBinary(Data data) {
+    return data.propagation.getBinaryFormat().toByteArray(data.spanToEncode.getContext());
+  }
+
+  /** Decode a span using binary format. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public SpanContext decodeSpanBinary(Data data) throws SpanContextParseException {
+    return data.propagation.getBinaryFormat().fromByteArray(data.spanToDecodeBinary);
+  }
+
+  /** Encode a span using text format. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public String encodeSpanText(Data data) {
+    return encodeSpanContextText(
+        data.propagation.getTraceContextFormat(), data.spanToEncode.getContext());
+  }
+
+  /** Decode a span using text format. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public SpanContext decodeSpanText(Data data) throws SpanContextParseException {
+    return data.propagation.getTraceContextFormat().extract(data.spanToDecodeText, textGetter);
+  }
+
+  /** Set status on a span. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Span setStatus(Data data) {
+    data.spanToSet.setStatus(STATUS_OK);
+    return data.spanToSet;
+  }
+
+  /** End a span. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Span endSpan(Data data) {
+    data.spanToEnd.end();
+    return data.spanToEnd;
+  }
+
+  private static String encodeSpanContextText(TextFormat format, SpanContext context) {
+    StringBuilder builder = new StringBuilder();
+    format.inject(context, builder, textSetter);
+    return builder.toString();
+  }
+
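+  // Minimal text-format carrier that only reads and writes the 'traceparent' header value.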
+  private static final Setter<StringBuilder> textSetter =
+      new Setter<StringBuilder>() {
+        @Override
+        public void put(StringBuilder carrier, String key, String value) {
+          if (key.equals(TRACEPARENT_KEY)) {
+            carrier.append(value);
+          }
+        }
+      };
+
+  private static final Getter<String> textGetter =
+      new Getter<String>() {
+        @Override
+        public String get(String carrier, String key) {
+          return key.equals(TRACEPARENT_KEY) ? carrier : null;
+        }
+      };
+}
diff --git a/benchmarks/src/jmh/java/io/opencensus/benchmarks/trace/BenchmarksUtil.java b/benchmarks/src/jmh/java/io/opencensus/benchmarks/trace/BenchmarksUtil.java
index e917817..2517b3e 100644
--- a/benchmarks/src/jmh/java/io/opencensus/benchmarks/trace/BenchmarksUtil.java
+++ b/benchmarks/src/jmh/java/io/opencensus/benchmarks/trace/BenchmarksUtil.java
@@ -19,6 +19,7 @@
 import io.opencensus.impllite.trace.TraceComponentImplLite;
 import io.opencensus.trace.Tracer;
 import io.opencensus.trace.Tracing;
+import io.opencensus.trace.propagation.PropagationComponent;
 
 /** Util class for Benchmarks. */
 final class BenchmarksUtil {
@@ -38,6 +39,20 @@
     }
   }
 
+  static PropagationComponent getPropagationComponent(String implementation) {
+    if (implementation.equals("impl")) {
+      // We can return the global propagation component here because if impl is linked the global
+      // implementation will be the impl one.
+      // TODO(bdrutu): Make everything not be a singleton (disruptor, etc.) and use a new
+      // TraceComponentImpl similar to TraceComponentImplLite.
+      return Tracing.getPropagationComponent();
+    } else if (implementation.equals("impl-lite")) {
+      return traceComponentImplLite.getPropagationComponent();
+    } else {
+      throw new RuntimeException("Invalid tracer implementation requested.");
+    }
+  }
+
   // Avoid instances of this class.
   private BenchmarksUtil() {}
 }
diff --git a/benchmarks/src/jmh/java/io/opencensus/benchmarks/trace/SpanOperationsBenchmark.java b/benchmarks/src/jmh/java/io/opencensus/benchmarks/trace/SpanOperationsBenchmark.java
new file mode 100644
index 0000000..7f16a37
--- /dev/null
+++ b/benchmarks/src/jmh/java/io/opencensus/benchmarks/trace/SpanOperationsBenchmark.java
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.benchmarks.trace;
+
+import io.opencensus.trace.Annotation;
+import io.opencensus.trace.AttributeValue;
+import io.opencensus.trace.Link;
+import io.opencensus.trace.MessageEvent;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.SpanId;
+import io.opencensus.trace.TraceId;
+import io.opencensus.trace.TraceOptions;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.Tracestate;
+import io.opencensus.trace.samplers.Samplers;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
+
+/** Benchmarks for {@link Span}-related trace events. */
+@State(Scope.Benchmark)
+public class SpanOperationsBenchmark {
+  private static final String SPAN_NAME = "SpanName";
+  private static final String ANNOTATION_DESCRIPTION = "MyAnnotation";
+  private static final String ATTRIBUTE_KEY = "MyAttributeKey";
+  private static final String ATTRIBUTE_VALUE = "MyAttributeValue";
+  private static final long MESSAGE_ID = 1042;
+  private static final Tracestate TRACESTATE_DEFAULT = Tracestate.builder().build();
+
+  @State(Scope.Benchmark)
+  public static class Data {
+    private Span attributeSpan;
+    private Span annotationSpanEmpty;
+    private Span annotationSpanAttributes;
+    private Span annotationSpanAnnotation;
+    private Span messageEventSpan;
+    private Span linkSpan;
+    private Tracer tracer;
+    private AttributeValue[] attributeValues;
+    private String[] attributeKeys;
+    private Map<String, AttributeValue> attributeMap;
+    private MessageEvent[] messageEvents;
+    private Link[] links;
+
+    // @Param({"impl", "impl-lite"})
+    @Param({"impl"})
+    String implementation;
+
+    @Param({"true", "false"})
+    boolean recorded;
+
+    @Param({"true", "false"})
+    boolean sampled;
+
+    @Param({"0", "1", "4", "8", "16"})
+    // @Param({"0", "1", "16"})
+    int size;
+
+    @Setup
+    public void setup() {
+      tracer = BenchmarksUtil.getTracer(implementation);
+      attributeSpan = createSpan("Attribute");
+      annotationSpanEmpty = createSpan("AnnotationSpanEmpty");
+      annotationSpanAttributes = createSpan("AnnotationSpanAttributes");
+      annotationSpanAnnotation = createSpan("AnnotationSpanAnnotation");
+      messageEventSpan = createSpan("MessageEventSpan");
+      linkSpan = createSpan("LinkSpan");
+      initAttributes();
+    }
+
+    @TearDown
+    public void doTearDown() {
+      attributeSpan.end();
+      annotationSpanEmpty.end();
+      annotationSpanAttributes.end();
+      annotationSpanAnnotation.end();
+      messageEventSpan.end();
+      linkSpan.end();
+    }
+
+    private Span createSpan(String suffix) {
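+      // Every benchmark span is a root span (explicit null parent) using the sampler selected by
+      // the "sampled" parameter.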
+      return tracer
+          .spanBuilderWithExplicitParent(SPAN_NAME + suffix, null)
+          .setRecordEvents(recorded)
+          .setSampler(sampled ? Samplers.alwaysSample() : Samplers.neverSample())
+          .startSpan();
+    }
+
+    private void initAttributes() {
+      attributeValues = createAttributeValues(size);
+      attributeKeys = new String[size];
+      attributeMap = new HashMap<>(size);
+      messageEvents = new MessageEvent[size];
+      links = new Link[size];
+      for (int i = 0; i < size; i++) {
+        attributeKeys[i] = ATTRIBUTE_KEY + "-" + i;
+        attributeMap.put(attributeKeys[i], attributeValues[i]);
+        messageEvents[i] = MessageEvent.builder(MessageEvent.Type.SENT, MESSAGE_ID + i).build();
+        links[i] =
+            Link.fromSpanContext(
+                SpanContext.create(
+                    TraceId.fromBytes(
+                        new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, (byte) i}),
+                    SpanId.fromBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, (byte) i}),
+                    TraceOptions.DEFAULT,
+                    TRACESTATE_DEFAULT),
+                Link.Type.PARENT_LINKED_SPAN);
+      }
+    }
+  }
+
+  /** Add attributes individually. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Span putAttribute(Data data) {
+    Span span = data.attributeSpan;
+    for (int i = 0; i < data.size; i++) {
+      span.putAttribute(data.attributeKeys[i], data.attributeValues[i]);
+    }
+    return span;
+  }
+
+  /** Add attributes as a map. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Span putAttributes(Data data) {
+    Span span = data.attributeSpan;
+    span.putAttributes(data.attributeMap);
+    return span;
+  }
+
+  /** Add an annotation as description only. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Span addAnnotationEmpty(Data data) {
+    Span span = data.annotationSpanEmpty;
+    span.addAnnotation(ANNOTATION_DESCRIPTION);
+    return span;
+  }
+
+  /** Add an annotation with attributes. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Span addAnnotationWithAttributes(Data data) {
+    Span span = data.annotationSpanAttributes;
+    span.addAnnotation(ANNOTATION_DESCRIPTION, data.attributeMap);
+    return span;
+  }
+
+  /** Add an annotation using an {@link Annotation} object. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Span addAnnotationWithAnnotation(Data data) {
+    Span span = data.annotationSpanAnnotation;
+    Annotation annotation =
+        Annotation.fromDescriptionAndAttributes(ANNOTATION_DESCRIPTION, data.attributeMap);
+    span.addAnnotation(annotation);
+    return span;
+  }
+
+  /** Add message events. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Span addMessageEvent(Data data) {
+    Span span = data.messageEventSpan;
+    for (int i = 0; i < data.size; i++) {
+      span.addMessageEvent(data.messageEvents[i]);
+    }
+    return span;
+  }
+
+  /** Add links. */
+  @Benchmark
+  @BenchmarkMode(Mode.AverageTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Span addLink(Data data) {
+    Span span = data.linkSpan;
+    for (int i = 0; i < data.size; i++) {
+      span.addLink(data.links[i]);
+    }
+    return span;
+  }
+
+  private static AttributeValue[] createAttributeValues(int size) {
+    AttributeValue[] attributeValues = new AttributeValue[size];
+    for (int i = 0; i < size; i++) {
+      attributeValues[i] = AttributeValue.stringAttributeValue(ATTRIBUTE_VALUE + "-" + i);
+    }
+    return attributeValues;
+  }
+}
diff --git a/build.gradle b/build.gradle
index 0775167..a979cc4 100644
--- a/build.gradle
+++ b/build.gradle
@@ -8,12 +8,13 @@
     }
     dependencies {
         classpath 'ru.vyarus:gradle-animalsniffer-plugin:1.4.6'
+        classpath("org.springframework.boot:spring-boot-gradle-plugin:2.0.5.RELEASE")
         classpath 'net.ltgt.gradle:gradle-errorprone-plugin:0.0.16'
         classpath "net.ltgt.gradle:gradle-apt-plugin:0.18"
         classpath 'com.github.ben-manes:gradle-versions-plugin:0.20.0'
-        classpath "gradle.plugin.com.github.sherter.google-java-format:google-java-format-gradle-plugin:0.7.1"
-        classpath "me.champeau.gradle:jmh-gradle-plugin:0.4.7"
-        classpath "gradle.plugin.io.morethan.jmhreport:gradle-jmh-report:0.7.0"
+        classpath "gradle.plugin.com.github.sherter.google-java-format:google-java-format-gradle-plugin:0.8"
+        classpath "me.champeau.gradle:jmh-gradle-plugin:0.4.8"
+        classpath "gradle.plugin.io.morethan.jmhreport:gradle-jmh-report:0.9.0"
     }
 }
 
@@ -21,11 +22,15 @@
 // Also see https://github.com/ben-manes/gradle-versions-plugin.
 apply plugin: 'com.github.ben-manes.versions'
 
-// Don't use the Checker Framework by default, since it interferes with Error Prone.
-def useCheckerFramework = rootProject.hasProperty('checkerFramework')
-def useErrorProne = !useCheckerFramework
-
 subprojects {
+    // The OC-Agent exporters depend on grpc-core, which pulls in the Error Prone annotations and
+    // does not work with the Checker Framework, so those projects are excluded below.
+    def projectsDependOnGrpcCore = ["opencensus-exporter-metrics-ocagent", "opencensus-exporter-trace-ocagent"]
+
+    // Don't use the Checker Framework by default, since it interferes with Error Prone.
+    def useCheckerFramework = rootProject.hasProperty('checkerFramework') && !(project.name in projectsDependOnGrpcCore)
+    def useErrorProne = !useCheckerFramework
+
     apply plugin: "checkstyle"
     apply plugin: 'maven'
     apply plugin: 'idea'
@@ -37,21 +42,19 @@
     apply plugin: 'ru.vyarus.animalsniffer'
     apply plugin: 'findbugs'
     apply plugin: 'net.ltgt.apt'
+    apply plugin: 'net.ltgt.apt-idea'
     apply plugin: "me.champeau.gradle.jmh"
     apply plugin: "io.morethan.jmhreport"
-    // Plugins that require java8
-    if (JavaVersion.current().isJava8Compatible()) {
-        if (useErrorProne) {
-            apply plugin: "net.ltgt.errorprone"
-        }
-        apply plugin: 'com.github.sherter.google-java-format'
+    apply plugin: 'com.github.sherter.google-java-format'
+    if (useErrorProne) {
+        apply plugin: "net.ltgt.errorprone"
     }
 
     group = "io.opencensus"
-    version = "0.17.0-SNAPSHOT" // CURRENT_OPENCENSUS_VERSION
+    version = "0.32.0-SNAPSHOT" // CURRENT_OPENCENSUS_VERSION
 
-    sourceCompatibility = 1.6
-    targetCompatibility = 1.6
+    sourceCompatibility = 1.7
+    targetCompatibility = 1.7
 
     repositories {
         mavenCentral()
@@ -76,45 +79,50 @@
         // https://groups.google.com/forum/#!topic/bazel-discuss/_R3A9TJSoPM
         it.options.compilerArgs += ["-Xlint:all", "-Xlint:-try", "-Xlint:-processing"]
         if (useErrorProne) {
-            if (JavaVersion.current().isJava8Compatible()) {
-                it.options.compilerArgs += ["-XepAllDisabledChecksAsWarnings", "-XepDisableWarningsInGeneratedCode"]
+            it.options.compilerArgs += ["-XepAllDisabledChecksAsWarnings", "-XepDisableWarningsInGeneratedCode"]
 
-                // MutableMethodReturnType can suggest returning Guava types from
-                // API methods (https://github.com/google/error-prone/issues/982).
-                it.options.compilerArgs += ["-Xep:MutableMethodReturnType:OFF"]
+            // -XepDisableWarningsInGeneratedCode doesn't suppress all warnings in generated
+            // protocol buffer code.
+            it.options.compilerArgs += ["-XepExcludedPaths:.*/gen_gradle/.*"]
 
-                // ReturnMissingNullable conflicts with Checker Framework null analysis.
-                it.options.compilerArgs += ["-Xep:ReturnMissingNullable:OFF"]
+            // MutableMethodReturnType can suggest returning Guava types from
+            // API methods (https://github.com/google/error-prone/issues/982).
+            it.options.compilerArgs += ["-Xep:MutableMethodReturnType:OFF"]
 
-                // OpenCensus doesn't currently use Var annotations.
-                it.options.compilerArgs += ["-Xep:Var:OFF"]
-            }
+            // ReturnMissingNullable conflicts with Checker Framework null analysis.
+            it.options.compilerArgs += ["-Xep:ReturnMissingNullable:OFF"]
+
+            // OpenCensus doesn't currently use Var annotations.
+            it.options.compilerArgs += ["-Xep:Var:OFF"]
+
+            // ImmutableRefactoring suggests using com.google.errorprone.annotations.Immutable,
+            // but OpenCensus currently uses javax.annotation.concurrent.Immutable
+            it.options.compilerArgs += ["-Xep:ImmutableRefactoring:OFF"]
+
+            // This check causes a NullPointerException
+            // (https://github.com/google/error-prone/issues/1138).
+            it.options.compilerArgs += ["-Xep:NullableDereference:OFF"]
+
+            // ExpectedExceptionRefactoring and TestExceptionRefactoring suggest using
+            // assertThrows, but assertThrows only works well with lambdas.
+            it.options.compilerArgs += ["-Xep:ExpectedExceptionRefactoring:OFF"]
+            it.options.compilerArgs += ["-Xep:TestExceptionRefactoring:OFF"]
         }
         if (useCheckerFramework) {
             it.options.compilerArgs += [
-                '-processor',
-		'com.google.auto.value.processor.AutoValueProcessor,org.checkerframework.checker.nullness.NullnessChecker',
-		"-Astubs=$rootDir/checker-framework/stubs"
+                    '-processor',
+                    'com.google.auto.value.processor.AutoValueProcessor,org.checkerframework.checker.nullness.NullnessChecker',
+                    "-Astubs=$rootDir/checker-framework/stubs"
             ]
         }
         it.options.encoding = "UTF-8"
         // Protobuf-generated code produces some warnings.
         // https://github.com/google/protobuf/issues/2718
         it.options.compilerArgs += ["-Xlint:-cast"]
-        if (!JavaVersion.current().isJava9()) {
+        if (!JavaVersion.current().isJava9() && !useErrorProne) {
             // TODO(sebright): Enable -Werror for Java 9 once we upgrade AutoValue (issue #1017).
             it.options.compilerArgs += ["-Werror"]
         }
-        if (JavaVersion.current().isJava7()) {
-            // Suppress all deprecation warnings with Java 7, since there are some bugs in its handling of
-            // @SuppressWarnings. See
-            // https://stackoverflow.com/questions/26921774/how-to-avoid-deprecation-warnings-when-suppresswarningsdeprecation-doesnt
-            it.options.compilerArgs += ["-Xlint:-deprecation"]
-
-            // TODO(bdrutu): Enable for Java 7 when fix the issue with configuring bootstrap class.
-            // [options] bootstrap class path not set in conjunction with -source 1.6
-            it.options.compilerArgs += ["-Xlint:-options"]
-        }
         if (JavaVersion.current().isJava9()) {
             // TODO(sebright): Currently, building with Java 9 produces the following "options" warnings:
             //
@@ -147,36 +155,42 @@
     }
 
     ext {
-        appengineVersion = '1.9.64'
+        appengineVersion = '1.9.71'
         aspectjVersion = '1.8.11'
         autoValueVersion = '1.4'
         findBugsAnnotationsVersion = '3.0.1'
         findBugsJsr305Version = '3.0.2'
-        errorProneVersion = '2.3.1'
-        grpcVersion = '1.14.0'
-        guavaVersion = '20.0'
-        googleAuthVersion = '0.11.0'
-        googleCloudBetaVersion = '0.64.0-beta'
-        googleCloudGaVersion = '1.46.0'
-        signalfxVersion = '0.0.39'
+        errorProneVersion = '2.3.2'
+        grpcVersion = '1.27.2'
+        guavaVersion = '29.0-android'
+        googleAuthVersion = '0.20.0'
+        googleCloudBetaVersion = '0.100.0-beta'
+        googleCloudGaVersion = '1.82.0'
+        signalfxVersion = '0.0.48'
+        springBoot2Version = '2.1.5.RELEASE'
         springBootVersion = '1.5.15.RELEASE'
+        springBootTestVersion = '2.1.1.RELEASE'
         springCloudVersion = '1.3.4.RELEASE'
         springVersion = '4.3.12.RELEASE'
-        prometheusVersion = '0.4.0'
-        protobufVersion = '3.5.1'
-        zipkinReporterVersion = '2.3.2'
-        jaegerReporterVersion = '0.27.0'
-        opencensusProtoVersion = '0.0.2'
+        prometheusVersion = '0.6.0'
+        protobufVersion = '3.11.4'
+        zipkinReporterVersion = '2.7.14'
+        jaegerReporterVersion = '0.33.1'
+        opencensusProtoVersion = '0.2.0'
+        gsonVersion = '2.8.6'
         dropwizardVersion = '3.1.2'
+        dropwizard5Version = '5.0.0'
+        javaxServletVersion = "3.1.0"
+        httpcomponentsVersion = "4.5.8"
 
         libraries = [
                 appengine_api: "com.google.appengine:appengine-api-1.0-sdk:${appengineVersion}",
                 aspectj: "org.aspectj:aspectjrt:${aspectjVersion}",
                 auto_value: "com.google.auto.value:auto-value:${autoValueVersion}",
                 auto_service: 'com.google.auto.service:auto-service:1.0-rc3',
-                byte_buddy: 'net.bytebuddy:byte-buddy:1.7.11',
+                byte_buddy: 'net.bytebuddy:byte-buddy:1.8.22',
                 config: 'com.typesafe:config:1.2.1',
-                disruptor: 'com.lmax:disruptor:3.4.1',
+                disruptor: 'com.lmax:disruptor:3.4.2',
                 errorprone: "com.google.errorprone:error_prone_annotations:${errorProneVersion}",
                 findbugs_annotations: "com.google.code.findbugs:annotations:${findBugsAnnotationsVersion}",
                 google_auth: "com.google.auth:google-auth-library-credentials:${googleAuthVersion}",
@@ -184,17 +198,20 @@
                 google_cloud_trace: "com.google.cloud:google-cloud-trace:${googleCloudBetaVersion}",
                 zipkin_reporter: "io.zipkin.reporter2:zipkin-reporter:${zipkinReporterVersion}",
                 zipkin_urlconnection: "io.zipkin.reporter2:zipkin-sender-urlconnection:${zipkinReporterVersion}",
-                jaeger_reporter: "com.uber.jaeger:jaeger-core:${jaegerReporterVersion}",
+                jaeger_reporter: "io.jaegertracing:jaeger-client:${jaegerReporterVersion}",
                 google_cloud_monitoring: "com.google.cloud:google-cloud-monitoring:${googleCloudGaVersion}",
+                grpc_auth: "io.grpc:grpc-auth:${grpcVersion}",
                 grpc_context: "io.grpc:grpc-context:${grpcVersion}",
                 grpc_core: "io.grpc:grpc-core:${grpcVersion}",
                 grpc_netty: "io.grpc:grpc-netty:${grpcVersion}",
+                grpc_netty_shaded: "io.grpc:grpc-netty-shaded:${grpcVersion}",
                 grpc_stub: "io.grpc:grpc-stub:${grpcVersion}",
                 guava: "com.google.guava:guava:${guavaVersion}",
                 jsr305: "com.google.code.findbugs:jsr305:${findBugsJsr305Version}",
                 signalfx_java: "com.signalfx.public:signalfx-java:${signalfxVersion}",
                 spring_aspects: "org.springframework:spring-aspects:${springVersion}",
                 spring_boot_starter_web: "org.springframework.boot:spring-boot-starter-web:${springBootVersion}",
+                spring_boot_starter_web2: "org.springframework.boot:spring-boot-starter-web:${springBoot2Version}",
                 spring_cloud_build: "org.springframework.cloud:spring-cloud-build:${springCloudVersion}",
                 spring_cloud_starter_sleuth: "org.springframework.cloud:spring-cloud-starter-sleuth:${springCloudVersion}",
                 spring_context: "org.springframework:spring-context:${springVersion}",
@@ -202,14 +219,22 @@
                 prometheus_simpleclient: "io.prometheus:simpleclient:${prometheusVersion}",
                 protobuf: "com.google.protobuf:protobuf-java:${protobufVersion}",
                 opencensus_proto: "io.opencensus:opencensus-proto:${opencensusProtoVersion}",
+                gson: "com.google.code.gson:gson:${gsonVersion}",
+                httpcomponents: "org.apache.httpcomponents:httpclient:${httpcomponentsVersion}",
 
                 // Test dependencies.
                 guava_testlib: "com.google.guava:guava-testlib:${guavaVersion}",
                 junit: 'junit:junit:4.12',
-                mockito: 'org.mockito:mockito-core:1.9.5',
+                mockito: 'org.mockito:mockito-core:2.28.1',
                 spring_test: "org.springframework:spring-test:${springVersion}",
-                truth: 'com.google.truth:truth:0.30',
+                truth: 'com.google.truth:truth:1.0',
+                spring_boot_test: "org.springframework.boot:spring-boot-starter-test:${springBootTestVersion}",
+                spring_boot_test2: "org.springframework.boot:spring-boot-starter-test:${springBoot2Version}",
                 dropwizard: "io.dropwizard.metrics:metrics-core:${dropwizardVersion}",
+                dropwizard5: "io.dropwizard.metrics5:metrics-core:${dropwizard5Version}",
+                sprint_boot_starter_tomcat: "org.springframework.boot:spring-boot-starter-tomcat:${springBoot2Version}",
+                javax_servlet: "javax.servlet:javax.servlet-api:${javaxServletVersion}",
+
         ]
     }
 
@@ -217,10 +242,7 @@
         compile {
             // Detect Maven Enforcer's dependencyConvergence failures. We only
             // care for artifacts used as libraries by others.
-            if (!(project.name in ['benchmarks', 'opencensus-all',
-                                   'opencensus-exporter-stats-stackdriver',
-                                   'opencensus-exporter-trace-stackdriver',
-                                   'opencensus-exporter-trace-jaeger'])) {
+            if (!(project.name in ['benchmarks', 'opencensus-all'])) {
                 resolutionStrategy.failOnVersionConflict()
             }
         }
@@ -228,7 +250,7 @@
 
     dependencies {
         if (useCheckerFramework) {
-            ext.checkerFrameworkVersion = '2.5.5'
+            ext.checkerFrameworkVersion = '2.10.1'
 
             // 2.4.0 is the last version of the Checker Framework compiler that supports annotations
             // in comments, though it should continue to work with newer versions of the Checker Framework.
@@ -245,14 +267,14 @@
         }
 
         compileOnly libraries.errorprone,
-                    libraries.jsr305
+                libraries.jsr305
 
         testCompile libraries.guava_testlib,
                 libraries.junit,
                 libraries.mockito,
                 libraries.truth
 
-    if (useErrorProne && JavaVersion.current().isJava8Compatible()) {
+        if (useErrorProne) {
             // The ErrorProne plugin defaults to the latest, which would break our
             // build if error prone releases a new version with a new check
             errorprone "com.google.errorprone:error_prone_core:${errorProneVersion}"
@@ -297,32 +319,27 @@
         configProperties["rootDir"] = rootDir
     }
 
-    // Disable checkstyle if no java8.
-    checkstyleMain.enabled = JavaVersion.current().isJava8Compatible()
-    checkstyleTest.enabled = JavaVersion.current().isJava8Compatible()
-    checkstyleJmh.enabled = JavaVersion.current().isJava8Compatible()
+    googleJavaFormat {
+        toolVersion '1.7'
+    }
 
-    // Google formatter works only on java8.
-    if (JavaVersion.current().isJava8Compatible()) {
-        googleJavaFormat {
-            toolVersion '1.6'
+    afterEvaluate {  // Allow subproject to add more source sets.
+        tasks.googleJavaFormat {
+            source = sourceSets*.allJava
+            include '**/*.java'
         }
 
-        afterEvaluate {  // Allow subproject to add more source sets.
-            tasks.googleJavaFormat {
-                source = sourceSets*.allJava
-                include '**/*.java'
-            }
-
-            tasks.verifyGoogleJavaFormat {
-                source = sourceSets*.allJava
-                include '**/*.java'
-            }
+        tasks.verifyGoogleJavaFormat {
+            source = sourceSets*.allJava
+            include '**/*.java'
         }
     }
 
     signing {
         required false
+        if (rootProject.hasProperty('signingUseGpgCmd')) {
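+          // When -PsigningUseGpgCmd is set, delegate signing to the local gpg command instead of
+          // the default in-process signatory.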
+          useGpgCmd()
+        }
         sign configurations.archives
     }
 
@@ -419,24 +436,35 @@
                  'opencensus-contrib-agent',
                  'opencensus-contrib-appengine-standard-util',
                  'opencensus-contrib-dropwizard',
+                 'opencensus-contrib-dropwizard5',
                  'opencensus-contrib-exemplar-util',
                  'opencensus-contrib-grpc-metrics',
                  'opencensus-contrib-grpc-util',
+                 'opencensus-contrib-http-jaxrs',
+                 'opencensus-contrib-http-jetty-client',
+                 'opencensus-contrib-http-servlet',
                  'opencensus-contrib-http-util',
                  'opencensus-contrib-log-correlation-stackdriver',
-                 'opencensus-contrib-monitored-resource-util',
+                 'opencensus-contrib-observability-ready-util',
+                 'opencensus-contrib-resource-util',
                  'opencensus-contrib-spring',
                  'opencensus-contrib-spring-sleuth-v1x',
+                 'opencensus-contrib-spring-starter',
                  'opencensus-contrib-zpages',
+                 'opencensus-exporter-metrics-ocagent',
+                 'opencensus-exporter-metrics-util',
                  'opencensus-exporter-stats-prometheus',
                  'opencensus-exporter-stats-signalfx',
                  'opencensus-exporter-stats-stackdriver',
+                 'opencensus-exporter-trace-datadog',
+                 'opencensus-exporter-trace-elasticsearch',
                  'opencensus-exporter-trace-instana',
                  'opencensus-exporter-trace-logging',
                  'opencensus-exporter-trace-ocagent',
                  'opencensus-exporter-trace-stackdriver',
                  'opencensus-exporter-trace-zipkin',
                  'opencensus-exporter-trace-jaeger',
+                 'opencensus-exporter-trace-util',
                  'opencensus-impl-core',
                  'opencensus-impl-lite',
                  'opencensus-impl',
@@ -460,10 +488,10 @@
             tasks.withType(JavaCompile).all { JavaCompile compile ->
                 compile.doFirst {
                     compile.options.compilerArgs += [
-                        '-Xmaxerrs', '10000',
-                        "-Xbootclasspath/p:${configurations.checkerFrameworkAnnotatedJDK.asPath}",
-                        "-AskipDefs=\\.AutoValue_|^io.opencensus.contrib.appengine.standard.util.TraceIdProto\$|^io.opencensus.contrib.appengine.standard.util.TraceProto\$",
-                        "-AinvariantArrays"
+                            '-Xmaxerrs', '10000',
+                            "-Xbootclasspath/p:${configurations.checkerFrameworkAnnotatedJDK.asPath}",
+                            "-AskipDefs=\\.AutoValue_|^io.opencensus.contrib.appengine.standard.util.TraceIdProto\$|^io.opencensus.contrib.appengine.standard.util.TraceProto\$",
+                            "-AinvariantArrays"
                     ]
                     options.fork = true
                     options.forkOptions.jvmArgs += ["-Xbootclasspath/p:${configurations.checkerFrameworkJavac.asPath}"]
diff --git a/scripts/check-git-history.py b/buildscripts/check-git-history.py
similarity index 100%
rename from scripts/check-git-history.py
rename to buildscripts/check-git-history.py
diff --git a/buildscripts/checkstyle.xml b/buildscripts/checkstyle.xml
index 50b146e..782add8 100644
--- a/buildscripts/checkstyle.xml
+++ b/buildscripts/checkstyle.xml
@@ -173,7 +173,6 @@
             <message key="name.invalidPattern"
              value="Interface type name ''{0}'' must match pattern ''{1}''."/>
         </module>
-        <module name="NoFinalizer"/>
         <module name="GenericWhitespace">
             <message key="ws.followed"
              value="GenericWhitespace ''{0}'' is followed by whitespace."/>
diff --git a/buildscripts/import-control.xml b/buildscripts/import-control.xml
index 6dffe25..0bb9d0f 100644
--- a/buildscripts/import-control.xml
+++ b/buildscripts/import-control.xml
@@ -42,10 +42,20 @@
     <allow pkg="io.opencensus.internal"/>
     <allow pkg="io.opencensus.common"/>
     <allow pkg="io.opencensus.metrics"/>
+    <allow pkg="io.opencensus.metrics.data"/>
+    <subpackage name="data">
+      <allow pkg="io.opencensus.metrics.data"/>
+    </subpackage>
+  </subpackage>
+  <subpackage name="resource">
+    <allow pkg="io.opencensus.common"/>
+    <allow pkg="io.opencensus.internal"/>
+    <allow pkg="io.opencensus.resource"/>
   </subpackage>
   <subpackage name="stats">
     <allow pkg="io.opencensus.common"/>
     <allow pkg="io.opencensus.internal"/>
+    <allow pkg="io.opencensus.metrics.data"/>
     <allow pkg="io.opencensus.stats"/>
     <allow pkg="io.opencensus.tags"/>
   </subpackage>
@@ -80,6 +90,7 @@
       <allow pkg="io.opencensus.trace"/>
     </subpackage>
     <subpackage name="exemplar.util">
+      <allow pkg="io.opencensus.metrics.data"/>
       <allow pkg="io.opencensus.stats"/>
       <allow pkg="io.opencensus.trace"/>
     </subpackage>
@@ -88,22 +99,53 @@
       <allow pkg="io.opencensus.stats"/>
       <allow pkg="io.opencensus.tags"/>
     </subpackage>
-    <subpackage name="http.util">
+    <subpackage name="http.jetty.client">
+      <allow pkg="io.opencensus.contrib.http"/>
+      <allow pkg="io.opencensus.contrib.http.jetty.client"/>
+      <allow pkg="io.opencensus.contrib.http.util"/>
+      <allow pkg="io.opencensus.trace"/>
+      <allow pkg="org.eclipse.jetty.client"/>
+      <allow pkg="org.eclipse.jetty.util.ssl"/>
+    </subpackage>
+    <subpackage name="http.servlet">
+      <allow pkg="io.opencensus.contrib.http"/>
+      <allow pkg="io.opencensus.contrib.http.servlet"/>
+      <allow pkg="io.opencensus.contrib.http.util"/>
+      <allow pkg="io.opencensus.trace"/>
+      <allow pkg="javax.servlet"/>
+      <allow pkg="org.eclipse.jetty.server"/>
+    </subpackage>
+    <subpackage name="http">
+      <allow pkg="io.opencensus.contrib.http"/>
       <allow pkg="io.opencensus.contrib.http.util"/>
       <allow pkg="io.opencensus.stats"/>
       <allow pkg="io.opencensus.tags"/>
       <allow pkg="io.opencensus.trace"/>
+      <allow pkg="io.opencensus.trace.propagation"/>
     </subpackage>
     <subpackage name="logcorrelation.stackdriver">
       <allow pkg="com.google.cloud"/>
       <allow pkg="io.opencensus.trace"/>
     </subpackage>
     <subpackage name="spring">
+      <allow pkg="edu.umd.cs.findbugs.annotations"/>
       <allow pkg="io.opencensus.trace"/>
+      <allow pkg="io.opencensus.contrib.http"/>
+      <allow pkg="io.opencensus.contrib.http.servlet"/>
+      <allow pkg="io.opencensus.contrib.spring"/>
       <allow pkg="org.aspectj.lang"/>
       <allow pkg="org.aspectj.lang.annotation"/>
       <allow pkg="org.aspectj.lang.reflect"/>
       <allow pkg="org.springframework.beans.factory.annotation"/>
+      <allow pkg="org.springframework.beans.factory.config"/>
+      <allow pkg="org.springframework.boot.autoconfigure"/>
+      <allow pkg="org.springframework.boot.context"/>
+      <allow pkg="org.springframework.context.annotation"/>
+      <allow pkg="org.springframework.core"/>
+      <allow pkg="org.springframework.http"/>
+      <allow pkg="org.springframework.stereotype"/>
+      <allow pkg="org.springframework.util.concurrent"/>
+      <allow pkg="org.springframework.web.client"/>
       <subpackage name="sleuth">
         <allow pkg="io.opencensus.trace"/>
         <allow pkg="org.apache.commons.logging"/>
@@ -125,8 +167,22 @@
       <allow pkg="io.opencensus.tags"/>
       <allow pkg="io.opencensus.trace"/>
     </subpackage>
-    <subpackage name="monitoredresource.util">
-      <allow pkg="io.opencensus.contrib.monitoredresource.util"/>
+    <subpackage name="observability.ready.util">
+      <allow pkg="io.opencensus.contrib.grpc.metrics"/>
+      <allow pkg="io.opencensus.trace"/>
+      <allow pkg="io.opencensus.exporter.metrics.ocagent"/>
+      <allow pkg="io.opencensus.exporter.trace.ocagent"/>
+    </subpackage>
+    <subpackage name="resource.util">
+      <allow pkg="io.opencensus.contrib.resource.util"/>
+      <allow pkg="io.opencensus.resource"/>
+    </subpackage>
+    <subpackage name="dropwizard5">
+      <allow pkg="io.opencensus.contrib.dropwizard5"/>
+      <allow pkg="io.opencensus.metrics"/>
+      <allow pkg="io.opencensus.implcore"/>
+      <allow pkg="io.opencensus.internal"/>
+      <allow pkg="io.dropwizard.metrics5"/>
     </subpackage>
     <subpackage name="dropwizard">
       <allow pkg="io.opencensus.contrib.dropwizard"/>
@@ -139,9 +195,27 @@
   <subpackage name="exporter">
     <allow pkg="com.google.common"/>
     <allow pkg="io.opencensus.common"/>
+    <allow pkg="io.opencensus.resource"/>
+    <subpackage name="metrics">
+      <subpackage name="ocagent">
+        <allow pkg="com.google.protobuf"/>
+        <allow pkg="io.grpc"/>
+        <allow pkg="io.netty.handler.ssl"/>
+        <allow pkg="io.opencensus.contrib.opencensus.proto.util"/>
+        <allow pkg="io.opencensus.contrib.resource.util"/>
+        <allow pkg="io.opencensus.exporter.metrics.ocagent"/>
+        <allow pkg="io.opencensus.metrics"/>
+        <allow pkg="io.opencensus.proto"/>
+      </subpackage>
+      <subpackage name="util">
+        <allow pkg="io.opencensus.exporter.metrics.util"/>
+        <allow pkg="io.opencensus.metrics"/>
+        <allow pkg="io.opencensus.trace"/>
+      </subpackage>
+    </subpackage>
     <subpackage name="stats">
-      <allow pkg="io.opencensus.stats"/>
-      <allow pkg="io.opencensus.tags"/>
+      <allow pkg="io.opencensus.metrics"/>
+      <allow pkg="io.opencensus.exporter.metrics.util"/>
       <subpackage name="prometheus">
         <allow pkg="io.opencensus.exporter.stats.prometheus"/>
         <allow pkg="io.opencensus.trace"/>
@@ -155,24 +229,28 @@
       <subpackage name="stackdriver">
         <allow pkg="com.google"/>
         <allow pkg="io.opencensus.exporter.stats.stackdriver"/>
+        <allow pkg="io.opencensus.metrics.data"/>
         <allow pkg="io.opencensus.trace"/>
-        <allow pkg="io.opencensus.contrib.monitoredresource.util"/>
+        <allow pkg="io.opencensus.contrib.exemplar.util"/>
+        <allow pkg="io.opencensus.contrib.resource.util"/>
       </subpackage>
     </subpackage>
     <subpackage name="trace">
       <allow pkg="io.opencensus.trace"/>
+      <allow pkg="io.opencensus.exporter.trace.util"/>
       <subpackage name="instana">
         <allow pkg="io.opencensus.exporter.trace.instana"/>
       </subpackage>
       <subpackage name="jaeger">
-        <allow pkg="com.uber.jaeger"/>
+        <allow pkg="io.jaegertracing"/>
         <allow pkg="io.opencensus.exporter.trace.jaeger"/>
         <allow pkg="org.apache.thrift"/>
       </subpackage>
       <subpackage name="ocagent">
         <allow pkg="com.google.protobuf"/>
         <allow pkg="io.grpc"/>
-        <allow pkg="io.opencensus.contrib.monitoredresource.util"/>
+        <allow pkg="io.netty.handler.ssl"/>
+        <allow pkg="io.opencensus.contrib.resource.util"/>
         <allow pkg="io.opencensus.contrib.opencensus.proto.util"/>
         <allow pkg="io.opencensus.exporter.trace.ocagent"/>
         <allow pkg="io.opencensus.proto"/>
@@ -181,12 +259,18 @@
       <subpackage name="stackdriver">
         <allow pkg="com.google"/>
         <allow pkg="io.opencensus.exporter.trace.stackdriver"/>
-        <allow pkg="io.opencensus.contrib.monitoredresource.util"/>
+        <allow pkg="io.opencensus.contrib.resource.util"/>
       </subpackage>
       <subpackage name="zipkin">
         <allow pkg="io.opencensus.exporter.trace.zipkin"/>
         <allow pkg="zipkin2"/>
       </subpackage>
+      <subpackage name="datadog">
+        <allow pkg="io.opencensus.exporter.trace.datadog"/>
+        <allow pkg="edu.umd.cs.findbugs.annotations"/>
+        <allow pkg="com.google.gson"/>
+        <allow pkg="com.google.auto.value"/>
+      </subpackage>
     </subpackage>
   </subpackage>
   <subpackage name="implcore">
@@ -241,6 +325,11 @@
     <allow pkg="io.opencensus.tags"/>
     <allow pkg="io.opencensus.testing.export"/>
     <allow pkg="io.opencensus.trace"/>
+    <allow pkg="io.opencensus.metrics"/>
     <allow pkg="io.prometheus"/>
+    <allow pkg="org.apache.log4j"/>
+    <allow pkg="org.eclipse.jetty"/>
+    <allow pkg="javax.servlet"/>
+    <allow pkg="org.springframework"/>
   </subpackage>
 </import-control>
diff --git a/buildscripts/kokoro/linux_example_bazel.cfg b/buildscripts/kokoro/linux_example_bazel.cfg
deleted file mode 100644
index 3f4c872..0000000
--- a/buildscripts/kokoro/linux_example_bazel.cfg
+++ /dev/null
@@ -1,10 +0,0 @@
-# Config file for child task BUILD_EXAMPLES_BAZEL
-
-env_vars {
-  key: "TASK"
-  value: "BUILD_EXAMPLES_BAZEL"
-}
-
-# Location of the continuous shell script in repository.
-build_file: "opencensus-java/buildscripts/kokoro/linux_presubmit.sh"
-timeout_mins: 60
diff --git a/buildscripts/kokoro/linux_example_format.cfg b/buildscripts/kokoro/linux_example_format.cfg
deleted file mode 100644
index 6f9a3dc..0000000
--- a/buildscripts/kokoro/linux_example_format.cfg
+++ /dev/null
@@ -1,9 +0,0 @@
-# Config file for child task CHECK_EXAMPLES_FORMAT
-env_vars {
-  key: "TASK"
-  value: "CHECK_EXAMPLES_FORMAT"
-}
-
-# Location of the continuous shell script in repository.
-build_file: "opencensus-java/buildscripts/kokoro/linux_presubmit.sh"
-timeout_mins: 60
diff --git a/buildscripts/kokoro/linux_example_license.cfg b/buildscripts/kokoro/linux_example_license.cfg
deleted file mode 100644
index 19cc67d..0000000
--- a/buildscripts/kokoro/linux_example_license.cfg
+++ /dev/null
@@ -1,10 +0,0 @@
-# Config file for child task CHECK_EXAMPLES_LICENSE
-
-env_vars {
-  key: "TASK"
-  value: "CHECK_EXAMPLES_LICENSE"
-}
-
-# Location of the continuous shell script in repository.
-build_file: "opencensus-java/buildscripts/kokoro/linux_presubmit.sh"
-timeout_mins: 60
diff --git a/buildscripts/kokoro/linux_presubmit.sh b/buildscripts/kokoro/linux_presubmit.sh
index bb1281b..c0c36cd 100755
--- a/buildscripts/kokoro/linux_presubmit.sh
+++ b/buildscripts/kokoro/linux_presubmit.sh
@@ -18,12 +18,9 @@
   echo "Valid tasks are"
   echo ""
   echo "- BUILD"
-  echo "- BUILD_EXAMPLES_BAZEL"
   echo "- BUILD_EXAMPLES_GRADLE"
   echo "- BUILD_EXAMPLES_MAVEN"
   echo "- CHECKER_FRAMEWORK"
-  echo "- CHECK_EXAMPLES_FORMAT"
-  echo "- CHECK_EXAMPLES_LICENSE"
   echo "- CHECK_GIT_HISTORY"
 }
 
@@ -36,7 +33,7 @@
 
 case "$TASK" in
   "CHECK_GIT_HISTORY")
-    python ./scripts/check-git-history.py
+    python ./buildscripts/check-git-history.py
     ;;
   "BUILD")
     ./gradlew clean assemble --stacktrace
@@ -63,24 +60,12 @@
   "CHECKER_FRAMEWORK")
     ./gradlew clean assemble -PcheckerFramework=true
     ;;
-  "CHECK_EXAMPLES_LICENSE")
-    curl -L -o checkstyle-8.12-all.jar https://github.com/checkstyle/checkstyle/releases/download/checkstyle-8.12/checkstyle-8.12-all.jar
-    java -DrootDir=. -jar checkstyle-8.12-all.jar -c buildscripts/checkstyle.xml examples/src/
-    ;;
-  "CHECK_EXAMPLES_FORMAT")
-    curl -L -o google-java-format-1.5-all-deps.jar \
-      https://github.com/google/google-java-format/releases/download/google-java-format-1.5/google-java-format-1.5-all-deps.jar
-    java -jar google-java-format-1.5-all-deps.jar --set-exit-if-changed --dry-run `find examples/src/ -name '*.java'`
-    ;;
   "BUILD_EXAMPLES_GRADLE")
-    pushd examples && ./gradlew clean assemble --stacktrace && popd
+    pushd examples && ./gradlew clean assemble --stacktrace && ./gradlew check && ./gradlew verGJF && popd
     ;;
   "BUILD_EXAMPLES_MAVEN")
     pushd examples && mvn clean package appassembler:assemble -e && popd
     ;;
-  "BUILD_EXAMPLES_BAZEL")
-    pushd examples && bazel clean && bazel build :all && popd
-    ;;
   *)
     set +x
     echo "Unknown task $TASK"
diff --git a/buildscripts/travis_script b/buildscripts/travis_script
new file mode 100755
index 0000000..01baf2b
--- /dev/null
+++ b/buildscripts/travis_script
@@ -0,0 +1,64 @@
+#!/bin/bash
+#
+# Travis build script, cf.
+# https://docs.travis-ci.com/user/customizing-the-build/#Implementing-Complex-Build-Steps.
+
+set -o errexit
+set -o xtrace
+
+case "$TASK" in
+  "CHECK_GIT_HISTORY")
+    python "$(dirname "$0")"/check-git-history.py
+    ;;
+  "BUILD")
+    case "$TRAVIS_OS_NAME" in
+      "linux")
+        case "$TRAVIS_JDK_VERSION" in
+          "oraclejdk9")
+            ./gradlew clean assemble check --stacktrace
+            ;;
+          "openjdk10")
+            ./gradlew clean assemble check --stacktrace
+            ;;
+          "openjdk11")
+            ./gradlew clean assemble check --stacktrace
+            ;;
+          "openjdk8")
+            ./gradlew clean assemble --stacktrace
+            ./gradlew check :opencensus-all:jacocoTestReport
+            ./gradlew verGJF
+            ;;
+          *)
+            echo "Unknown JDK version $TRAVIS_JDK_VERSION"
+            exit 1
+            ;;
+        esac
+        ;;
+      "osx")
+        # OS X is a separate case, because the JDK version is determined by the OS X image:
+        # https://docs.travis-ci.com/user/reference/osx/#JDK-and-OS-X
+        ./gradlew clean assemble --stacktrace
+        ./gradlew check
+        ;;
+      *)
+        echo "Unknown OS name $TRAVIS_OS_NAME"
+        exit 1
+        ;;
+    esac
+    ;;
+  "CHECKER_FRAMEWORK")
+    ./gradlew clean assemble -PcheckerFramework=true
+    ;;
+  "BUILD_EXAMPLES_GRADLE")
+    pushd examples && ./gradlew clean assemble --stacktrace && ./gradlew check && ./gradlew verGJF && popd
+    pushd examples/spring/servlet && ./gradlew clean assemble --stacktrace && ./gradlew check && ./gradlew verGJF && popd
+    ;;
+  "BUILD_EXAMPLES_MAVEN")
+    pushd examples && mvn clean package appassembler:assemble -e && popd
+    pushd examples/spring/servlet && mvn clean package appassembler:assemble -e && popd
+    ;;
+  *)
+    echo "Unknown task $TASK"
+    exit 1
+    ;;
+esac
diff --git a/checker-framework/stubs/google-cloud-java.astub b/checker-framework/stubs/google-cloud-java.astub
new file mode 100644
index 0000000..7ff09e7
--- /dev/null
+++ b/checker-framework/stubs/google-cloud-java.astub
@@ -0,0 +1,8 @@
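+// Checker Framework stub: marks ServiceOptions.getDefaultProjectId() as @Nullable so the
+// nullness checker treats its result as possibly null.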
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+package com.google.cloud;
+
+class ServiceOptions<ServiceT extends Service<OptionsT>, OptionsT extends ServiceOptions<ServiceT, OptionsT>> {
+  @Nullable
+  static String getDefaultProjectId();
+}
diff --git a/contrib/agent/README.md b/contrib/agent/README.md
index f24c28a..3e30b7c 100644
--- a/contrib/agent/README.md
+++ b/contrib/agent/README.md
@@ -63,7 +63,7 @@
 `-javaagent:path/to/opencensus-contrib-agent-X.Y.Z.jar` to the invocation of the `java`
 executable as shown in the following example. Replace `X.Y.Z` with the actual version number.
 
-```shell
+```bash
 java -javaagent:path/to/opencensus-contrib-agent-X.Y.Z.jar ...
 ```
 
@@ -80,7 +80,7 @@
 For example, to disable the automatic context propagation for Executors, add a system property as
 follows:
 
-```shell
+```bash
 java -javaagent:path/to/opencensus-contrib-agent-X.Y.Z.jar \
      -Dopencensus.contrib.agent.context-propagation.executor.enabled=false \
      ...
diff --git a/contrib/agent/build.gradle b/contrib/agent/build.gradle
index 11271a4..194fb09 100644
--- a/contrib/agent/build.gradle
+++ b/contrib/agent/build.gradle
@@ -20,11 +20,11 @@
 
 dependencies {
   compileOnly libraries.auto_service
+  compileOnly libraries.findbugs_annotations
   compileOnly libraries.grpc_context
   compileOnly project(':opencensus-api')
   compile libraries.byte_buddy
   compile libraries.config
-  compile libraries.findbugs_annotations
   compile libraries.guava
 
   signature 'org.codehaus.mojo.signature:java17:1.0@signature'
@@ -156,9 +156,6 @@
   integrationTestRuntime project(':opencensus-impl-lite')
 }
 
-// Disable checkstyle for integration tests if not java8.
-checkstyleIntegrationTest.enabled = JavaVersion.current().isJava8Compatible()
-
 // Disable findbugs for integration tests, too.
 findbugsIntegrationTest.enabled = false
 
diff --git a/contrib/agent/src/integration-test/java/io/opencensus/contrib/agent/instrumentation/ExecutorInstrumentationIT.java b/contrib/agent/src/integration-test/java/io/opencensus/contrib/agent/instrumentation/ExecutorInstrumentationIT.java
index 7cab559..f908ba4 100644
--- a/contrib/agent/src/integration-test/java/io/opencensus/contrib/agent/instrumentation/ExecutorInstrumentationIT.java
+++ b/contrib/agent/src/integration-test/java/io/opencensus/contrib/agent/instrumentation/ExecutorInstrumentationIT.java
@@ -69,8 +69,8 @@
         new Runnable() {
           @Override
           public void run() {
-            assertThat(Thread.currentThread()).isNotSameAs(callerThread);
-            assertThat(Context.current()).isSameAs(context);
+            assertThat(Thread.currentThread()).isNotSameInstanceAs(callerThread);
+            assertThat(Context.current()).isSameInstanceAs(context);
             assertThat(KEY.get()).isEqualTo("myvalue");
             tested.release();
           }
@@ -92,8 +92,8 @@
             new Callable<Void>() {
               @Override
               public Void call() throws Exception {
-                assertThat(Thread.currentThread()).isNotSameAs(callerThread);
-                assertThat(Context.current()).isSameAs(context);
+                assertThat(Thread.currentThread()).isNotSameInstanceAs(callerThread);
+                assertThat(Context.current()).isSameInstanceAs(context);
                 assertThat(KEY.get()).isEqualTo("myvalue");
                 tested.set(true);
 
@@ -118,8 +118,8 @@
             new Runnable() {
               @Override
               public void run() {
-                assertThat(Thread.currentThread()).isNotSameAs(callerThread);
-                assertThat(Context.current()).isSameAs(context);
+                assertThat(Thread.currentThread()).isNotSameInstanceAs(callerThread);
+                assertThat(Context.current()).isSameInstanceAs(context);
                 assertThat(KEY.get()).isEqualTo("myvalue");
                 tested.set(true);
               }
@@ -143,16 +143,16 @@
             new Runnable() {
               @Override
               public void run() {
-                assertThat(Thread.currentThread()).isNotSameAs(callerThread);
-                assertThat(Context.current()).isNotSameAs(Context.ROOT);
-                assertThat(Context.current()).isSameAs(context);
+                assertThat(Thread.currentThread()).isNotSameInstanceAs(callerThread);
+                assertThat(Context.current()).isNotSameInstanceAs(Context.ROOT);
+                assertThat(Context.current()).isSameInstanceAs(context);
                 assertThat(KEY.get()).isEqualTo("myvalue");
                 tested.set(true);
               }
             },
             result);
 
-    assertThat(future.get()).isSameAs(result);
+    assertThat(future.get()).isSameInstanceAs(result);
     assertThat(tested.get()).isTrue();
   }
 
@@ -182,8 +182,8 @@
                 assertThat(ste[2].getClassName()).startsWith("io.grpc.Context$");
                 assertThat(ste[3].getClassName()).doesNotContain("Context");
 
-                assertThat(Thread.currentThread()).isNotSameAs(callerThread);
-                assertThat(Context.current()).isSameAs(context);
+                assertThat(Thread.currentThread()).isNotSameInstanceAs(callerThread);
+                assertThat(Context.current()).isSameInstanceAs(context);
                 assertThat(KEY.get()).isEqualTo("myvalue");
 
                 tested.release();
diff --git a/contrib/agent/src/integration-test/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentationIT.java b/contrib/agent/src/integration-test/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentationIT.java
index f718f49..602892a 100644
--- a/contrib/agent/src/integration-test/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentationIT.java
+++ b/contrib/agent/src/integration-test/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentationIT.java
@@ -57,7 +57,7 @@
         new Runnable() {
           @Override
           public void run() {
-            assertThat(Context.current()).isSameAs(context);
+            assertThat(Context.current()).isSameInstanceAs(context);
             assertThat(KEY.get()).isEqualTo("myvalue");
             tested.set(true);
           }
@@ -81,7 +81,7 @@
 
       @Override
       public void run() {
-        assertThat(Context.current()).isSameAs(context);
+        assertThat(Context.current()).isSameInstanceAs(context);
         assertThat(KEY.get()).isEqualTo("myvalue");
         tested.set(true);
       }
@@ -139,6 +139,6 @@
 
     // Assert that the automatic context propagation added by ThreadInstrumentation did not
     // interfere with the automatically propagated context from Executor#execute.
-    assertThat(newThreadCtx.get()).isSameAs(context);
+    assertThat(newThreadCtx.get()).isSameInstanceAs(context);
   }
 }
diff --git a/contrib/agent/src/jmh/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentationBenchmark.java b/contrib/agent/src/jmh/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentationBenchmark.java
index 706c6d3..92914cd 100644
--- a/contrib/agent/src/jmh/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentationBenchmark.java
+++ b/contrib/agent/src/jmh/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentationBenchmark.java
@@ -67,7 +67,7 @@
   @OutputTimeUnit(TimeUnit.MICROSECONDS)
   @Fork
   public void manual(Blackhole blackhole) throws InterruptedException {
-    Thread t = new Thread((Context.current().wrap(new MyRunnable(blackhole))));
+    Thread t = new Thread(Context.current().wrap(new MyRunnable(blackhole)));
     t.start();
     t.join();
   }
diff --git a/contrib/agent/src/main/java/io/opencensus/contrib/agent/instrumentation/ExecutorInstrumentation.java b/contrib/agent/src/main/java/io/opencensus/contrib/agent/instrumentation/ExecutorInstrumentation.java
index 1e1429c..07478f8 100644
--- a/contrib/agent/src/main/java/io/opencensus/contrib/agent/instrumentation/ExecutorInstrumentation.java
+++ b/contrib/agent/src/main/java/io/opencensus/contrib/agent/instrumentation/ExecutorInstrumentation.java
@@ -99,7 +99,7 @@
      * @see Advice
      */
     @Advice.OnMethodEnter
-    @SuppressWarnings(value = "UnusedAssignment")
+    @SuppressWarnings({"UnusedAssignment", "unused"})
     @SuppressFBWarnings(value = {"DLS_DEAD_LOCAL_STORE", "UPM_UNCALLED_PRIVATE_METHOD"})
     private static void enter(@Advice.Argument(value = 0, readOnly = false) Runnable runnable) {
       runnable = ContextTrampoline.wrapInCurrentContext(runnable);
diff --git a/contrib/agent/src/main/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentation.java b/contrib/agent/src/main/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentation.java
index b4beba8..a7c9418 100644
--- a/contrib/agent/src/main/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentation.java
+++ b/contrib/agent/src/main/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentation.java
@@ -83,6 +83,7 @@
      * @see Advice
      */
     @Advice.OnMethodEnter
+    @SuppressWarnings("unused")
     @SuppressFBWarnings("UPM_UNCALLED_PRIVATE_METHOD")
     private static void enter(@Advice.This Thread thread) {
       ContextTrampoline.saveContextForThread(thread);
@@ -100,6 +101,7 @@
      * @see Advice
      */
     @Advice.OnMethodEnter
+    @SuppressWarnings("unused")
     @SuppressFBWarnings("UPM_UNCALLED_PRIVATE_METHOD")
     private static void enter(@Advice.This Thread thread) {
       ContextTrampoline.attachContextForThread(thread);
diff --git a/contrib/agent/src/main/java/io/opencensus/contrib/agent/instrumentation/UrlInstrumentation.java b/contrib/agent/src/main/java/io/opencensus/contrib/agent/instrumentation/UrlInstrumentation.java
index 336f70b..27b4ef5 100644
--- a/contrib/agent/src/main/java/io/opencensus/contrib/agent/instrumentation/UrlInstrumentation.java
+++ b/contrib/agent/src/main/java/io/opencensus/contrib/agent/instrumentation/UrlInstrumentation.java
@@ -81,6 +81,7 @@
      * @see Advice
      */
     @Advice.OnMethodEnter
+    @SuppressWarnings("unused")
     @SuppressFBWarnings("UPM_UNCALLED_PRIVATE_METHOD")
     @MustBeClosed
     private static Closeable enter(@Advice.Origin("#t\\##m") String classAndMethodName) {
@@ -99,6 +100,7 @@
      * @see Advice
      */
     @Advice.OnMethodExit(onThrowable = Throwable.class)
+    @SuppressWarnings("unused")
     @SuppressFBWarnings("UPM_UNCALLED_PRIVATE_METHOD")
     private static void exit(@Advice.Enter Closeable scope, @Advice.Thrown Throwable throwable) {
       TraceTrampoline.endScope(scope, throwable);
diff --git a/contrib/agent/src/test/java/io/opencensus/contrib/agent/ResourcesTest.java b/contrib/agent/src/test/java/io/opencensus/contrib/agent/ResourcesTest.java
index 26eb696..33bc7dd 100644
--- a/contrib/agent/src/test/java/io/opencensus/contrib/agent/ResourcesTest.java
+++ b/contrib/agent/src/test/java/io/opencensus/contrib/agent/ResourcesTest.java
@@ -31,7 +31,7 @@
 import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
 
 /** Unit tests for {@link Resources}. */
 @RunWith(MockitoJUnitRunner.class)
diff --git a/contrib/agent/src/test/java/io/opencensus/contrib/agent/bootstrap/ContextTrampolineTest.java b/contrib/agent/src/test/java/io/opencensus/contrib/agent/bootstrap/ContextTrampolineTest.java
index 4ed7120..f693dc3 100644
--- a/contrib/agent/src/test/java/io/opencensus/contrib/agent/bootstrap/ContextTrampolineTest.java
+++ b/contrib/agent/src/test/java/io/opencensus/contrib/agent/bootstrap/ContextTrampolineTest.java
@@ -24,7 +24,7 @@
 import org.junit.runner.RunWith;
 import org.mockito.Mock;
 import org.mockito.Mockito;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
 
 /** Unit tests for {@link ContextTrampoline}. */
 @RunWith(MockitoJUnitRunner.class)
diff --git a/contrib/agent/src/test/java/io/opencensus/contrib/agent/bootstrap/TraceTrampolineTest.java b/contrib/agent/src/test/java/io/opencensus/contrib/agent/bootstrap/TraceTrampolineTest.java
index f1ca350..fab72db 100644
--- a/contrib/agent/src/test/java/io/opencensus/contrib/agent/bootstrap/TraceTrampolineTest.java
+++ b/contrib/agent/src/test/java/io/opencensus/contrib/agent/bootstrap/TraceTrampolineTest.java
@@ -25,7 +25,7 @@
 import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.mockito.Mockito;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
 
 /** Unit tests for {@link TraceTrampoline}. */
 @RunWith(MockitoJUnitRunner.class)
@@ -55,6 +55,6 @@
     Closeable closeable = TraceTrampoline.startScopedSpan("test");
 
     Mockito.verify(mockTraceStrategy).startScopedSpan("test");
-    assertThat(closeable).isSameAs(mockCloseable);
+    assertThat(closeable).isSameInstanceAs(mockCloseable);
   }
 }
diff --git a/contrib/agent/src/test/java/io/opencensus/contrib/agent/instrumentation/ExecutorInstrumentationTest.java b/contrib/agent/src/test/java/io/opencensus/contrib/agent/instrumentation/ExecutorInstrumentationTest.java
index 75d8940..5e244f3 100644
--- a/contrib/agent/src/test/java/io/opencensus/contrib/agent/instrumentation/ExecutorInstrumentationTest.java
+++ b/contrib/agent/src/test/java/io/opencensus/contrib/agent/instrumentation/ExecutorInstrumentationTest.java
@@ -23,7 +23,7 @@
 import net.bytebuddy.agent.builder.AgentBuilder;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
 
 /** Unit tests for {@link ExecutorInstrumentation}. */
 @RunWith(MockitoJUnitRunner.class)
@@ -41,7 +41,7 @@
 
     AgentBuilder agentBuilder2 = instrumentation.instrument(agentBuilder, settings);
 
-    assertThat(agentBuilder2).isSameAs(agentBuilder);
+    assertThat(agentBuilder2).isSameInstanceAs(agentBuilder);
   }
 
   @Test
@@ -50,6 +50,6 @@
 
     AgentBuilder agentBuilder2 = instrumentation.instrument(agentBuilder, settings);
 
-    assertThat(agentBuilder2).isNotSameAs(agentBuilder);
+    assertThat(agentBuilder2).isNotSameInstanceAs(agentBuilder);
   }
 }
diff --git a/contrib/agent/src/test/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentationTest.java b/contrib/agent/src/test/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentationTest.java
index 4585c37..c12d0cf 100644
--- a/contrib/agent/src/test/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentationTest.java
+++ b/contrib/agent/src/test/java/io/opencensus/contrib/agent/instrumentation/ThreadInstrumentationTest.java
@@ -23,7 +23,7 @@
 import net.bytebuddy.agent.builder.AgentBuilder;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
 
 /** Unit tests for {@link ThreadInstrumentation}. */
 @RunWith(MockitoJUnitRunner.class)
@@ -41,7 +41,7 @@
 
     AgentBuilder agentBuilder2 = instrumentation.instrument(agentBuilder, settings);
 
-    assertThat(agentBuilder2).isSameAs(agentBuilder);
+    assertThat(agentBuilder2).isSameInstanceAs(agentBuilder);
   }
 
   @Test
@@ -50,6 +50,6 @@
 
     AgentBuilder agentBuilder2 = instrumentation.instrument(agentBuilder, settings);
 
-    assertThat(agentBuilder2).isNotSameAs(agentBuilder);
+    assertThat(agentBuilder2).isNotSameInstanceAs(agentBuilder);
   }
 }
diff --git a/contrib/agent/src/test/java/io/opencensus/contrib/agent/instrumentation/UrlInstrumentationTest.java b/contrib/agent/src/test/java/io/opencensus/contrib/agent/instrumentation/UrlInstrumentationTest.java
index 3fa1249..85f3926 100644
--- a/contrib/agent/src/test/java/io/opencensus/contrib/agent/instrumentation/UrlInstrumentationTest.java
+++ b/contrib/agent/src/test/java/io/opencensus/contrib/agent/instrumentation/UrlInstrumentationTest.java
@@ -23,7 +23,7 @@
 import net.bytebuddy.agent.builder.AgentBuilder;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
 
 /** Unit tests for {@link UrlInstrumentation}. */
 @RunWith(MockitoJUnitRunner.class)
@@ -41,7 +41,7 @@
 
     AgentBuilder agentBuilder2 = instrumentation.instrument(agentBuilder, settings);
 
-    assertThat(agentBuilder2).isSameAs(agentBuilder);
+    assertThat(agentBuilder2).isSameInstanceAs(agentBuilder);
   }
 
   @Test
@@ -50,6 +50,6 @@
 
     AgentBuilder agentBuilder2 = instrumentation.instrument(agentBuilder, settings);
 
-    assertThat(agentBuilder2).isNotSameAs(agentBuilder);
+    assertThat(agentBuilder2).isNotSameInstanceAs(agentBuilder);
   }
 }
diff --git a/contrib/appengine_standard_util/README.md b/contrib/appengine_standard_util/README.md
index 3ff5a0a..004111d 100644
--- a/contrib/appengine_standard_util/README.md
+++ b/contrib/appengine_standard_util/README.md
@@ -16,14 +16,14 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-appengine-standard-util</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
 </dependencies>
 ```
 
 For Gradle add to your dependencies:
-```gradle
-compile 'io.opencensus:opencensus-contrib-appengine-standard-util:0.16.1'
+```groovy
+compile 'io.opencensus:opencensus-contrib-appengine-standard-util:0.28.3'
 ```
 
 [travis-image]: https://travis-ci.org/census-instrumentation/opencensus-java.svg?branch=master
diff --git a/contrib/appengine_standard_util/build.gradle b/contrib/appengine_standard_util/build.gradle
index a5c122a..86d6505 100644
--- a/contrib/appengine_standard_util/build.gradle
+++ b/contrib/appengine_standard_util/build.gradle
@@ -3,14 +3,14 @@
 apply plugin: 'java'
 apply plugin: 'com.google.protobuf'
 
-def protocVersion = '3.5.1-1'
+def protocVersion = '3.11.4'
 
 buildscript {
     repositories {
         maven { url "https://plugins.gradle.org/m2/" }
     }
     dependencies {
-        classpath "com.google.protobuf:protobuf-gradle-plugin:0.8.5"
+        classpath "com.google.protobuf:protobuf-gradle-plugin:0.8.8"
     }
 }
 
diff --git a/contrib/dropwizard/README.md b/contrib/dropwizard/README.md
index 0010d00..6d6d55d 100644
--- a/contrib/dropwizard/README.md
+++ b/contrib/dropwizard/README.md
@@ -18,14 +18,14 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-contrib-dropwizard</artifactId>
-    <version>0.17.0</version>
+    <version>0.28.3</version>
   </dependency>
 </dependencies>
 ```
 
 For Gradle add to your dependencies:
-```gradle
-compile 'io.opencensus:opencensus-dropwizard:0.17.0'
+```groovy
+compile 'io.opencensus:opencensus-contrib-dropwizard:0.28.3'
 ```
 
 ### And the following code:
diff --git a/contrib/dropwizard/src/main/java/io/opencensus/contrib/dropwizard/DropWizardMetrics.java b/contrib/dropwizard/src/main/java/io/opencensus/contrib/dropwizard/DropWizardMetrics.java
index d923183..2e1db5c 100644
--- a/contrib/dropwizard/src/main/java/io/opencensus/contrib/dropwizard/DropWizardMetrics.java
+++ b/contrib/dropwizard/src/main/java/io/opencensus/contrib/dropwizard/DropWizardMetrics.java
@@ -20,6 +20,7 @@
 import com.codahale.metrics.Gauge;
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.Meter;
+import com.codahale.metrics.MetricFilter;
 import com.codahale.metrics.Timer;
 import io.opencensus.common.Clock;
 import io.opencensus.common.Timestamp;
@@ -54,8 +55,11 @@
  * @since 0.17
  */
 public class DropWizardMetrics extends MetricProducer {
+
   @DefaultVisibilityForTesting static final String DEFAULT_UNIT = "1";
+  @DefaultVisibilityForTesting static final String NS_UNIT = "ns";
   private final List<com.codahale.metrics.MetricRegistry> metricRegistryList;
+  private final MetricFilter metricFilter;
   private final Clock clock;
   private final Timestamp cumulativeStartTimestamp;
 
@@ -66,10 +70,26 @@
    * @since 0.17
    */
   public DropWizardMetrics(List<com.codahale.metrics.MetricRegistry> metricRegistryList) {
-    Utils.checkNotNull(metricRegistryList, "metricRegistryList");
-    Utils.checkListElementNotNull(metricRegistryList, "metricRegistryList");
+    this(metricRegistryList, MetricFilter.ALL);
+  }
+
+  /**
+   * Hook the Dropwizard registry into the OpenCensus registry.
+   *
+   * @param metricRegistryList a list of {@link com.codahale.metrics.MetricRegistry}s.
+   * @param metricFilter a filter to choose which metrics to export
+   * @since 0.19
+   */
+  public DropWizardMetrics(
+      List<com.codahale.metrics.MetricRegistry> metricRegistryList, MetricFilter metricFilter) {
+    Utils.checkListElementNotNull(
+        Utils.checkNotNull(metricRegistryList, "metricRegistryList"), "metricRegistry");
     this.metricRegistryList = metricRegistryList;
+    this.metricFilter = Utils.checkNotNull(metricFilter, "metricFilter");
     clock = MillisClock.getInstance();
+
+    // TODO(mayurkale): consider adding a cache map<String, CacheEntry> where CacheEntry is
+    // {MetricDescriptor, startTime}.
     cumulativeStartTimestamp = clock.now();
   }
 
@@ -81,7 +101,8 @@
    * @return a {@code Metric}.
    */
   @SuppressWarnings("rawtypes")
-  private @Nullable Metric collectGauge(String dropwizardName, Gauge gauge) {
+  @Nullable
+  private Metric collectGauge(String dropwizardName, Gauge gauge) {
     String metricName = DropWizardUtils.generateFullMetricName(dropwizardName, "gauge");
     String metricDescription = DropWizardUtils.generateFullMetricDescription(dropwizardName, gauge);
 
@@ -159,7 +180,7 @@
         TimeSeries.createWithOnePoint(
             Collections.<LabelValue>emptyList(),
             Point.create(Value.longValue(meter.getCount()), clock.now()),
-            null);
+            cumulativeStartTimestamp);
 
     return Metric.createWithOneTimeSeries(metricDescriptor, timeSeries);
   }
@@ -176,7 +197,7 @@
     String metricDescription =
         DropWizardUtils.generateFullMetricDescription(dropwizardName, histogram);
     return collectSnapshotAndCount(
-        metricName, metricDescription, histogram.getSnapshot(), histogram.getCount());
+        metricName, metricDescription, DEFAULT_UNIT, histogram.getSnapshot(), histogram.getCount());
   }
 
   /**
@@ -190,7 +211,7 @@
     String metricName = DropWizardUtils.generateFullMetricName(dropwizardName, "timer");
     String metricDescription = DropWizardUtils.generateFullMetricDescription(dropwizardName, timer);
     return collectSnapshotAndCount(
-        metricName, metricDescription, timer.getSnapshot(), timer.getCount());
+        metricName, metricDescription, NS_UNIT, timer.getSnapshot(), timer.getCount());
   }
 
   /**
@@ -198,6 +219,7 @@
    *
    * @param metricName the metric name.
    * @param metricDescription the metric description.
+   * @param unit the metric descriptor unit.
    * @param codahaleSnapshot the snapshot object to collect
    * @param count the value or count
    * @return a {@code Metric}.
@@ -205,6 +227,7 @@
   private Metric collectSnapshotAndCount(
       String metricName,
       String metricDescription,
+      String unit,
       com.codahale.metrics.Snapshot codahaleSnapshot,
       long count) {
     List<ValueAtPercentile> valueAtPercentiles =
@@ -222,11 +245,7 @@
     // TODO(mayurkale): OPTIMIZATION: Cache the MetricDescriptor objects.
     MetricDescriptor metricDescriptor =
         MetricDescriptor.create(
-            metricName,
-            metricDescription,
-            DEFAULT_UNIT,
-            Type.SUMMARY,
-            Collections.<LabelKey>emptyList());
+            metricName, metricDescription, unit, Type.SUMMARY, Collections.<LabelKey>emptyList());
     TimeSeries timeSeries =
         TimeSeries.createWithOnePoint(
             Collections.<LabelValue>emptyList(), point, cumulativeStartTimestamp);
@@ -240,26 +259,28 @@
     ArrayList<Metric> metrics = new ArrayList<Metric>();
 
     for (com.codahale.metrics.MetricRegistry metricRegistry : metricRegistryList) {
-      for (Entry<String, Counter> counterEntry : metricRegistry.getCounters().entrySet()) {
+      for (Entry<String, Counter> counterEntry :
+          metricRegistry.getCounters(metricFilter).entrySet()) {
         metrics.add(collectCounter(counterEntry.getKey(), counterEntry.getValue()));
       }
 
-      for (Entry<String, Gauge> gaugeEntry : metricRegistry.getGauges().entrySet()) {
+      for (Entry<String, Gauge> gaugeEntry : metricRegistry.getGauges(metricFilter).entrySet()) {
         Metric metric = collectGauge(gaugeEntry.getKey(), gaugeEntry.getValue());
         if (metric != null) {
           metrics.add(metric);
         }
       }
 
-      for (Entry<String, Meter> counterEntry : metricRegistry.getMeters().entrySet()) {
+      for (Entry<String, Meter> counterEntry : metricRegistry.getMeters(metricFilter).entrySet()) {
         metrics.add(collectMeter(counterEntry.getKey(), counterEntry.getValue()));
       }
 
-      for (Entry<String, Histogram> counterEntry : metricRegistry.getHistograms().entrySet()) {
+      for (Entry<String, Histogram> counterEntry :
+          metricRegistry.getHistograms(metricFilter).entrySet()) {
         metrics.add(collectHistogram(counterEntry.getKey(), counterEntry.getValue()));
       }
 
-      for (Entry<String, Timer> counterEntry : metricRegistry.getTimers().entrySet()) {
+      for (Entry<String, Timer> counterEntry : metricRegistry.getTimers(metricFilter).entrySet()) {
         metrics.add(collectTimer(counterEntry.getKey(), counterEntry.getValue()));
       }
     }
diff --git a/contrib/dropwizard/src/test/java/io/opencensus/contrib/dropwizard/DropWizardMetricsTest.java b/contrib/dropwizard/src/test/java/io/opencensus/contrib/dropwizard/DropWizardMetricsTest.java
index 2b41e9b..499434e 100644
--- a/contrib/dropwizard/src/test/java/io/opencensus/contrib/dropwizard/DropWizardMetricsTest.java
+++ b/contrib/dropwizard/src/test/java/io/opencensus/contrib/dropwizard/DropWizardMetricsTest.java
@@ -18,13 +18,14 @@
 
 import static com.google.common.truth.Truth.assertThat;
 import static io.opencensus.contrib.dropwizard.DropWizardMetrics.DEFAULT_UNIT;
+import static io.opencensus.contrib.dropwizard.DropWizardMetrics.NS_UNIT;
 
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Gauge;
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.Meter;
+import com.codahale.metrics.MetricFilter;
 import com.codahale.metrics.Timer;
-import io.opencensus.common.Timestamp;
 import io.opencensus.metrics.LabelKey;
 import io.opencensus.metrics.export.Metric;
 import io.opencensus.metrics.export.MetricDescriptor;
@@ -35,6 +36,7 @@
 import io.opencensus.metrics.export.Value;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import org.junit.Before;
 import org.junit.Test;
@@ -46,17 +48,16 @@
 public class DropWizardMetricsTest {
 
   private com.codahale.metrics.MetricRegistry metricRegistry;
-  DropWizardMetrics dropWizardMetrics;
+  private DropWizardMetrics dropWizardMetrics;
 
   @Before
-  public void setUp() throws Exception {
+  public void setUp() {
     metricRegistry = new com.codahale.metrics.MetricRegistry();
-    dropWizardMetrics = new DropWizardMetrics(Collections.singletonList((metricRegistry)));
+    dropWizardMetrics = new DropWizardMetrics(Collections.singletonList(metricRegistry));
   }
 
   @Test
-  public void collect() throws InterruptedException {
-
+  public void collect_Counter() {
     // create dropwizard metrics
     Counter evictions = metricRegistry.counter("cache_evictions");
     evictions.inc();
@@ -64,6 +65,28 @@
     evictions.dec();
     evictions.dec(2);
 
+    ArrayList<Metric> metrics = new ArrayList<>(dropWizardMetrics.getMetrics());
+    assertThat(metrics.size()).isEqualTo(1);
+
+    assertThat(metrics.get(0).getMetricDescriptor())
+        .isEqualTo(
+            MetricDescriptor.create(
+                "codahale_cache_evictions_counter",
+                "Collected from codahale (metric=cache_evictions, "
+                    + "type=com.codahale.metrics.Counter)",
+                DEFAULT_UNIT,
+                Type.GAUGE_INT64,
+                Collections.<LabelKey>emptyList()));
+    assertThat(metrics.get(0).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+        .isEqualTo(Value.longValue(1));
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getStartTimestamp()).isNull();
+  }
+
+  @Test
+  public void collect_Gauge() {
     Gauge<Integer> integerGauge =
         new Gauge<Integer>() {
           @Override
@@ -109,54 +132,26 @@
         };
     metricRegistry.register("boolean_gauge", boolGauge);
 
-    Meter getRequests = metricRegistry.meter("get_requests");
-    getRequests.mark();
-    getRequests.mark();
-
-    Histogram resultCounts = metricRegistry.histogram("result");
-    resultCounts.update(200);
-
-    Timer timer = metricRegistry.timer("requests");
-    Timer.Context context = timer.time();
-    Thread.sleep(1L);
-    context.stop();
-
-    ArrayList<Metric> metrics = new ArrayList<Metric>(dropWizardMetrics.getMetrics());
-    assertThat(metrics.size()).isEqualTo(9);
+    ArrayList<Metric> metrics = new ArrayList<>(dropWizardMetrics.getMetrics());
+    assertThat(metrics.size()).isEqualTo(5);
 
     assertThat(metrics.get(0).getMetricDescriptor())
         .isEqualTo(
             MetricDescriptor.create(
-                "codahale_cache_evictions_counter",
-                "Collected from codahale (metric=cache_evictions, "
-                    + "type=com.codahale.metrics.Counter)",
-                DEFAULT_UNIT,
-                Type.GAUGE_INT64,
-                Collections.<LabelKey>emptyList()));
-    assertThat(metrics.get(0).getTimeSeriesList().size()).isEqualTo(1);
-    assertThat(metrics.get(0).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
-    assertThat(metrics.get(0).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
-    assertThat(metrics.get(0).getTimeSeriesList().get(0).getPoints().get(0).getValue())
-        .isEqualTo(Value.longValue(1));
-    assertThat(metrics.get(0).getTimeSeriesList().get(0).getStartTimestamp()).isEqualTo(null);
-
-    assertThat(metrics.get(1).getMetricDescriptor())
-        .isEqualTo(
-            MetricDescriptor.create(
                 "codahale_boolean_gauge_gauge",
                 "Collected from codahale (metric=boolean_gauge, "
                     + "type=io.opencensus.contrib.dropwizard.DropWizardMetricsTest$5)",
                 DEFAULT_UNIT,
                 Type.GAUGE_INT64,
                 Collections.<LabelKey>emptyList()));
-    assertThat(metrics.get(1).getTimeSeriesList().size()).isEqualTo(1);
-    assertThat(metrics.get(1).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
-    assertThat(metrics.get(1).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
-    assertThat(metrics.get(1).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+    assertThat(metrics.get(0).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getPoints().get(0).getValue())
         .isEqualTo(Value.longValue(1));
-    assertThat(metrics.get(1).getTimeSeriesList().get(0).getStartTimestamp()).isEqualTo(null);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getStartTimestamp()).isNull();
 
-    assertThat(metrics.get(2).getMetricDescriptor())
+    assertThat(metrics.get(1).getMetricDescriptor())
         .isEqualTo(
             MetricDescriptor.create(
                 "codahale_double_gauge_gauge",
@@ -165,14 +160,14 @@
                 DEFAULT_UNIT,
                 Type.GAUGE_DOUBLE,
                 Collections.<LabelKey>emptyList()));
-    assertThat(metrics.get(2).getTimeSeriesList().size()).isEqualTo(1);
-    assertThat(metrics.get(2).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
-    assertThat(metrics.get(2).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
-    assertThat(metrics.get(2).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+    assertThat(metrics.get(1).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(1).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
+    assertThat(metrics.get(1).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(1).getTimeSeriesList().get(0).getPoints().get(0).getValue())
         .isEqualTo(Value.doubleValue(1.234));
-    assertThat(metrics.get(2).getTimeSeriesList().get(0).getStartTimestamp()).isEqualTo(null);
+    assertThat(metrics.get(1).getTimeSeriesList().get(0).getStartTimestamp()).isNull();
 
-    assertThat(metrics.get(3).getMetricDescriptor())
+    assertThat(metrics.get(2).getMetricDescriptor())
         .isEqualTo(
             MetricDescriptor.create(
                 "codahale_float_gauge_gauge",
@@ -181,14 +176,14 @@
                 DEFAULT_UNIT,
                 Type.GAUGE_DOUBLE,
                 Collections.<LabelKey>emptyList()));
-    assertThat(metrics.get(3).getTimeSeriesList().size()).isEqualTo(1);
-    assertThat(metrics.get(3).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
-    assertThat(metrics.get(3).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
-    assertThat(metrics.get(3).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+    assertThat(metrics.get(2).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(2).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
+    assertThat(metrics.get(2).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(2).getTimeSeriesList().get(0).getPoints().get(0).getValue())
         .isEqualTo(Value.doubleValue(0.1234000027179718));
-    assertThat(metrics.get(3).getTimeSeriesList().get(0).getStartTimestamp()).isEqualTo(null);
+    assertThat(metrics.get(2).getTimeSeriesList().get(0).getStartTimestamp()).isNull();
 
-    assertThat(metrics.get(4).getMetricDescriptor())
+    assertThat(metrics.get(3).getMetricDescriptor())
         .isEqualTo(
             MetricDescriptor.create(
                 "codahale_integer_gauge_gauge",
@@ -197,14 +192,14 @@
                 DEFAULT_UNIT,
                 Type.GAUGE_DOUBLE,
                 Collections.<LabelKey>emptyList()));
-    assertThat(metrics.get(4).getTimeSeriesList().size()).isEqualTo(1);
-    assertThat(metrics.get(4).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
-    assertThat(metrics.get(4).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
-    assertThat(metrics.get(4).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+    assertThat(metrics.get(3).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(3).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
+    assertThat(metrics.get(3).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(3).getTimeSeriesList().get(0).getPoints().get(0).getValue())
         .isEqualTo(Value.doubleValue(1234.0));
-    assertThat(metrics.get(4).getTimeSeriesList().get(0).getStartTimestamp()).isEqualTo(null);
+    assertThat(metrics.get(3).getTimeSeriesList().get(0).getStartTimestamp()).isNull();
 
-    assertThat(metrics.get(5).getMetricDescriptor())
+    assertThat(metrics.get(4).getMetricDescriptor())
         .isEqualTo(
             MetricDescriptor.create(
                 "codahale_long_gauge_gauge",
@@ -213,14 +208,24 @@
                 DEFAULT_UNIT,
                 Type.GAUGE_DOUBLE,
                 Collections.<LabelKey>emptyList()));
-    assertThat(metrics.get(5).getTimeSeriesList().size()).isEqualTo(1);
-    assertThat(metrics.get(5).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
-    assertThat(metrics.get(5).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
-    assertThat(metrics.get(5).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+    assertThat(metrics.get(4).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(4).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
+    assertThat(metrics.get(4).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(4).getTimeSeriesList().get(0).getPoints().get(0).getValue())
         .isEqualTo(Value.doubleValue(1234.0));
-    assertThat(metrics.get(5).getTimeSeriesList().get(0).getStartTimestamp()).isEqualTo(null);
+    assertThat(metrics.get(4).getTimeSeriesList().get(0).getStartTimestamp()).isNull();
+  }
 
-    assertThat(metrics.get(6).getMetricDescriptor())
+  @Test
+  public void collect_Meter() {
+    Meter getRequests = metricRegistry.meter("get_requests");
+    getRequests.mark();
+    getRequests.mark();
+
+    ArrayList<Metric> metrics = new ArrayList<>(dropWizardMetrics.getMetrics());
+    assertThat(metrics.size()).isEqualTo(1);
+
+    assertThat(metrics.get(0).getMetricDescriptor())
         .isEqualTo(
             MetricDescriptor.create(
                 "codahale_get_requests_meter",
@@ -229,25 +234,34 @@
                 DEFAULT_UNIT,
                 Type.CUMULATIVE_INT64,
                 Collections.<LabelKey>emptyList()));
-    assertThat(metrics.get(6).getTimeSeriesList().size()).isEqualTo(1);
-    assertThat(metrics.get(6).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
-    assertThat(metrics.get(6).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
-    assertThat(metrics.get(6).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+    assertThat(metrics.get(0).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getPoints().get(0).getValue())
         .isEqualTo(Value.longValue(2));
-    assertThat(metrics.get(6).getTimeSeriesList().get(0).getStartTimestamp()).isEqualTo(null);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getStartTimestamp()).isNotNull();
+  }
 
-    assertThat(metrics.get(7).getMetricDescriptor())
+  @Test
+  public void collect_Histogram() {
+    Histogram resultCounts = metricRegistry.histogram("result");
+    resultCounts.update(200);
+
+    ArrayList<Metric> metrics = new ArrayList<>(dropWizardMetrics.getMetrics());
+    assertThat(metrics.size()).isEqualTo(1);
+
+    assertThat(metrics.get(0).getMetricDescriptor())
         .isEqualTo(
             MetricDescriptor.create(
                 "codahale_result_histogram",
-                "Collected from codahale (metric=result, " + "type=com.codahale.metrics.Histogram)",
+                "Collected from codahale (metric=result, type=com.codahale.metrics.Histogram)",
                 DEFAULT_UNIT,
                 Type.SUMMARY,
                 Collections.<LabelKey>emptyList()));
-    assertThat(metrics.get(7).getTimeSeriesList().size()).isEqualTo(1);
-    assertThat(metrics.get(7).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
-    assertThat(metrics.get(7).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
-    assertThat(metrics.get(7).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+    assertThat(metrics.get(0).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getPoints().get(0).getValue())
         .isEqualTo(
             Value.summaryValue(
                 Summary.create(
@@ -262,21 +276,31 @@
                             ValueAtPercentile.create(98.0, 200.0),
                             ValueAtPercentile.create(99.0, 200.0),
                             ValueAtPercentile.create(99.9, 200.0))))));
-    assertThat(metrics.get(7).getTimeSeriesList().get(0).getStartTimestamp())
-        .isInstanceOf(Timestamp.class);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getStartTimestamp()).isNotNull();
+  }
 
-    assertThat(metrics.get(8).getMetricDescriptor())
+  @Test
+  public void collect_Timer() throws InterruptedException {
+    Timer timer = metricRegistry.timer("requests");
+    Timer.Context context = timer.time();
+    Thread.sleep(1L);
+    context.stop();
+
+    ArrayList<Metric> metrics = new ArrayList<>(dropWizardMetrics.getMetrics());
+    assertThat(metrics.size()).isEqualTo(1);
+
+    assertThat(metrics.get(0).getMetricDescriptor())
         .isEqualTo(
             MetricDescriptor.create(
                 "codahale_requests_timer",
                 "Collected from codahale (metric=requests, " + "type=com.codahale.metrics.Timer)",
-                DEFAULT_UNIT,
+                NS_UNIT,
                 Type.SUMMARY,
                 Collections.<LabelKey>emptyList()));
-    assertThat(metrics.get(8).getTimeSeriesList().size()).isEqualTo(1);
-    assertThat(metrics.get(8).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
-    assertThat(metrics.get(8).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
-    assertThat(metrics.get(8).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+    assertThat(metrics.get(0).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getPoints().get(0).getValue())
         .isEqualTo(
             Value.summaryValue(
                 Summary.create(
@@ -292,13 +316,30 @@
                             ValueAtPercentile.create(99.0, timer.getSnapshot().get99thPercentile()),
                             ValueAtPercentile.create(
                                 99.9, timer.getSnapshot().get999thPercentile()))))));
-
-    assertThat(metrics.get(8).getTimeSeriesList().get(0).getStartTimestamp())
-        .isInstanceOf(Timestamp.class);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getStartTimestamp()).isNotNull();
   }
 
   @Test
   public void empty_GetMetrics() {
     assertThat(dropWizardMetrics.getMetrics()).isEmpty();
   }
+
+  @Test
+  public void filter_GetMetrics() {
+    MetricFilter filter =
+        new MetricFilter() {
+          @Override
+          public boolean matches(String name, com.codahale.metrics.Metric metric) {
+            return name.startsWith("test");
+          }
+        };
+    dropWizardMetrics = new DropWizardMetrics(Collections.singletonList(metricRegistry), filter);
+    metricRegistry.timer("test_requests");
+    metricRegistry.timer("requests");
+
+    Collection<Metric> metrics = dropWizardMetrics.getMetrics();
+    assertThat(metrics).hasSize(1);
+    Metric value = metrics.iterator().next();
+    assertThat(value.getMetricDescriptor().getName()).isEqualTo("codahale_test_requests_timer");
+  }
 }
diff --git a/contrib/dropwizard5/README.md b/contrib/dropwizard5/README.md
new file mode 100644
index 0000000..28f88bd
--- /dev/null
+++ b/contrib/dropwizard5/README.md
@@ -0,0 +1,125 @@
+# OpenCensus DropWizard Util for Java
+
+The *OpenCensus DropWizard Util for Java* provides an easy way to translate Dropwizard metrics to
+OpenCensus.
+
+## Quickstart
+
+### Prerequisites
+
+This assumes you already have both the OpenCensus and Dropwizard client libraries set up and
+working inside your application.
+
+### Add the dependencies to your project
+
+For Maven add to your `pom.xml`:
+```xml
+<dependencies>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-contrib-dropwizard5</artifactId>
+    <version>0.28.3</version>
+  </dependency>
+</dependencies>
+```
+
+For Gradle add to your dependencies:
+```groovy
+compile 'io.opencensus:opencensus-contrib-dropwizard5:0.28.3'
+```
+
+### And the following code:
+
+```java
+import io.dropwizard.metrics5.MetricName;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+public class YourClass {
+  // Create registry for Dropwizard metrics.
+  static final io.dropwizard.metrics5.MetricRegistry codahaleRegistry =
+    new io.dropwizard.metrics5.MetricRegistry();
+
+  // Create a Dropwizard counter with tags.
+  static final Map<String, String> tags = new HashMap<>();
+
+  static {
+    tags.put("tag1", "value1");
+    tags.put("tag2", "value2");
+  }
+
+  static final io.dropwizard.metrics5.Counter requests =
+    codahaleRegistry.counter(new MetricName("requests", tags));
+
+  public static void main(String[] args) {
+
+    // Increment the requests.
+    requests.inc();
+
+    // Hook the Dropwizard registry into the OpenCensus registry
+    // via the DropWizardMetrics metric producer.
+    io.opencensus.metrics.Metrics.getExportComponent().getMetricProducerManager().add(
+          new io.opencensus.contrib.dropwizard5.DropWizardMetrics(
+            Collections.singletonList(codahaleRegistry)));
+  }
+}
+```
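+
+If you only need to export a subset of the Dropwizard metrics, the producer can also be given a
+`MetricFilter`. This is a minimal sketch: the class name `FilteredExport` and the matching rule
+are made up for illustration, and the `matches(MetricName, Metric)` callback shape is assumed
+from the metrics5 API.
+
+```java
+import io.dropwizard.metrics5.Metric;
+import io.dropwizard.metrics5.MetricFilter;
+import io.dropwizard.metrics5.MetricName;
+import io.dropwizard.metrics5.MetricRegistry;
+import java.util.Collections;
+
+public class FilteredExport {
+  public static void main(String[] args) {
+    MetricRegistry codahaleRegistry = new MetricRegistry();
+
+    // Assumed callback shape: export only metrics whose key starts with "requests".
+    MetricFilter onlyRequests =
+        new MetricFilter() {
+          @Override
+          public boolean matches(MetricName name, Metric metric) {
+            return name.getKey().startsWith("requests");
+          }
+        };
+
+    io.opencensus.metrics.Metrics.getExportComponent().getMetricProducerManager().add(
+        new io.opencensus.contrib.dropwizard5.DropWizardMetrics(
+            Collections.singletonList(codahaleRegistry), onlyRequests));
+  }
+}
+```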
+
+## Translation to OpenCensus Metrics
+
+This section describes how each of the DropWizard metrics translate into OpenCensus metrics.
+
+### DropWizard Counters
+
+Given a DropWizard Counter with name `cache_evictions`, the following values are reported:
+
+* name: dropwizard5_<initial_metric_name>_<initial_type> (ex: dropwizard5_cache_evictions_counter)
+* description: Collected from dropwizard5 (metric=<metric_name>, type=<class_name>)
+(ex: Collected from dropwizard5 (metric=cache_evictions, type=io.dropwizard.metrics5.Counter))
+* type: GAUGE_INT64
+* unit: 1
+* labels: metrics tags are converted to label keys/values
+
+Note: OpenCensus's CUMULATIVE_INT64 type represents monotonically increasing values. Since a
+DropWizard Counter can go up and down, it makes sense to report it as an OpenCensus GAUGE_INT64.
+
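+For example, tags on the Dropwizard `MetricName` become the label keys/values of the exported
+time series. A minimal sketch (the class name `TaggedCounterExample` is made up for
+illustration; the expected output in the comments follows the rules above):
+
+```java
+import io.dropwizard.metrics5.Counter;
+import io.dropwizard.metrics5.MetricName;
+import io.dropwizard.metrics5.MetricRegistry;
+import java.util.Collections;
+import java.util.Map;
+
+public class TaggedCounterExample {
+  public static void main(String[] args) {
+    MetricRegistry registry = new MetricRegistry();
+    Map<String, String> tags = Collections.singletonMap("tag1", "value1");
+
+    // A counter registered as "cache_evictions" with one tag.
+    Counter evictions = registry.counter(new MetricName("cache_evictions", tags));
+    evictions.inc();
+
+    // Expected export (per the rules above):
+    //   name:   dropwizard5_cache_evictions_counter
+    //   type:   GAUGE_INT64, unit: 1
+    //   labels: key "tag1" -> value "value1"
+  }
+}
+```
+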
+### DropWizard Gauges
+
+Given a DropWizard Gauge with name `line_requests`, the following values are reported:
+
+* name: dropwizard5_<initial_metric_name>_<initial_type> (ex: dropwizard5_line_requests_gauge)
+* description: Collected from dropwizard5 (metric=<metric_name>, type=<class_name>)
+* type: GAUGE_INT64 or GAUGE_DOUBLE
+* unit: 1
+* labels: metrics tags are converted to label keys/values
+
+
+Note: For simplicity, OpenCensus uses GAUGE_DOUBLE type for any Number and GAUGE_INT64
+type for Boolean values.
+
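+A minimal sketch of the two cases (the class and gauge names below are made up for
+illustration):
+
+```java
+import io.dropwizard.metrics5.Gauge;
+import io.dropwizard.metrics5.MetricName;
+import io.dropwizard.metrics5.MetricRegistry;
+import java.util.Collections;
+
+public class GaugeMappingExample {
+  // A Number-valued gauge: exported as GAUGE_DOUBLE.
+  static class QueueSizeGauge implements Gauge<Integer> {
+    @Override
+    public Integer getValue() {
+      return 42;
+    }
+  }
+
+  // A Boolean-valued gauge: exported as GAUGE_INT64 (true -> 1, false -> 0).
+  static class LeaderGauge implements Gauge<Boolean> {
+    @Override
+    public Boolean getValue() {
+      return true;
+    }
+  }
+
+  public static void main(String[] args) {
+    MetricRegistry registry = new MetricRegistry();
+    registry.gauge(new MetricName("queue_size", Collections.emptyMap()), QueueSizeGauge::new);
+    registry.gauge(new MetricName("is_leader", Collections.emptyMap()), LeaderGauge::new);
+  }
+}
+```
+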
+### DropWizard Meters
+
+Given a DropWizard Meter with name `get_requests`, the following values are reported:
+
+* name: dropwizard5_<initial_metric_name>_<initial_type> (ex: dropwizard5_get_requests_meter)
+* description: Collected from dropwizard5 (metric=<metric_name>, type=<class_name>)
+* type: CUMULATIVE_INT64
+* unit: 1
+* labels: metrics tags are converted to label keys/values
+
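+A minimal sketch (the class name `MeterExample` is made up for illustration):
+
+```java
+import io.dropwizard.metrics5.Meter;
+import io.dropwizard.metrics5.MetricName;
+import io.dropwizard.metrics5.MetricRegistry;
+import java.util.Collections;
+
+public class MeterExample {
+  public static void main(String[] args) {
+    MetricRegistry registry = new MetricRegistry();
+    Meter getRequests = registry.meter(new MetricName("get_requests", Collections.emptyMap()));
+
+    // Each mark() increments the count that is exported as a CUMULATIVE_INT64 point.
+    getRequests.mark();
+    getRequests.mark();
+  }
+}
+```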
+
+### DropWizard Histograms
+
+Given a DropWizard Histogram with name `results`, the following values are reported:
+
+* name: dropwizard5_<initial_metric_name>_<initial_type> (ex: dropwizard5_results_histogram)
+* description: Collected from dropwizard5 (metric=<metric_name>, type=<class_name>)
+* type: SUMMARY
+* unit: 1
+* labels: metrics tags are converted to label keys/values
+
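+A minimal sketch (the class name `HistogramExample` is made up for illustration):
+
+```java
+import io.dropwizard.metrics5.Histogram;
+import io.dropwizard.metrics5.MetricName;
+import io.dropwizard.metrics5.MetricRegistry;
+import java.util.Collections;
+
+public class HistogramExample {
+  public static void main(String[] args) {
+    MetricRegistry registry = new MetricRegistry();
+    Histogram resultCounts =
+        registry.histogram(new MetricName("results", Collections.emptyMap()));
+
+    // Recorded values feed the percentile snapshot exported in the SUMMARY metric.
+    resultCounts.update(200);
+  }
+}
+```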
+
+### DropWizard Timers
+
+Given a DropWizard Timer with name `requests`, the following values are reported:
+
+* name: dropwizard5_<initial_metric_name>_<initial_type> (ex: dropwizard5_requests_timer)
+* description: Collected from dropwizard5 (metric=<metric_name>, type=<class_name>)
+* type: SUMMARY
+* unit: ns
+* labels: metrics tags are converted to label keys/values
+
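+A minimal sketch of a timed operation (the class name `TimedRequestExample` is made up for
+illustration); the recorded durations are exported as a single SUMMARY metric named
+`dropwizard5_requests_timer` with unit `ns`:
+
+```java
+import io.dropwizard.metrics5.MetricName;
+import io.dropwizard.metrics5.MetricRegistry;
+import io.dropwizard.metrics5.Timer;
+import java.util.Collections;
+
+public class TimedRequestExample {
+  public static void main(String[] args) throws InterruptedException {
+    MetricRegistry registry = new MetricRegistry();
+    Timer requests = registry.timer(new MetricName("requests", Collections.emptyMap()));
+
+    // Time one request; the elapsed nanoseconds feed the exported percentile snapshot.
+    Timer.Context context = requests.time();
+    Thread.sleep(1L);
+    context.stop();
+  }
+}
+```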
diff --git a/contrib/dropwizard5/build.gradle b/contrib/dropwizard5/build.gradle
new file mode 100644
index 0000000..4a647e2
--- /dev/null
+++ b/contrib/dropwizard5/build.gradle
@@ -0,0 +1,17 @@
+description = 'OpenCensus dropwizard5 util'
+
+apply plugin: 'java'
+
+[compileJava, compileTestJava].each() {
+    it.sourceCompatibility = 1.8
+    it.targetCompatibility = 1.8
+}
+
+dependencies {
+    compile project(':opencensus-api'),
+            project(':opencensus-impl-core')
+
+    compile libraries.dropwizard5
+
+    signature "org.codehaus.mojo.signature:java18:+@signature"
+}
diff --git a/contrib/dropwizard5/src/main/java/io/opencensus/contrib/dropwizard5/DropWizardMetrics.java b/contrib/dropwizard5/src/main/java/io/opencensus/contrib/dropwizard5/DropWizardMetrics.java
new file mode 100644
index 0000000..a97282c
--- /dev/null
+++ b/contrib/dropwizard5/src/main/java/io/opencensus/contrib/dropwizard5/DropWizardMetrics.java
@@ -0,0 +1,315 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.dropwizard5;
+
+import io.dropwizard.metrics5.Counter;
+import io.dropwizard.metrics5.Gauge;
+import io.dropwizard.metrics5.Histogram;
+import io.dropwizard.metrics5.Meter;
+import io.dropwizard.metrics5.MetricFilter;
+import io.dropwizard.metrics5.MetricName;
+import io.dropwizard.metrics5.Timer;
+import io.opencensus.common.Clock;
+import io.opencensus.common.Timestamp;
+import io.opencensus.implcore.common.MillisClock;
+import io.opencensus.internal.DefaultVisibilityForTesting;
+import io.opencensus.internal.Utils;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.MetricProducer;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.Summary;
+import io.opencensus.metrics.export.Summary.Snapshot;
+import io.opencensus.metrics.export.Summary.Snapshot.ValueAtPercentile;
+import io.opencensus.metrics.export.TimeSeries;
+import io.opencensus.metrics.export.Value;
+import java.util.AbstractMap;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map.Entry;
+import javax.annotation.Nullable;
+
+/**
+ * Collects DropWizard metrics from a list of {@link io.dropwizard.metrics5.MetricRegistry}s.
+ *
+ * <p>A {@link io.opencensus.metrics.export.MetricProducer} that wraps a DropWizardMetrics.
+ *
+ * @since 0.19
+ */
+public class DropWizardMetrics extends MetricProducer {
+
+  @DefaultVisibilityForTesting static final String DEFAULT_UNIT = "1";
+  @DefaultVisibilityForTesting static final String NS_UNIT = "ns";
+  private final List<io.dropwizard.metrics5.MetricRegistry> metricRegistryList;
+  private final MetricFilter metricFilter;
+  private final Clock clock;
+  private final Timestamp cumulativeStartTimestamp;
+
+  /**
+   * Hook the Dropwizard registry into the OpenCensus registry.
+   *
+   * @param metricRegistryList a list of {@link io.dropwizard.metrics5.MetricRegistry}s.
+   * @since 0.19
+   */
+  public DropWizardMetrics(List<io.dropwizard.metrics5.MetricRegistry> metricRegistryList) {
+    this(metricRegistryList, MetricFilter.ALL);
+  }
+
+  /**
+   * Hook the Dropwizard registry into the OpenCensus registry.
+   *
+   * @param metricRegistryList a list of {@link io.dropwizard.metrics5.MetricRegistry}s.
+   * @param metricFilter a filter to choose which metrics to export
+   * @since 0.19
+   */
+  public DropWizardMetrics(
+      List<io.dropwizard.metrics5.MetricRegistry> metricRegistryList, MetricFilter metricFilter) {
+    Utils.checkListElementNotNull(
+        Utils.checkNotNull(metricRegistryList, "metricRegistryList"), "metricRegistry");
+    this.metricRegistryList = metricRegistryList;
+    this.metricFilter = Utils.checkNotNull(metricFilter, "metricFilter");
+    clock = MillisClock.getInstance();
+
+    // TODO(mayurkale): consider adding a cache map<String, CacheEntry> where CacheEntry is
+    // {MetricDescriptor, startTime}.
+    cumulativeStartTimestamp = clock.now();
+  }
+
+  /**
+   * Returns a {@code Metric} collected from {@link Gauge}.
+   *
+   * @param dropwizardMetric the metric name.
+   * @param gauge the gauge object to collect.
+   * @return a {@code Metric}.
+   */
+  @SuppressWarnings("rawtypes")
+  @Nullable
+  private Metric collectGauge(MetricName dropwizardMetric, Gauge gauge) {
+    // TODO cache dropwizard MetricName -> OC MetricDescriptor, Labels conversion
+    String metricName = DropWizardUtils.generateFullMetricName(dropwizardMetric.getKey(), "gauge");
+    String metricDescription =
+        DropWizardUtils.generateFullMetricDescription(dropwizardMetric.getKey(), gauge);
+    AbstractMap.SimpleImmutableEntry<List<LabelKey>, List<LabelValue>> labels =
+        DropWizardUtils.generateLabels(dropwizardMetric);
+    // Figure out which gauge instance and call the right method to get value
+    Type type;
+    Value value;
+
+    Object obj = gauge.getValue();
+    if (obj instanceof Number) {
+      type = Type.GAUGE_DOUBLE;
+      value = Value.doubleValue(((Number) obj).doubleValue());
+    } else if (obj instanceof Boolean) {
+      type = Type.GAUGE_INT64;
+      value = Value.longValue(((Boolean) obj) ? 1 : 0);
+    } else {
+      // Ignore this gauge (dropwizardMetric.getKey()): its value type is not handled.
+      return null;
+    }
+
+    MetricDescriptor metricDescriptor =
+        MetricDescriptor.create(metricName, metricDescription, DEFAULT_UNIT, type, labels.getKey());
+    TimeSeries timeSeries =
+        TimeSeries.createWithOnePoint(labels.getValue(), Point.create(value, clock.now()), null);
+    return Metric.createWithOneTimeSeries(metricDescriptor, timeSeries);
+  }
+
+  /**
+   * Returns a {@code Metric} collected from {@link Counter}.
+   *
+   * @param dropwizardMetric the metric name.
+   * @param counter the counter object to collect.
+   * @return a {@code Metric}.
+   */
+  private Metric collectCounter(MetricName dropwizardMetric, Counter counter) {
+    String metricName =
+        DropWizardUtils.generateFullMetricName(dropwizardMetric.getKey(), "counter");
+
+    String metricDescription =
+        DropWizardUtils.generateFullMetricDescription(dropwizardMetric.getKey(), counter);
+    AbstractMap.SimpleImmutableEntry<List<LabelKey>, List<LabelValue>> labels =
+        DropWizardUtils.generateLabels(dropwizardMetric);
+
+    MetricDescriptor metricDescriptor =
+        MetricDescriptor.create(
+            metricName, metricDescription, DEFAULT_UNIT, Type.GAUGE_INT64, labels.getKey());
+
+    TimeSeries timeSeries =
+        TimeSeries.createWithOnePoint(
+            labels.getValue(),
+            Point.create(Value.longValue(counter.getCount()), clock.now()),
+            null);
+    return Metric.createWithOneTimeSeries(metricDescriptor, timeSeries);
+  }
+
+  /**
+   * Returns a {@code Metric} collected from {@link io.dropwizard.metrics5.Meter}.
+   *
+   * @param dropwizardMetric the metric name.
+   * @param meter the meter object to collect
+   * @return a {@code Metric}.
+   */
+  private Metric collectMeter(MetricName dropwizardMetric, Meter meter) {
+    String metricName = DropWizardUtils.generateFullMetricName(dropwizardMetric.getKey(), "meter");
+    String metricDescription =
+        DropWizardUtils.generateFullMetricDescription(dropwizardMetric.getKey(), meter);
+    final AbstractMap.SimpleImmutableEntry<List<LabelKey>, List<LabelValue>> labels =
+        DropWizardUtils.generateLabels(dropwizardMetric);
+
+    MetricDescriptor metricDescriptor =
+        MetricDescriptor.create(
+            metricName, metricDescription, DEFAULT_UNIT, Type.CUMULATIVE_INT64, labels.getKey());
+    TimeSeries timeSeries =
+        TimeSeries.createWithOnePoint(
+            labels.getValue(),
+            Point.create(Value.longValue(meter.getCount()), clock.now()),
+            cumulativeStartTimestamp);
+
+    return Metric.createWithOneTimeSeries(metricDescriptor, timeSeries);
+  }
+
+  /**
+   * Returns a {@code Metric} collected from {@link Histogram}.
+   *
+   * @param dropwizardMetric the metric name.
+   * @param histogram the histogram object to collect
+   * @return a {@code Metric}.
+   */
+  private Metric collectHistogram(MetricName dropwizardMetric, Histogram histogram) {
+    String metricName =
+        DropWizardUtils.generateFullMetricName(dropwizardMetric.getKey(), "histogram");
+    String metricDescription =
+        DropWizardUtils.generateFullMetricDescription(dropwizardMetric.getKey(), histogram);
+    final AbstractMap.SimpleImmutableEntry<List<LabelKey>, List<LabelValue>> labels =
+        DropWizardUtils.generateLabels(dropwizardMetric);
+
+    return collectSnapshotAndCount(
+        metricName,
+        metricDescription,
+        labels.getKey(),
+        labels.getValue(),
+        DEFAULT_UNIT,
+        histogram.getSnapshot(),
+        histogram.getCount());
+  }
+
+  /**
+   * Returns a {@code Metric} collected from {@link Timer}.
+   *
+   * @param dropwizardMetric the metric name.
+   * @param timer the timer object to collect
+   * @return a {@code Metric}.
+   */
+  private Metric collectTimer(MetricName dropwizardMetric, Timer timer) {
+    String metricName = DropWizardUtils.generateFullMetricName(dropwizardMetric.getKey(), "timer");
+    String metricDescription =
+        DropWizardUtils.generateFullMetricDescription(dropwizardMetric.getKey(), timer);
+    final AbstractMap.SimpleImmutableEntry<List<LabelKey>, List<LabelValue>> labels =
+        DropWizardUtils.generateLabels(dropwizardMetric);
+    return collectSnapshotAndCount(
+        metricName,
+        metricDescription,
+        labels.getKey(),
+        labels.getValue(),
+        NS_UNIT,
+        timer.getSnapshot(),
+        timer.getCount());
+  }
+
+  /**
+   * Returns a {@code Metric} collected from {@link Snapshot}.
+   *
+   * @param metricName the metric name.
+   * @param metricDescription the metric description.
+   * @param labelKeys metric label keys
+   * @param labelValues metric label values
+   * @param unit the metric descriptor unit.
+   * @param codahaleSnapshot the snapshot object to collect
+   * @param count the value or count
+   * @return a {@code Metric}.
+   */
+  private Metric collectSnapshotAndCount(
+      String metricName,
+      String metricDescription,
+      List<LabelKey> labelKeys,
+      List<LabelValue> labelValues,
+      String unit,
+      io.dropwizard.metrics5.Snapshot codahaleSnapshot,
+      long count) {
+    List<ValueAtPercentile> valueAtPercentiles =
+        Arrays.asList(
+            ValueAtPercentile.create(50.0, codahaleSnapshot.getMedian()),
+            ValueAtPercentile.create(75.0, codahaleSnapshot.get75thPercentile()),
+            ValueAtPercentile.create(98.0, codahaleSnapshot.get98thPercentile()),
+            ValueAtPercentile.create(99.0, codahaleSnapshot.get99thPercentile()),
+            ValueAtPercentile.create(99.9, codahaleSnapshot.get999thPercentile()));
+
+    Snapshot snapshot = Snapshot.create((long) codahaleSnapshot.size(), 0.0, valueAtPercentiles);
+    Point point =
+        Point.create(Value.summaryValue(Summary.create(count, 0.0, snapshot)), clock.now());
+
+    // TODO(mayurkale): OPTIMIZATION: Cache the MetricDescriptor objects.
+    MetricDescriptor metricDescriptor =
+        MetricDescriptor.create(metricName, metricDescription, unit, Type.SUMMARY, labelKeys);
+    TimeSeries timeSeries =
+        TimeSeries.createWithOnePoint(labelValues, point, cumulativeStartTimestamp);
+
+    return Metric.createWithOneTimeSeries(metricDescriptor, timeSeries);
+  }
+
+  @Override
+  @SuppressWarnings("rawtypes")
+  public Collection<Metric> getMetrics() {
+    ArrayList<Metric> metrics = new ArrayList<Metric>();
+
+    for (io.dropwizard.metrics5.MetricRegistry metricRegistry : metricRegistryList) {
+      for (Entry<MetricName, Counter> counterEntry :
+          metricRegistry.getCounters(metricFilter).entrySet()) {
+        metrics.add(collectCounter(counterEntry.getKey(), counterEntry.getValue()));
+      }
+
+      for (Entry<MetricName, Gauge> gaugeEntry :
+          metricRegistry.getGauges(metricFilter).entrySet()) {
+        Metric metric = collectGauge(gaugeEntry.getKey(), gaugeEntry.getValue());
+        if (metric != null) {
+          metrics.add(metric);
+        }
+      }
+
+      for (Entry<MetricName, Meter> counterEntry :
+          metricRegistry.getMeters(metricFilter).entrySet()) {
+        metrics.add(collectMeter(counterEntry.getKey(), counterEntry.getValue()));
+      }
+
+      for (Entry<MetricName, Histogram> counterEntry :
+          metricRegistry.getHistograms(metricFilter).entrySet()) {
+        metrics.add(collectHistogram(counterEntry.getKey(), counterEntry.getValue()));
+      }
+
+      for (Entry<MetricName, Timer> counterEntry :
+          metricRegistry.getTimers(metricFilter).entrySet()) {
+        metrics.add(collectTimer(counterEntry.getKey(), counterEntry.getValue()));
+      }
+    }
+
+    return metrics;
+  }
+}
diff --git a/contrib/dropwizard5/src/main/java/io/opencensus/contrib/dropwizard5/DropWizardUtils.java b/contrib/dropwizard5/src/main/java/io/opencensus/contrib/dropwizard5/DropWizardUtils.java
new file mode 100644
index 0000000..e802fb3
--- /dev/null
+++ b/contrib/dropwizard5/src/main/java/io/opencensus/contrib/dropwizard5/DropWizardUtils.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.dropwizard5;
+
+import io.dropwizard.metrics5.Metric;
+import io.dropwizard.metrics5.MetricName;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import java.util.AbstractMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/** Util methods for generating unique metric names and descriptions. */
+final class DropWizardUtils {
+  private static final String SOURCE = "dropwizard5";
+  private static final char DELIMITER = '_';
+
+  /**
+   * Returns the metric name.
+   *
+   * @param name the initial metric name
+   * @param type the initial type of the metric.
+   * @return the unique metric name
+   */
+  static String generateFullMetricName(String name, String type) {
+    return SOURCE + DELIMITER + name + DELIMITER + type;
+  }
+
+  /**
+   * Returns the metric description.
+   *
+   * @param metricName the initial metric name
+   * @param metric the codahale metric class.
+   * @return the custom metric description
+   */
+  static String generateFullMetricDescription(String metricName, Metric metric) {
+    return "Collected from "
+        + SOURCE
+        + " (metric="
+        + metricName
+        + ", type="
+        + metric.getClass().getName()
+        + ")";
+  }
+
+  static AbstractMap.SimpleImmutableEntry<List<LabelKey>, List<LabelValue>> generateLabels(
+      MetricName dropwizardMetric) {
+    List<LabelKey> labelKeys = new ArrayList<>();
+    List<LabelValue> labelValues = new ArrayList<>();
+    for (Map.Entry<String, String> tag : dropwizardMetric.getTags().entrySet()) {
+      labelKeys.add(LabelKey.create(tag.getKey(), tag.getKey()));
+      labelValues.add(LabelValue.create(tag.getValue()));
+    }
+    return new AbstractMap.SimpleImmutableEntry<>(labelKeys, labelValues);
+  }
+
+  private DropWizardUtils() {}
+}
diff --git a/contrib/dropwizard5/src/test/java/io/opencensus/contrib/dropwizard5/DropWizardMetricsTest.java b/contrib/dropwizard5/src/test/java/io/opencensus/contrib/dropwizard5/DropWizardMetricsTest.java
new file mode 100644
index 0000000..5dd0963
--- /dev/null
+++ b/contrib/dropwizard5/src/test/java/io/opencensus/contrib/dropwizard5/DropWizardMetricsTest.java
@@ -0,0 +1,374 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.dropwizard5;
+
+import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.contrib.dropwizard5.DropWizardMetrics.DEFAULT_UNIT;
+import static io.opencensus.contrib.dropwizard5.DropWizardMetrics.NS_UNIT;
+
+import io.dropwizard.metrics5.Counter;
+import io.dropwizard.metrics5.Gauge;
+import io.dropwizard.metrics5.Histogram;
+import io.dropwizard.metrics5.Meter;
+import io.dropwizard.metrics5.MetricFilter;
+import io.dropwizard.metrics5.MetricName;
+import io.dropwizard.metrics5.Timer;
+import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Summary;
+import io.opencensus.metrics.export.Summary.Snapshot;
+import io.opencensus.metrics.export.Summary.Snapshot.ValueAtPercentile;
+import io.opencensus.metrics.export.Value;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link DropWizardMetrics}. */
+@RunWith(JUnit4.class)
+public class DropWizardMetricsTest {
+
+  private io.dropwizard.metrics5.MetricRegistry metricRegistry;
+  private DropWizardMetrics dropWizardMetrics;
+
+  @Before
+  public void setUp() {
+    metricRegistry = new io.dropwizard.metrics5.MetricRegistry();
+    dropWizardMetrics = new DropWizardMetrics(Collections.singletonList(metricRegistry));
+  }
+
+  @Test
+  public void collect() throws InterruptedException {
+
+    // create dropwizard metrics
+    Map<String, String> tags = new HashMap<>();
+    tags.put("tag1", "value1");
+    tags.put("tag2", "value2");
+    List<LabelKey> labelKeys = new ArrayList<>();
+    List<LabelValue> labelValues = new ArrayList<>();
+    for (Map.Entry<String, String> e : tags.entrySet()) {
+      labelKeys.add(LabelKey.create(e.getKey(), e.getKey()));
+      labelValues.add(LabelValue.create(e.getValue()));
+    }
+    Counter evictions = metricRegistry.counter(new MetricName("cache_evictions", tags));
+    evictions.inc();
+    evictions.inc(3);
+    evictions.dec();
+    evictions.dec(2);
+    metricRegistry.gauge(new MetricName("boolean_gauge", tags), BooleanGauge::new);
+    metricRegistry.gauge(new MetricName("double_gauge", tags), DoubleGauge::new);
+    metricRegistry.gauge(new MetricName("float_gauge", tags), FloatGauge::new);
+    metricRegistry.gauge(new MetricName("integer_gauge", tags), IntegerGauge::new);
+    metricRegistry.gauge(new MetricName("long_gauge", tags), LongGauge::new);
+    metricRegistry.gauge(
+        new MetricName("notags_boolean_gauge", Collections.emptyMap()), BooleanGauge::new);
+
+    Meter getRequests = metricRegistry.meter(new MetricName("get_requests", tags));
+    getRequests.mark();
+    getRequests.mark();
+
+    Histogram resultCounts = metricRegistry.histogram(new MetricName("result", tags));
+    resultCounts.update(200);
+
+    Timer timer = metricRegistry.timer(new MetricName("requests", tags));
+    Timer.Context context = timer.time();
+    Thread.sleep(1L);
+    context.stop();
+
+    ArrayList<Metric> metrics = new ArrayList<>(dropWizardMetrics.getMetrics());
+    assertThat(metrics.size()).isEqualTo(10);
+
+    assertThat(metrics.get(0).getMetricDescriptor())
+        .isEqualTo(
+            MetricDescriptor.create(
+                "dropwizard5_cache_evictions_counter",
+                "Collected from dropwizard5 (metric=cache_evictions, "
+                    + "type=io.dropwizard.metrics5.Counter)",
+                DEFAULT_UNIT,
+                Type.GAUGE_INT64,
+                labelKeys));
+    assertThat(metrics.get(0).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getLabelValues().size())
+        .isEqualTo(tags.size());
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getLabelValues()).isEqualTo(labelValues);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+        .isEqualTo(Value.longValue(1));
+    assertThat(metrics.get(0).getTimeSeriesList().get(0).getStartTimestamp()).isNull();
+
+    // boolean gauge with tags
+    assertThat(metrics.get(1).getMetricDescriptor())
+        .isEqualTo(
+            MetricDescriptor.create(
+                "dropwizard5_boolean_gauge_gauge",
+                "Collected from dropwizard5 (metric=boolean_gauge, "
+                    + "type=io.opencensus.contrib.dropwizard5.DropWizardMetricsTest$BooleanGauge)",
+                DEFAULT_UNIT,
+                Type.GAUGE_INT64,
+                labelKeys));
+    assertThat(metrics.get(1).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(1).getTimeSeriesList().get(0).getLabelValues().size())
+        .isEqualTo(tags.size());
+    assertThat(metrics.get(1).getTimeSeriesList().get(0).getLabelValues()).isEqualTo(labelValues);
+    assertThat(metrics.get(1).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(1).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+        .isEqualTo(Value.longValue(1));
+    assertThat(metrics.get(1).getTimeSeriesList().get(0).getStartTimestamp()).isNull();
+
+    assertThat(metrics.get(2).getMetricDescriptor())
+        .isEqualTo(
+            MetricDescriptor.create(
+                "dropwizard5_double_gauge_gauge",
+                "Collected from dropwizard5 (metric=double_gauge, "
+                    + "type=io.opencensus.contrib.dropwizard5.DropWizardMetricsTest$DoubleGauge)",
+                DEFAULT_UNIT,
+                Type.GAUGE_DOUBLE,
+                labelKeys));
+    assertThat(metrics.get(2).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(2).getTimeSeriesList().get(0).getLabelValues().size())
+        .isEqualTo(tags.size());
+    assertThat(metrics.get(2).getTimeSeriesList().get(0).getLabelValues()).isEqualTo(labelValues);
+    assertThat(metrics.get(2).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(2).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+        .isEqualTo(Value.doubleValue(1.234));
+    assertThat(metrics.get(2).getTimeSeriesList().get(0).getStartTimestamp()).isNull();
+
+    assertThat(metrics.get(3).getMetricDescriptor())
+        .isEqualTo(
+            MetricDescriptor.create(
+                "dropwizard5_float_gauge_gauge",
+                "Collected from dropwizard5 (metric=float_gauge, "
+                    + "type=io.opencensus.contrib.dropwizard5.DropWizardMetricsTest$FloatGauge)",
+                DEFAULT_UNIT,
+                Type.GAUGE_DOUBLE,
+                labelKeys));
+    assertThat(metrics.get(3).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(3).getTimeSeriesList().get(0).getLabelValues().size())
+        .isEqualTo(tags.size());
+    assertThat(metrics.get(3).getTimeSeriesList().get(0).getLabelValues()).isEqualTo(labelValues);
+    assertThat(metrics.get(3).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(3).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+        .isEqualTo(Value.doubleValue(0.1234000027179718));
+    assertThat(metrics.get(3).getTimeSeriesList().get(0).getStartTimestamp()).isNull();
+
+    assertThat(metrics.get(4).getMetricDescriptor())
+        .isEqualTo(
+            MetricDescriptor.create(
+                "dropwizard5_integer_gauge_gauge",
+                "Collected from dropwizard5 (metric=integer_gauge, "
+                    + "type=io.opencensus.contrib.dropwizard5.DropWizardMetricsTest$IntegerGauge)",
+                DEFAULT_UNIT,
+                Type.GAUGE_DOUBLE,
+                labelKeys));
+    assertThat(metrics.get(4).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(4).getTimeSeriesList().get(0).getLabelValues().size())
+        .isEqualTo(tags.size());
+    assertThat(metrics.get(4).getTimeSeriesList().get(0).getLabelValues()).isEqualTo(labelValues);
+    assertThat(metrics.get(4).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(4).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+        .isEqualTo(Value.doubleValue(1234.0));
+    assertThat(metrics.get(4).getTimeSeriesList().get(0).getStartTimestamp()).isNull();
+
+    assertThat(metrics.get(5).getMetricDescriptor())
+        .isEqualTo(
+            MetricDescriptor.create(
+                "dropwizard5_long_gauge_gauge",
+                "Collected from dropwizard5 (metric=long_gauge, "
+                    + "type=io.opencensus.contrib.dropwizard5.DropWizardMetricsTest$LongGauge)",
+                DEFAULT_UNIT,
+                Type.GAUGE_DOUBLE,
+                labelKeys));
+    assertThat(metrics.get(5).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(5).getTimeSeriesList().get(0).getLabelValues().size())
+        .isEqualTo(tags.size());
+    assertThat(metrics.get(5).getTimeSeriesList().get(0).getLabelValues()).isEqualTo(labelValues);
+    assertThat(metrics.get(5).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(5).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+        .isEqualTo(Value.doubleValue(1234.0));
+    assertThat(metrics.get(5).getTimeSeriesList().get(0).getStartTimestamp()).isNull();
+
+    // boolean gauge without tags
+    assertThat(metrics.get(6).getMetricDescriptor())
+        .isEqualTo(
+            MetricDescriptor.create(
+                "dropwizard5_notags_boolean_gauge_gauge",
+                "Collected from dropwizard5 (metric=notags_boolean_gauge, "
+                    + "type=io.opencensus.contrib.dropwizard5.DropWizardMetricsTest$BooleanGauge)",
+                DEFAULT_UNIT,
+                Type.GAUGE_INT64,
+                Collections.emptyList()));
+    assertThat(metrics.get(6).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(6).getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(0);
+    assertThat(metrics.get(6).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(6).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+        .isEqualTo(Value.longValue(1));
+    assertThat(metrics.get(6).getTimeSeriesList().get(0).getStartTimestamp()).isNull();
+
+    assertThat(metrics.get(7).getMetricDescriptor())
+        .isEqualTo(
+            MetricDescriptor.create(
+                "dropwizard5_get_requests_meter",
+                "Collected from dropwizard5 (metric=get_requests, "
+                    + "type=io.dropwizard.metrics5.Meter)",
+                DEFAULT_UNIT,
+                Type.CUMULATIVE_INT64,
+                labelKeys));
+    assertThat(metrics.get(7).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(7).getTimeSeriesList().get(0).getLabelValues().size())
+        .isEqualTo(tags.size());
+    assertThat(metrics.get(7).getTimeSeriesList().get(0).getLabelValues()).isEqualTo(labelValues);
+    assertThat(metrics.get(7).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(7).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+        .isEqualTo(Value.longValue(2));
+    assertThat(metrics.get(7).getTimeSeriesList().get(0).getStartTimestamp()).isNotNull();
+
+    assertThat(metrics.get(8).getMetricDescriptor())
+        .isEqualTo(
+            MetricDescriptor.create(
+                "dropwizard5_result_histogram",
+                "Collected from dropwizard5 (metric=result, "
+                    + "type=io.dropwizard.metrics5.Histogram)",
+                DEFAULT_UNIT,
+                Type.SUMMARY,
+                labelKeys));
+    assertThat(metrics.get(8).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(8).getTimeSeriesList().get(0).getLabelValues().size())
+        .isEqualTo(tags.size());
+    assertThat(metrics.get(8).getTimeSeriesList().get(0).getLabelValues()).isEqualTo(labelValues);
+    assertThat(metrics.get(8).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(8).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+        .isEqualTo(
+            Value.summaryValue(
+                Summary.create(
+                    1L,
+                    0.0,
+                    Snapshot.create(
+                        1L,
+                        0.0,
+                        Arrays.asList(
+                            ValueAtPercentile.create(50.0, 200.0),
+                            ValueAtPercentile.create(75.0, 200.0),
+                            ValueAtPercentile.create(98.0, 200.0),
+                            ValueAtPercentile.create(99.0, 200.0),
+                            ValueAtPercentile.create(99.9, 200.0))))));
+    assertThat(metrics.get(8).getTimeSeriesList().get(0).getStartTimestamp())
+        .isInstanceOf(Timestamp.class);
+
+    assertThat(metrics.get(9).getMetricDescriptor())
+        .isEqualTo(
+            MetricDescriptor.create(
+                "dropwizard5_requests_timer",
+                "Collected from dropwizard5 (metric=requests, "
+                    + "type=io.dropwizard.metrics5.Timer)",
+                NS_UNIT,
+                Type.SUMMARY,
+                labelKeys));
+    assertThat(metrics.get(9).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(9).getTimeSeriesList().get(0).getLabelValues().size())
+        .isEqualTo(tags.size());
+    assertThat(metrics.get(9).getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metrics.get(9).getTimeSeriesList().get(0).getPoints().get(0).getValue())
+        .isEqualTo(
+            Value.summaryValue(
+                Summary.create(
+                    1L,
+                    0.0,
+                    Snapshot.create(
+                        1L,
+                        0.0,
+                        Arrays.asList(
+                            ValueAtPercentile.create(50.0, timer.getSnapshot().getMedian()),
+                            ValueAtPercentile.create(75.0, timer.getSnapshot().get75thPercentile()),
+                            ValueAtPercentile.create(98.0, timer.getSnapshot().get98thPercentile()),
+                            ValueAtPercentile.create(99.0, timer.getSnapshot().get99thPercentile()),
+                            ValueAtPercentile.create(
+                                99.9, timer.getSnapshot().get999thPercentile()))))));
+
+    assertThat(metrics.get(9).getTimeSeriesList().get(0).getStartTimestamp())
+        .isInstanceOf(Timestamp.class);
+  }
+
+  @Test
+  public void empty_GetMetrics() {
+    assertThat(dropWizardMetrics.getMetrics()).isEmpty();
+  }
+
+  @Test
+  public void filter_GetMetrics() {
+    MetricFilter filter =
+        new MetricFilter() {
+          @Override
+          public boolean matches(MetricName name, io.dropwizard.metrics5.Metric metric) {
+            return name.getKey().startsWith("test");
+          }
+        };
+    dropWizardMetrics = new DropWizardMetrics(Collections.singletonList(metricRegistry), filter);
+    metricRegistry.timer("test_requests");
+    metricRegistry.timer("requests");
+
+    Collection<Metric> metrics = dropWizardMetrics.getMetrics();
+    assertThat(metrics).hasSize(1);
+    Metric value = metrics.iterator().next();
+    assertThat(value.getMetricDescriptor().getName()).isEqualTo("dropwizard5_test_requests_timer");
+  }
+
+  static class IntegerGauge implements Gauge<Integer> {
+    @Override
+    public Integer getValue() {
+      return 1234;
+    }
+  }
+
+  static class DoubleGauge implements Gauge<Double> {
+    @Override
+    public Double getValue() {
+      return 1.234D;
+    }
+  }
+
+  static class LongGauge implements Gauge<Long> {
+    @Override
+    public Long getValue() {
+      return 1234L;
+    }
+  }
+
+  static class FloatGauge implements Gauge<Float> {
+    @Override
+    public Float getValue() {
+      return 0.1234F;
+    }
+  }
+
+  static class BooleanGauge implements Gauge<Boolean> {
+    @Override
+    public Boolean getValue() {
+      return Boolean.TRUE;
+    }
+  }
+}
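For context, here is a minimal usage sketch of the collector exercised by the test above. It relies only on the constructors and `getMetrics()` call shown in the test; the registry contents, the filter prefix, and the `io.opencensus.metrics.export.Metric` import are illustrative assumptions rather than part of this change.

```java
import io.dropwizard.metrics5.MetricFilter;
import io.dropwizard.metrics5.MetricName;
import io.dropwizard.metrics5.MetricRegistry;
import io.opencensus.contrib.dropwizard5.DropWizardMetrics;
import io.opencensus.metrics.export.Metric;
import java.util.Collection;
import java.util.Collections;

public final class DropWizard5Example {
  public static void main(String[] args) {
    MetricRegistry registry = new MetricRegistry();
    registry.counter(new MetricName("cache_evictions", Collections.emptyMap())).inc();

    // Collect everything in the registry, as in collect() above...
    DropWizardMetrics all = new DropWizardMetrics(Collections.singletonList(registry));

    // ...or only metrics whose key matches a filter, as in filter_GetMetrics() above.
    DropWizardMetrics filtered =
        new DropWizardMetrics(
            Collections.singletonList(registry),
            new MetricFilter() {
              @Override
              public boolean matches(MetricName name, io.dropwizard.metrics5.Metric metric) {
                return name.getKey().startsWith("cache_");
              }
            });

    Collection<Metric> snapshot = filtered.getMetrics();
    System.out.println(
        "Collected " + snapshot.size() + " metric(s); " + all.getMetrics().size() + " unfiltered.");
  }
}
```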
diff --git a/contrib/dropwizard5/src/test/java/io/opencensus/contrib/dropwizard5/DropWizardUtilsTest.java b/contrib/dropwizard5/src/test/java/io/opencensus/contrib/dropwizard5/DropWizardUtilsTest.java
new file mode 100644
index 0000000..bd928e4
--- /dev/null
+++ b/contrib/dropwizard5/src/test/java/io/opencensus/contrib/dropwizard5/DropWizardUtilsTest.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.dropwizard5;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.dropwizard.metrics5.Counter;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Tests for {@link DropWizardUtils}. */
+@RunWith(JUnit4.class)
+public class DropWizardUtilsTest {
+
+  @Test
+  public void generateFullMetricName() {
+    assertThat(DropWizardUtils.generateFullMetricName("requests", "gauge"))
+        .isEqualTo("dropwizard5_requests_gauge");
+  }
+
+  @Test
+  public void generateFullMetricDescription() {
+    assertThat(DropWizardUtils.generateFullMetricDescription("Counter", new Counter()))
+        .isEqualTo(
+            "Collected from dropwizard5 (metric=Counter, type=io.dropwizard.metrics5.Counter)");
+  }
+}
diff --git a/contrib/exemplar_util/README.md b/contrib/exemplar_util/README.md
index 1c9d62d..3423123 100644
--- a/contrib/exemplar_util/README.md
+++ b/contrib/exemplar_util/README.md
@@ -17,14 +17,14 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-contrib-exemplar-util</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
 </dependencies>
 ```
 
 For Gradle add to your dependencies:
-```gradle
-compile 'io.opencensus:opencensus-contrib-exemplar-util:0.16.1'
+```groovy
+compile 'io.opencensus:opencensus-contrib-exemplar-util:0.28.3'
 ```
 
 [travis-image]: https://travis-ci.org/census-instrumentation/opencensus-java.svg?branch=master
diff --git a/contrib/exemplar_util/build.gradle b/contrib/exemplar_util/build.gradle
index 9404b87..8fd6acd 100644
--- a/contrib/exemplar_util/build.gradle
+++ b/contrib/exemplar_util/build.gradle
@@ -10,6 +10,8 @@
 dependencies {
     compile project(':opencensus-api')
 
+    compileOnly libraries.auto_value
+
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
     signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
 }
diff --git a/contrib/exemplar_util/src/main/java/io/opencensus/contrib/exemplar/util/AttachmentValueSpanContext.java b/contrib/exemplar_util/src/main/java/io/opencensus/contrib/exemplar/util/AttachmentValueSpanContext.java
new file mode 100644
index 0000000..a675376
--- /dev/null
+++ b/contrib/exemplar_util/src/main/java/io/opencensus/contrib/exemplar/util/AttachmentValueSpanContext.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.exemplar.util;
+
+import com.google.auto.value.AutoValue;
+import io.opencensus.metrics.data.AttachmentValue;
+import io.opencensus.trace.SpanContext;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * {@link SpanContext} {@link AttachmentValue}.
+ *
+ * <p>Added in the contrib util package because we want to avoid a dependency on the Tracing API
+ * in the Stats APIs.
+ *
+ * @since 0.20
+ */
+@Immutable
+@AutoValue
+public abstract class AttachmentValueSpanContext extends AttachmentValue {
+
+  AttachmentValueSpanContext() {}
+
+  /**
+   * Returns the span context attachment value.
+   *
+   * @return the span context attachment value.
+   * @since 0.20
+   */
+  public abstract SpanContext getSpanContext();
+
+  /**
+   * Creates an {@link AttachmentValueSpanContext}.
+   *
+   * @param spanContext the span context value.
+   * @return an {@code AttachmentValueSpanContext}.
+   * @since 0.20
+   */
+  public static AttachmentValueSpanContext create(SpanContext spanContext) {
+    return new AutoValue_AttachmentValueSpanContext(spanContext);
+  }
+
+  @Override
+  public final String getValue() {
+    return getSpanContext().toString();
+  }
+}
diff --git a/contrib/exemplar_util/src/main/java/io/opencensus/contrib/exemplar/util/ExemplarUtils.java b/contrib/exemplar_util/src/main/java/io/opencensus/contrib/exemplar/util/ExemplarUtils.java
index 7eb2116..7b3d4b7 100644
--- a/contrib/exemplar_util/src/main/java/io/opencensus/contrib/exemplar/util/ExemplarUtils.java
+++ b/contrib/exemplar_util/src/main/java/io/opencensus/contrib/exemplar/util/ExemplarUtils.java
@@ -16,11 +16,9 @@
 
 package io.opencensus.contrib.exemplar.util;
 
-import io.opencensus.stats.AggregationData.DistributionData.Exemplar;
+import io.opencensus.metrics.data.Exemplar;
 import io.opencensus.stats.MeasureMap;
 import io.opencensus.trace.SpanContext;
-import io.opencensus.trace.SpanId;
-import io.opencensus.trace.TraceId;
 import javax.annotation.Nullable;
 
 /**
@@ -31,40 +29,24 @@
 public final class ExemplarUtils {
 
   /**
-   * Key for {@link TraceId} in the contextual information of an {@link Exemplar}.
+   * Key for {@link SpanContext} in the contextual information of an {@link Exemplar}.
    *
-   * <p>For the {@code TraceId} value of this key, it is suggested to encode it in hex (base 16)
-   * lower case.
-   *
-   * @since 0.16
+   * @since 0.20
    */
-  public static final String ATTACHMENT_KEY_TRACE_ID = "TraceId";
-
-  /**
-   * Key for {@link SpanId} in the contextual information of an {@link Exemplar}.
-   *
-   * <p>For the {@code SpanId} value of this key, it is suggested to encode it in hex (base 16)
-   * lower case.
-   *
-   * @since 0.16
-   */
-  public static final String ATTACHMENT_KEY_SPAN_ID = "SpanId";
+  public static final String ATTACHMENT_KEY_SPAN_CONTEXT = "SpanContext";
 
   /**
    * Puts a {@link SpanContext} into the attachments of the given {@link MeasureMap}.
    *
-   * <p>{@link TraceId} and {@link SpanId} of the {@link SpanContext} will be encoded in base 16
-   * lower case encoding.
-   *
-   * @param measureMap the {@code MeasureMap}
+   * @param measureMap the {@code MeasureMap}.
    * @param spanContext the {@code SpanContext} to be put as attachments.
    * @since 0.16
    */
   public static void putSpanContextAttachments(MeasureMap measureMap, SpanContext spanContext) {
     checkNotNull(measureMap, "measureMap");
     checkNotNull(spanContext, "spanContext");
-    measureMap.putAttachment(ATTACHMENT_KEY_TRACE_ID, spanContext.getTraceId().toLowerBase16());
-    measureMap.putAttachment(ATTACHMENT_KEY_SPAN_ID, spanContext.getSpanId().toLowerBase16());
+    measureMap.putAttachment(
+        ATTACHMENT_KEY_SPAN_CONTEXT, AttachmentValueSpanContext.create(spanContext));
   }
 
   // TODO: reuse this method from shared artifact.
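A minimal sketch of the updated recording path follows. Only `putSpanContextAttachments` and the single `ATTACHMENT_KEY_SPAN_CONTEXT` attachment come from the code above; the measure definition and the `Stats`/`Tracing` entry points are standard OpenCensus APIs assumed for illustration.

```java
import io.opencensus.contrib.exemplar.util.ExemplarUtils;
import io.opencensus.stats.Measure.MeasureDouble;
import io.opencensus.stats.MeasureMap;
import io.opencensus.stats.Stats;
import io.opencensus.trace.Tracing;

final class ExemplarRecordingExample {
  private static final MeasureDouble LATENCY_MS =
      MeasureDouble.create("my.org/measure/latency", "Request latency", "ms");

  static void recordWithExemplar(double latencyMs) {
    MeasureMap measureMap = Stats.getStatsRecorder().newMeasureMap().put(LATENCY_MS, latencyMs);
    // The current span context is stored under ATTACHMENT_KEY_SPAN_CONTEXT as an
    // AttachmentValueSpanContext, replacing the separate trace-id/span-id string attachments.
    ExemplarUtils.putSpanContextAttachments(
        measureMap, Tracing.getTracer().getCurrentSpan().getContext());
    measureMap.record();
  }
}
```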
diff --git a/contrib/exemplar_util/src/test/java/io/opencensus/contrib/exemplar/util/AttachmentValueSpanContextTest.java b/contrib/exemplar_util/src/test/java/io/opencensus/contrib/exemplar/util/AttachmentValueSpanContextTest.java
new file mode 100644
index 0000000..d79685f
--- /dev/null
+++ b/contrib/exemplar_util/src/test/java/io/opencensus/contrib/exemplar/util/AttachmentValueSpanContextTest.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.exemplar.util;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.SpanId;
+import io.opencensus.trace.TraceId;
+import io.opencensus.trace.TraceOptions;
+import java.util.Random;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link AttachmentValueSpanContext}. */
+@RunWith(JUnit4.class)
+public class AttachmentValueSpanContextTest {
+
+  private static final Random RANDOM = new Random(1234);
+  private static final TraceId TRACE_ID = TraceId.generateRandomId(RANDOM);
+  private static final SpanId SPAN_ID = SpanId.generateRandomId(RANDOM);
+  private static final SpanContext SPAN_CONTEXT =
+      SpanContext.create(TRACE_ID, SPAN_ID, TraceOptions.DEFAULT);
+
+  @Rule public final ExpectedException thrown = ExpectedException.none();
+
+  @Test
+  public void getValue() {
+    AttachmentValueSpanContext attachmentValue = AttachmentValueSpanContext.create(SPAN_CONTEXT);
+    assertThat(attachmentValue.getValue()).isEqualTo(SPAN_CONTEXT.toString());
+  }
+
+  @Test
+  public void getSpanContext() {
+    AttachmentValueSpanContext attachmentValue =
+        AttachmentValueSpanContext.create(SpanContext.INVALID);
+    assertThat(attachmentValue.getSpanContext()).isEqualTo(SpanContext.INVALID);
+  }
+
+  @Test
+  public void preventNullSpanContext() {
+    thrown.expect(NullPointerException.class);
+    AttachmentValueSpanContext.create(null);
+  }
+}
diff --git a/contrib/exemplar_util/src/test/java/io/opencensus/contrib/exemplar/util/ExemplarUtilsTest.java b/contrib/exemplar_util/src/test/java/io/opencensus/contrib/exemplar/util/ExemplarUtilsTest.java
index 766f2c4..dbd4649 100644
--- a/contrib/exemplar_util/src/test/java/io/opencensus/contrib/exemplar/util/ExemplarUtilsTest.java
+++ b/contrib/exemplar_util/src/test/java/io/opencensus/contrib/exemplar/util/ExemplarUtilsTest.java
@@ -17,9 +17,9 @@
 package io.opencensus.contrib.exemplar.util;
 
 import static com.google.common.truth.Truth.assertThat;
-import static io.opencensus.contrib.exemplar.util.ExemplarUtils.ATTACHMENT_KEY_SPAN_ID;
-import static io.opencensus.contrib.exemplar.util.ExemplarUtils.ATTACHMENT_KEY_TRACE_ID;
+import static io.opencensus.contrib.exemplar.util.ExemplarUtils.ATTACHMENT_KEY_SPAN_CONTEXT;
 
+import io.opencensus.metrics.data.AttachmentValue;
 import io.opencensus.stats.Measure.MeasureDouble;
 import io.opencensus.stats.Measure.MeasureLong;
 import io.opencensus.stats.MeasureMap;
@@ -55,10 +55,7 @@
     ExemplarUtils.putSpanContextAttachments(measureMap, SPAN_CONTEXT);
     assertThat(measureMap.attachments)
         .containsExactly(
-            ATTACHMENT_KEY_TRACE_ID,
-            TRACE_ID.toLowerBase16(),
-            ATTACHMENT_KEY_SPAN_ID,
-            SPAN_ID.toLowerBase16());
+            ATTACHMENT_KEY_SPAN_CONTEXT, AttachmentValueSpanContext.create(SPAN_CONTEXT));
   }
 
   @Test
@@ -78,10 +75,10 @@
 
   private static final class FakeMeasureMap extends MeasureMap {
 
-    private final Map<String, String> attachments = new HashMap<String, String>();
+    private final Map<String, AttachmentValue> attachments = new HashMap<String, AttachmentValue>();
 
     @Override
-    public MeasureMap putAttachment(String key, String value) {
+    public MeasureMap putAttachment(String key, AttachmentValue value) {
       attachments.put(key, value);
       return this;
     }
diff --git a/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcMeasureConstants.java b/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcMeasureConstants.java
index c09cfbf..54047d3 100644
--- a/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcMeasureConstants.java
+++ b/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcMeasureConstants.java
@@ -113,126 +113,6 @@
   // RPC client Measures.
 
   /**
-   * {@link Measure} for gRPC client error counts.
-   *
-   * @since 0.8
-   * @deprecated because error counts can be computed on your metrics backend by totalling the
-   *     different per-status values.
-   */
-  @Deprecated
-  public static final MeasureLong RPC_CLIENT_ERROR_COUNT =
-      Measure.MeasureLong.create("grpc.io/client/error_count", "RPC Errors", COUNT);
-
-  /**
-   * {@link Measure} for gRPC client request bytes.
-   *
-   * @since 0.8
-   * @deprecated in favor of {@link #GRPC_CLIENT_SENT_BYTES_PER_RPC}.
-   */
-  @Deprecated
-  public static final MeasureDouble RPC_CLIENT_REQUEST_BYTES =
-      Measure.MeasureDouble.create("grpc.io/client/request_bytes", "Request bytes", BYTE);
-
-  /**
-   * {@link Measure} for gRPC client response bytes.
-   *
-   * @since 0.8
-   * @deprecated in favor of {@link #GRPC_CLIENT_RECEIVED_BYTES_PER_RPC}.
-   */
-  @Deprecated
-  public static final MeasureDouble RPC_CLIENT_RESPONSE_BYTES =
-      Measure.MeasureDouble.create("grpc.io/client/response_bytes", "Response bytes", BYTE);
-
-  /**
-   * {@link Measure} for gRPC client roundtrip latency in milliseconds.
-   *
-   * @since 0.8
-   * @deprecated in favor of {@link #GRPC_CLIENT_ROUNDTRIP_LATENCY}.
-   */
-  @Deprecated
-  public static final MeasureDouble RPC_CLIENT_ROUNDTRIP_LATENCY =
-      Measure.MeasureDouble.create(
-          "grpc.io/client/roundtrip_latency", "RPC roundtrip latency msec", MILLISECOND);
-
-  /**
-   * {@link Measure} for gRPC client server elapsed time in milliseconds.
-   *
-   * @since 0.8
-   * @deprecated in favor of {@link #GRPC_CLIENT_SERVER_LATENCY}.
-   */
-  @Deprecated
-  public static final MeasureDouble RPC_CLIENT_SERVER_ELAPSED_TIME =
-      Measure.MeasureDouble.create(
-          "grpc.io/client/server_elapsed_time", "Server elapsed time in msecs", MILLISECOND);
-
-  /**
-   * {@link Measure} for gRPC client uncompressed request bytes.
-   *
-   * @since 0.8
-   * @deprecated in favor of {@link #GRPC_CLIENT_SENT_BYTES_PER_RPC}.
-   */
-  @Deprecated
-  public static final MeasureDouble RPC_CLIENT_UNCOMPRESSED_REQUEST_BYTES =
-      Measure.MeasureDouble.create(
-          "grpc.io/client/uncompressed_request_bytes", "Uncompressed Request bytes", BYTE);
-
-  /**
-   * {@link Measure} for gRPC client uncompressed response bytes.
-   *
-   * @since 0.8
-   * @deprecated in favor of {@link #GRPC_CLIENT_RECEIVED_BYTES_PER_RPC}.
-   */
-  @Deprecated
-  public static final MeasureDouble RPC_CLIENT_UNCOMPRESSED_RESPONSE_BYTES =
-      Measure.MeasureDouble.create(
-          "grpc.io/client/uncompressed_response_bytes", "Uncompressed Response bytes", BYTE);
-
-  /**
-   * {@link Measure} for number of started client RPCs.
-   *
-   * @since 0.8
-   * @deprecated in favor of {@link #GRPC_CLIENT_STARTED_RPCS}.
-   */
-  @Deprecated
-  public static final MeasureLong RPC_CLIENT_STARTED_COUNT =
-      Measure.MeasureLong.create(
-          "grpc.io/client/started_count", "Number of client RPCs (streams) started", COUNT);
-
-  /**
-   * {@link Measure} for number of finished client RPCs.
-   *
-   * @since 0.8
-   * @deprecated since finished count can be inferred with a {@code Count} aggregation on {@link
-   *     #GRPC_CLIENT_SERVER_LATENCY}.
-   */
-  @Deprecated
-  public static final MeasureLong RPC_CLIENT_FINISHED_COUNT =
-      Measure.MeasureLong.create(
-          "grpc.io/client/finished_count", "Number of client RPCs (streams) finished", COUNT);
-
-  /**
-   * {@link Measure} for client RPC request message counts.
-   *
-   * @since 0.8
-   * @deprecated in favor of {@link #GRPC_CLIENT_SENT_MESSAGES_PER_RPC}.
-   */
-  @Deprecated
-  public static final MeasureLong RPC_CLIENT_REQUEST_COUNT =
-      Measure.MeasureLong.create(
-          "grpc.io/client/request_count", "Number of client RPC request messages", COUNT);
-
-  /**
-   * {@link Measure} for client RPC response message counts.
-   *
-   * @deprecated in favor of {@link #GRPC_CLIENT_RECEIVED_MESSAGES_PER_RPC}.
-   * @since 0.8
-   */
-  @Deprecated
-  public static final MeasureLong RPC_CLIENT_RESPONSE_COUNT =
-      Measure.MeasureLong.create(
-          "grpc.io/client/response_count", "Number of client RPC response messages", COUNT);
-
-  /**
    * {@link Measure} for total bytes sent across all request messages per RPC.
    *
    * @since 0.13
@@ -255,6 +135,48 @@
           BYTE);
 
   /**
+   * {@link Measure} for total bytes sent per method, recorded real-time as bytes are sent.
+   *
+   * @since 0.18
+   */
+  public static final MeasureDouble GRPC_CLIENT_SENT_BYTES_PER_METHOD =
+      Measure.MeasureDouble.create(
+          "grpc.io/client/sent_bytes_per_method",
+          "Total bytes sent per method, recorded real-time as bytes are sent.",
+          BYTE);
+
+  /**
+   * {@link Measure} for total bytes received per method, recorded real-time as bytes are received.
+   *
+   * @since 0.18
+   */
+  public static final MeasureDouble GRPC_CLIENT_RECEIVED_BYTES_PER_METHOD =
+      Measure.MeasureDouble.create(
+          "grpc.io/client/received_bytes_per_method",
+          "Total bytes received per method, recorded real-time as bytes are received.",
+          BYTE);
+
+  /**
+   * {@link Measure} for total client sent messages.
+   *
+   * @since 0.18
+   */
+  public static final MeasureLong GRPC_CLIENT_SENT_MESSAGES_PER_METHOD =
+      Measure.MeasureLong.create(
+          "grpc.io/client/sent_messages_per_method", "Total messages sent per method.", COUNT);
+
+  /**
+   * {@link Measure} for total client received messages.
+   *
+   * @since 0.18
+   */
+  public static final MeasureLong GRPC_CLIENT_RECEIVED_MESSAGES_PER_METHOD =
+      Measure.MeasureLong.create(
+          "grpc.io/client/received_messages_per_method",
+          "Total messages received per method.",
+          COUNT);
+
+  /**
    * {@link Measure} for gRPC client roundtrip latency in milliseconds.
    *
    * @since 0.13
@@ -305,127 +227,143 @@
       Measure.MeasureLong.create(
           "grpc.io/client/started_rpcs", "Number of started client RPCs.", COUNT);
 
-  // RPC server Measures.
+  /**
+   * {@link Measure} for total number of retry or hedging attempts excluding transparent retries
+   * made during the client call.
+   *
+   * @since 0.31.0
+   */
+  public static final MeasureLong GRPC_CLIENT_RETRIES_PER_CALL =
+      Measure.MeasureLong.create(
+          "grpc.io/client/retries_per_call", "Number of retries per call", COUNT);
 
   /**
-   * {@link Measure} for gRPC server error counts.
+   * {@link Measure} for total number of transparent retries made during the client call.
+   *
+   * @since 0.28
+   */
+  public static final MeasureLong GRPC_CLIENT_TRANSPARENT_RETRIES_PER_CALL =
+      Measure.MeasureLong.create(
+          "grpc.io/client/transparent_retries_per_call", "Transparent retries per call", COUNT);
+
+  /**
+   * {@link Measure} for total time of delay while there is no active attempt during the client
+   * call.
+   *
+   * @since 0.28
+   */
+  public static final MeasureDouble GRPC_CLIENT_RETRY_DELAY_PER_CALL =
+      Measure.MeasureDouble.create(
+          "grpc.io/client/retry_delay_per_call", "Retry delay per call", MILLISECOND);
+
+  /**
+   * {@link Measure} for gRPC client error counts.
    *
    * @since 0.8
    * @deprecated because error counts can be computed on your metrics backend by totalling the
    *     different per-status values.
    */
   @Deprecated
-  public static final MeasureLong RPC_SERVER_ERROR_COUNT =
-      Measure.MeasureLong.create("grpc.io/server/error_count", "RPC Errors", COUNT);
+  public static final MeasureLong RPC_CLIENT_ERROR_COUNT =
+      Measure.MeasureLong.create("grpc.io/client/error_count", "RPC Errors", COUNT);
 
   /**
-   * {@link Measure} for gRPC server request bytes.
+   * {@link Measure} for gRPC client request bytes.
    *
    * @since 0.8
-   * @deprecated in favor of {@link #GRPC_SERVER_RECEIVED_BYTES_PER_RPC}.
+   * @deprecated in favor of {@link #GRPC_CLIENT_SENT_BYTES_PER_RPC}.
    */
   @Deprecated
-  public static final MeasureDouble RPC_SERVER_REQUEST_BYTES =
-      Measure.MeasureDouble.create("grpc.io/server/request_bytes", "Request bytes", BYTE);
+  public static final MeasureDouble RPC_CLIENT_REQUEST_BYTES = GRPC_CLIENT_SENT_BYTES_PER_RPC;
 
   /**
-   * {@link Measure} for gRPC server response bytes.
+   * {@link Measure} for gRPC client response bytes.
    *
    * @since 0.8
-   * @deprecated in favor of {@link #GRPC_SERVER_SENT_BYTES_PER_RPC}.
+   * @deprecated in favor of {@link #GRPC_CLIENT_RECEIVED_BYTES_PER_RPC}.
    */
   @Deprecated
-  public static final MeasureDouble RPC_SERVER_RESPONSE_BYTES =
-      Measure.MeasureDouble.create("grpc.io/server/response_bytes", "Response bytes", BYTE);
+  public static final MeasureDouble RPC_CLIENT_RESPONSE_BYTES = GRPC_CLIENT_RECEIVED_BYTES_PER_RPC;
 
   /**
-   * {@link Measure} for gRPC server elapsed time in milliseconds.
+   * {@link Measure} for gRPC client roundtrip latency in milliseconds.
    *
    * @since 0.8
-   * @deprecated in favor of {@link #GRPC_SERVER_SERVER_LATENCY}.
+   * @deprecated in favor of {@link #GRPC_CLIENT_ROUNDTRIP_LATENCY}.
    */
   @Deprecated
-  public static final MeasureDouble RPC_SERVER_SERVER_ELAPSED_TIME =
+  public static final MeasureDouble RPC_CLIENT_ROUNDTRIP_LATENCY = GRPC_CLIENT_ROUNDTRIP_LATENCY;
+
+  /**
+   * {@link Measure} for gRPC client server elapsed time in milliseconds.
+   *
+   * @since 0.8
+   * @deprecated in favor of {@link #GRPC_CLIENT_SERVER_LATENCY}.
+   */
+  @Deprecated
+  public static final MeasureDouble RPC_CLIENT_SERVER_ELAPSED_TIME = GRPC_CLIENT_SERVER_LATENCY;
+
+  /**
+   * {@link Measure} for gRPC client uncompressed request bytes.
+   *
+   * @since 0.8
+   * @deprecated in favor of {@link #GRPC_CLIENT_SENT_BYTES_PER_RPC}.
+   */
+  @Deprecated
+  public static final MeasureDouble RPC_CLIENT_UNCOMPRESSED_REQUEST_BYTES =
       Measure.MeasureDouble.create(
-          "grpc.io/server/server_elapsed_time", "Server elapsed time in msecs", MILLISECOND);
+          "grpc.io/client/uncompressed_request_bytes", "Uncompressed Request bytes", BYTE);
 
   /**
-   * {@link Measure} for gRPC server latency in milliseconds.
+   * {@link Measure} for gRPC client uncompressed response bytes.
    *
    * @since 0.8
-   * @deprecated in favor of {@link #GRPC_SERVER_SERVER_LATENCY}.
+   * @deprecated in favor of {@link #GRPC_CLIENT_RECEIVED_BYTES_PER_RPC}.
    */
   @Deprecated
-  public static final MeasureDouble RPC_SERVER_SERVER_LATENCY =
+  public static final MeasureDouble RPC_CLIENT_UNCOMPRESSED_RESPONSE_BYTES =
       Measure.MeasureDouble.create(
-          "grpc.io/server/server_latency", "Latency in msecs", MILLISECOND);
+          "grpc.io/client/uncompressed_response_bytes", "Uncompressed Response bytes", BYTE);
 
   /**
-   * {@link Measure} for gRPC server uncompressed request bytes.
+   * {@link Measure} for number of started client RPCs.
    *
    * @since 0.8
-   * @deprecated in favor of {@link #GRPC_SERVER_RECEIVED_BYTES_PER_RPC}.
+   * @deprecated in favor of {@link #GRPC_CLIENT_STARTED_RPCS}.
    */
-  @Deprecated
-  public static final MeasureDouble RPC_SERVER_UNCOMPRESSED_REQUEST_BYTES =
-      Measure.MeasureDouble.create(
-          "grpc.io/server/uncompressed_request_bytes", "Uncompressed Request bytes", BYTE);
+  @Deprecated public static final MeasureLong RPC_CLIENT_STARTED_COUNT = GRPC_CLIENT_STARTED_RPCS;
 
   /**
-   * {@link Measure} for gRPC server uncompressed response bytes.
-   *
-   * @since 0.8
-   * @deprecated in favor of {@link #GRPC_SERVER_SENT_BYTES_PER_RPC}.
-   */
-  @Deprecated
-  public static final MeasureDouble RPC_SERVER_UNCOMPRESSED_RESPONSE_BYTES =
-      Measure.MeasureDouble.create(
-          "grpc.io/server/uncompressed_response_bytes", "Uncompressed Response bytes", BYTE);
-
-  /**
-   * {@link Measure} for number of started server RPCs.
-   *
-   * @since 0.8
-   * @deprecated in favor of {@link #GRPC_SERVER_STARTED_RPCS}.
-   */
-  @Deprecated
-  public static final MeasureLong RPC_SERVER_STARTED_COUNT =
-      Measure.MeasureLong.create(
-          "grpc.io/server/started_count", "Number of server RPCs (streams) started", COUNT);
-
-  /**
-   * {@link Measure} for number of finished server RPCs.
+   * {@link Measure} for number of finished client RPCs.
    *
    * @since 0.8
    * @deprecated since finished count can be inferred with a {@code Count} aggregation on {@link
-   *     #GRPC_SERVER_SERVER_LATENCY}.
+   *     #GRPC_CLIENT_SERVER_LATENCY}.
    */
   @Deprecated
-  public static final MeasureLong RPC_SERVER_FINISHED_COUNT =
+  public static final MeasureLong RPC_CLIENT_FINISHED_COUNT =
       Measure.MeasureLong.create(
-          "grpc.io/server/finished_count", "Number of server RPCs (streams) finished", COUNT);
+          "grpc.io/client/finished_count", "Number of client RPCs (streams) finished", COUNT);
 
   /**
-   * {@link Measure} for server RPC request message counts.
+   * {@link Measure} for client RPC request message counts.
    *
    * @since 0.8
-   * @deprecated in favor of {@link #GRPC_SERVER_RECEIVED_MESSAGES_PER_RPC}.
+   * @deprecated in favor of {@link #GRPC_CLIENT_SENT_MESSAGES_PER_RPC}.
    */
   @Deprecated
-  public static final MeasureLong RPC_SERVER_REQUEST_COUNT =
-      Measure.MeasureLong.create(
-          "grpc.io/server/request_count", "Number of server RPC request messages", COUNT);
+  public static final MeasureLong RPC_CLIENT_REQUEST_COUNT = GRPC_CLIENT_SENT_MESSAGES_PER_RPC;
 
   /**
-   * {@link Measure} for server RPC response message counts.
+   * {@link Measure} for client RPC response message counts.
    *
+   * @deprecated in favor of {@link #GRPC_CLIENT_RECEIVED_MESSAGES_PER_RPC}.
    * @since 0.8
-   * @deprecated in favor of {@link #GRPC_SERVER_SENT_MESSAGES_PER_RPC}.
    */
   @Deprecated
-  public static final MeasureLong RPC_SERVER_RESPONSE_COUNT =
-      Measure.MeasureLong.create(
-          "grpc.io/server/response_count", "Number of server RPC response messages", COUNT);
+  public static final MeasureLong RPC_CLIENT_RESPONSE_COUNT = GRPC_CLIENT_RECEIVED_MESSAGES_PER_RPC;
+
+  // RPC server Measures.
 
   /**
    * {@link Measure} for total bytes sent across all response messages per RPC.
@@ -450,6 +388,48 @@
           BYTE);
 
   /**
+   * {@link Measure} for total bytes sent per method, recorded real-time as bytes are sent.
+   *
+   * @since 0.18
+   */
+  public static final MeasureDouble GRPC_SERVER_SENT_BYTES_PER_METHOD =
+      Measure.MeasureDouble.create(
+          "grpc.io/server/sent_bytes_per_method",
+          "Total bytes sent per method, recorded real-time as bytes are sent.",
+          BYTE);
+
+  /**
+   * {@link Measure} for total bytes received per method, recorded real-time as bytes are received.
+   *
+   * @since 0.18
+   */
+  public static final MeasureDouble GRPC_SERVER_RECEIVED_BYTES_PER_METHOD =
+      Measure.MeasureDouble.create(
+          "grpc.io/server/received_bytes_per_method",
+          "Total bytes received per method, recorded real-time as bytes are received.",
+          BYTE);
+
+  /**
+   * {@link Measure} for total server sent messages.
+   *
+   * @since 0.18
+   */
+  public static final MeasureLong GRPC_SERVER_SENT_MESSAGES_PER_METHOD =
+      Measure.MeasureLong.create(
+          "grpc.io/server/sent_messages_per_method", "Total messages sent per method.", COUNT);
+
+  /**
+   * {@link Measure} for total server received messages.
+   *
+   * @since 0.18
+   */
+  public static final MeasureLong GRPC_SERVER_RECEIVED_MESSAGES_PER_METHOD =
+      Measure.MeasureLong.create(
+          "grpc.io/server/received_messages_per_method",
+          "Total messages received per method.",
+          COUNT);
+
+  /**
    * {@link Measure} for number of messages sent in each RPC.
    *
    * @since 0.13
@@ -491,5 +471,114 @@
       Measure.MeasureLong.create(
           "grpc.io/server/started_rpcs", "Number of started server RPCs.", COUNT);
 
+  /**
+   * {@link Measure} for gRPC server error counts.
+   *
+   * @since 0.8
+   * @deprecated because error counts can be computed on your metrics backend by totalling the
+   *     different per-status values.
+   */
+  @Deprecated
+  public static final MeasureLong RPC_SERVER_ERROR_COUNT =
+      Measure.MeasureLong.create("grpc.io/server/error_count", "RPC Errors", COUNT);
+
+  /**
+   * {@link Measure} for gRPC server request bytes.
+   *
+   * @since 0.8
+   * @deprecated in favor of {@link #GRPC_SERVER_RECEIVED_BYTES_PER_RPC}.
+   */
+  @Deprecated
+  public static final MeasureDouble RPC_SERVER_REQUEST_BYTES = GRPC_SERVER_RECEIVED_BYTES_PER_RPC;
+
+  /**
+   * {@link Measure} for gRPC server response bytes.
+   *
+   * @since 0.8
+   * @deprecated in favor of {@link #GRPC_SERVER_SENT_BYTES_PER_RPC}.
+   */
+  @Deprecated
+  public static final MeasureDouble RPC_SERVER_RESPONSE_BYTES = GRPC_SERVER_SENT_BYTES_PER_RPC;
+
+  /**
+   * {@link Measure} for gRPC server elapsed time in milliseconds.
+   *
+   * @since 0.8
+   * @deprecated in favor of {@link #GRPC_SERVER_SERVER_LATENCY}.
+   */
+  @Deprecated
+  public static final MeasureDouble RPC_SERVER_SERVER_ELAPSED_TIME =
+      Measure.MeasureDouble.create(
+          "grpc.io/server/server_elapsed_time", "Server elapsed time in msecs", MILLISECOND);
+
+  /**
+   * {@link Measure} for gRPC server latency in milliseconds.
+   *
+   * @since 0.8
+   * @deprecated in favor of {@link #GRPC_SERVER_SERVER_LATENCY}.
+   */
+  @Deprecated
+  public static final MeasureDouble RPC_SERVER_SERVER_LATENCY = GRPC_SERVER_SERVER_LATENCY;
+
+  /**
+   * {@link Measure} for gRPC server uncompressed request bytes.
+   *
+   * @since 0.8
+   * @deprecated in favor of {@link #GRPC_SERVER_RECEIVED_BYTES_PER_RPC}.
+   */
+  @Deprecated
+  public static final MeasureDouble RPC_SERVER_UNCOMPRESSED_REQUEST_BYTES =
+      Measure.MeasureDouble.create(
+          "grpc.io/server/uncompressed_request_bytes", "Uncompressed Request bytes", BYTE);
+
+  /**
+   * {@link Measure} for gRPC server uncompressed response bytes.
+   *
+   * @since 0.8
+   * @deprecated in favor of {@link #GRPC_SERVER_SENT_BYTES_PER_RPC}.
+   */
+  @Deprecated
+  public static final MeasureDouble RPC_SERVER_UNCOMPRESSED_RESPONSE_BYTES =
+      Measure.MeasureDouble.create(
+          "grpc.io/server/uncompressed_response_bytes", "Uncompressed Response bytes", BYTE);
+
+  /**
+   * {@link Measure} for number of started server RPCs.
+   *
+   * @since 0.8
+   * @deprecated in favor of {@link #GRPC_SERVER_STARTED_RPCS}.
+   */
+  @Deprecated public static final MeasureLong RPC_SERVER_STARTED_COUNT = GRPC_SERVER_STARTED_RPCS;
+
+  /**
+   * {@link Measure} for number of finished server RPCs.
+   *
+   * @since 0.8
+   * @deprecated since finished count can be inferred with a {@code Count} aggregation on {@link
+   *     #GRPC_SERVER_SERVER_LATENCY}.
+   */
+  @Deprecated
+  public static final MeasureLong RPC_SERVER_FINISHED_COUNT =
+      Measure.MeasureLong.create(
+          "grpc.io/server/finished_count", "Number of server RPCs (streams) finished", COUNT);
+
+  /**
+   * {@link Measure} for server RPC request message counts.
+   *
+   * @since 0.8
+   * @deprecated in favor of {@link #GRPC_SERVER_RECEIVED_MESSAGES_PER_RPC}.
+   */
+  @Deprecated
+  public static final MeasureLong RPC_SERVER_REQUEST_COUNT = GRPC_SERVER_RECEIVED_MESSAGES_PER_RPC;
+
+  /**
+   * {@link Measure} for server RPC response message counts.
+   *
+   * @since 0.8
+   * @deprecated in favor of {@link #GRPC_SERVER_SENT_MESSAGES_PER_RPC}.
+   */
+  @Deprecated
+  public static final MeasureLong RPC_SERVER_RESPONSE_COUNT = GRPC_SERVER_SENT_MESSAGES_PER_RPC;
+
   private RpcMeasureConstants() {}
 }
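One practical consequence of the reshuffle above is that several deprecated constants are now plain aliases of the new measures rather than independently created ones, so both names resolve to the same `Measure` instance. A small sketch based on the assignments above (the class and method names are illustrative):

```java
import io.opencensus.contrib.grpc.metrics.RpcMeasureConstants;

final class DeprecatedAliasCheck {
  @SuppressWarnings("deprecation")
  static boolean requestBytesIsAnAlias() {
    // The deprecated constant is declared as
    //   RPC_CLIENT_REQUEST_BYTES = GRPC_CLIENT_SENT_BYTES_PER_RPC;
    // so both names refer to the same Measure instance and record into the same stream.
    return RpcMeasureConstants.RPC_CLIENT_REQUEST_BYTES
        == RpcMeasureConstants.GRPC_CLIENT_SENT_BYTES_PER_RPC;
  }
}
```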
diff --git a/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcViewConstants.java b/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcViewConstants.java
index fbe1d58..35dffdd 100644
--- a/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcViewConstants.java
+++ b/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcViewConstants.java
@@ -17,18 +17,29 @@
 package io.opencensus.contrib.grpc.metrics;
 
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_METHOD;
+import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_RECEIVED_BYTES_PER_METHOD;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_RECEIVED_BYTES_PER_RPC;
+import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_RECEIVED_MESSAGES_PER_METHOD;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_RECEIVED_MESSAGES_PER_RPC;
+import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_RETRIES_PER_CALL;
+import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_RETRY_DELAY_PER_CALL;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_ROUNDTRIP_LATENCY;
+import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_SENT_BYTES_PER_METHOD;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_SENT_BYTES_PER_RPC;
+import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_SENT_MESSAGES_PER_METHOD;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_SENT_MESSAGES_PER_RPC;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_SERVER_LATENCY;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_STARTED_RPCS;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_STATUS;
+import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_CLIENT_TRANSPARENT_RETRIES_PER_CALL;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_SERVER_METHOD;
+import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_SERVER_RECEIVED_BYTES_PER_METHOD;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_SERVER_RECEIVED_BYTES_PER_RPC;
+import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_SERVER_RECEIVED_MESSAGES_PER_METHOD;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_SERVER_RECEIVED_MESSAGES_PER_RPC;
+import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_SERVER_SENT_BYTES_PER_METHOD;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_SERVER_SENT_BYTES_PER_RPC;
+import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_SERVER_SENT_MESSAGES_PER_METHOD;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_SERVER_SENT_MESSAGES_PER_RPC;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_SERVER_SERVER_LATENCY;
 import static io.opencensus.contrib.grpc.metrics.RpcMeasureConstants.GRPC_SERVER_STARTED_RPCS;
@@ -63,6 +74,7 @@
 import io.opencensus.stats.Aggregation;
 import io.opencensus.stats.Aggregation.Count;
 import io.opencensus.stats.Aggregation.Distribution;
+import io.opencensus.stats.Aggregation.Sum;
 import io.opencensus.stats.BucketBoundaries;
 import io.opencensus.stats.View;
 import java.util.Arrays;
@@ -122,9 +134,14 @@
               0.0, 1.0, 2.0, 4.0, 8.0, 16.0, 32.0, 64.0, 128.0, 256.0, 512.0, 1024.0, 2048.0,
               4096.0, 8192.0, 16384.0, 32768.0, 65536.0));
 
+  @VisibleForTesting
+  static final List<Double> RETRY_COUNT_PER_CALL_BUCKET_BOUNDARIES =
+      Collections.unmodifiableList(Arrays.asList(1.0, 2.0, 3.0, 4.0, 5.0));
+
   // Use Aggregation.Mean to record sum and count stats at the same time.
   @VisibleForTesting static final Aggregation MEAN = Aggregation.Mean.create();
   @VisibleForTesting static final Aggregation COUNT = Count.create();
+  @VisibleForTesting static final Aggregation SUM = Sum.create();
 
   @VisibleForTesting
   static final Aggregation AGGREGATION_WITH_BYTES_HISTOGRAM =
@@ -141,6 +158,10 @@
   static final Aggregation AGGREGATION_WITH_COUNT_HISTOGRAM =
       Distribution.create(BucketBoundaries.create(RPC_COUNT_BUCKET_BOUNDARIES));
 
+  @VisibleForTesting
+  static final Aggregation AGGREGATION_WITH_COUNT_RETRY_HISTOGRAM =
+      Distribution.create(BucketBoundaries.create(RETRY_COUNT_PER_CALL_BUCKET_BOUNDARIES));
+
   @VisibleForTesting static final Duration MINUTE = Duration.create(60, 0);
   @VisibleForTesting static final Duration HOUR = Duration.create(60 * 60, 0);
 
@@ -411,6 +432,58 @@
           Arrays.asList(GRPC_CLIENT_METHOD));
 
   /**
+   * {@link View} for client sent bytes per method.
+   *
+   * @since 0.18
+   */
+  public static final View GRPC_CLIENT_SENT_BYTES_PER_METHOD_VIEW =
+      View.create(
+          View.Name.create("grpc.io/client/sent_bytes_per_method"),
+          "Sent bytes per method",
+          GRPC_CLIENT_SENT_BYTES_PER_METHOD,
+          SUM,
+          Arrays.asList(GRPC_CLIENT_METHOD));
+
+  /**
+   * {@link View} for client received bytes per method.
+   *
+   * @since 0.18
+   */
+  public static final View GRPC_CLIENT_RECEIVED_BYTES_PER_METHOD_VIEW =
+      View.create(
+          View.Name.create("grpc.io/client/received_bytes_per_method"),
+          "Received bytes per method",
+          GRPC_CLIENT_RECEIVED_BYTES_PER_METHOD,
+          SUM,
+          Arrays.asList(GRPC_CLIENT_METHOD));
+
+  /**
+   * {@link View} for client sent messages.
+   *
+   * @since 0.18
+   */
+  public static final View GRPC_CLIENT_SENT_MESSAGES_PER_METHOD_VIEW =
+      View.create(
+          View.Name.create("grpc.io/client/sent_messages_per_method"),
+          "Number of messages sent",
+          GRPC_CLIENT_SENT_MESSAGES_PER_METHOD,
+          COUNT,
+          Arrays.asList(GRPC_CLIENT_METHOD));
+
+  /**
+   * {@link View} for client received messages.
+   *
+   * @since 0.18
+   */
+  public static final View GRPC_CLIENT_RECEIVED_MESSAGES_PER_METHOD_VIEW =
+      View.create(
+          View.Name.create("grpc.io/client/received_messages_per_method"),
+          "Number of messages received",
+          GRPC_CLIENT_RECEIVED_MESSAGES_PER_METHOD,
+          COUNT,
+          Arrays.asList(GRPC_CLIENT_METHOD));
+
+  /**
    * {@link View} for completed client RPCs.
    *
    * <p>This {@code View} uses measure {@code GRPC_CLIENT_ROUNDTRIP_LATENCY}, since completed RPCs
@@ -440,6 +513,71 @@
           COUNT,
           Arrays.asList(GRPC_CLIENT_METHOD));
 
+  /**
+   * {@link View} for client retries per call.
+   *
+   * @since 0.28
+   */
+  public static final View GRPC_CLIENT_RETRIES_PER_CALL_VIEW =
+      View.create(
+          View.Name.create("grpc.io/client/retries_per_call"),
+          "Number of client retries per call",
+          GRPC_CLIENT_RETRIES_PER_CALL,
+          AGGREGATION_WITH_COUNT_RETRY_HISTOGRAM,
+          Arrays.asList(GRPC_CLIENT_METHOD));
+
+  /**
+   * {@link View} for total transparent client retries across calls.
+   *
+   * @since 0.28
+   */
+  public static final View GRPC_CLIENT_TRANSPARENT_RETRIES_VIEW =
+      View.create(
+          View.Name.create("grpc.io/client/transparent_retries"),
+          "Total number of transparent client retries across calls",
+          GRPC_CLIENT_TRANSPARENT_RETRIES_PER_CALL,
+          SUM,
+          Arrays.asList(GRPC_CLIENT_METHOD));
+
+  /**
+   * {@link View} for total time of delay while there is no active attempt during the client call.
+   *
+   * @since 0.28
+   */
+  public static final View GRPC_CLIENT_RETRY_DELAY_PER_CALL_VIEW =
+      View.create(
+          View.Name.create("grpc.io/client/retry_delay_per_call"),
+          "Total time of delay while there is no active attempt during the client call",
+          GRPC_CLIENT_RETRY_DELAY_PER_CALL,
+          AGGREGATION_WITH_MILLIS_HISTOGRAM,
+          Arrays.asList(GRPC_CLIENT_METHOD));
+
+  /**
+   * {@link View} for total retries across all calls, excluding transparent retries.
+   *
+   * @since 0.28
+   */
+  public static final View GRPC_CLIENT_RETRIES_VIEW =
+      View.create(
+          View.Name.create("grpc.io/client/retries"),
+          "Total number of client retries across all calls",
+          GRPC_CLIENT_RETRIES_PER_CALL,
+          SUM,
+          Arrays.asList(GRPC_CLIENT_METHOD));
+
+  /**
+   * {@link View} for transparent retries per call.
+   *
+   * @since 0.28
+   */
+  public static final View GRPC_CLIENT_TRANSPARENT_RETRIES_PER_CALL_VIEW =
+      View.create(
+          View.Name.create("grpc.io/client/transparent_retries_per_call"),
+          "Number of transparent client retries per call",
+          GRPC_CLIENT_TRANSPARENT_RETRIES_PER_CALL,
+          AGGREGATION_WITH_COUNT_RETRY_HISTOGRAM,
+          Arrays.asList(GRPC_CLIENT_METHOD));
+
   // Rpc server cumulative views.
 
   /**
@@ -684,6 +822,58 @@
           Arrays.asList(GRPC_SERVER_METHOD));
 
   /**
+   * {@link View} for total server sent bytes per method.
+   *
+   * @since 0.18
+   */
+  public static final View GRPC_SERVER_SENT_BYTES_PER_METHOD_VIEW =
+      View.create(
+          View.Name.create("grpc.io/server/sent_bytes_per_method"),
+          "Sent bytes per method",
+          GRPC_SERVER_SENT_BYTES_PER_METHOD,
+          SUM,
+          Arrays.asList(GRPC_SERVER_METHOD));
+
+  /**
+   * {@link View} for total server received bytes per method.
+   *
+   * @since 0.18
+   */
+  public static final View GRPC_SERVER_RECEIVED_BYTES_PER_METHOD_VIEW =
+      View.create(
+          View.Name.create("grpc.io/server/received_bytes_per_method"),
+          "Received bytes per method",
+          GRPC_SERVER_RECEIVED_BYTES_PER_METHOD,
+          SUM,
+          Arrays.asList(GRPC_SERVER_METHOD));
+
+  /**
+   * {@link View} for server sent messages.
+   *
+   * @since 0.18
+   */
+  public static final View GRPC_SERVER_SENT_MESSAGES_PER_METHOD_VIEW =
+      View.create(
+          View.Name.create("grpc.io/server/sent_messages_per_method"),
+          "Number of messages sent",
+          GRPC_SERVER_SENT_MESSAGES_PER_METHOD,
+          COUNT,
+          Arrays.asList(GRPC_SERVER_METHOD));
+
+  /**
+   * {@link View} for server received messages.
+   *
+   * @since 0.18
+   */
+  public static final View GRPC_SERVER_RECEIVED_MESSAGES_PER_METHOD_VIEW =
+      View.create(
+          View.Name.create("grpc.io/server/received_messages_per_method"),
+          "Number of messages received",
+          GRPC_SERVER_RECEIVED_MESSAGES_PER_METHOD,
+          COUNT,
+          Arrays.asList(GRPC_SERVER_METHOD));
+
+  /**
    * {@link View} for completed server RPCs.
    *
    * <p>This {@code View} uses measure {@code GRPC_SERVER_SERVER_LATENCY}, since completed RPCs can
diff --git a/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcViews.java b/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcViews.java
index ef06ba2..1926c30 100644
--- a/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcViews.java
+++ b/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcViews.java
@@ -126,6 +126,39 @@
           RpcViewConstants.RPC_SERVER_STARTED_COUNT_HOUR_VIEW,
           RpcViewConstants.RPC_SERVER_FINISHED_COUNT_HOUR_VIEW);
 
+  @VisibleForTesting
+  static final ImmutableSet<View> GRPC_REAL_TIME_METRICS_VIEWS_SET =
+      ImmutableSet.of(
+          RpcViewConstants.GRPC_CLIENT_SENT_BYTES_PER_METHOD_VIEW,
+          RpcViewConstants.GRPC_CLIENT_RECEIVED_BYTES_PER_METHOD_VIEW,
+          RpcViewConstants.GRPC_CLIENT_SENT_MESSAGES_PER_METHOD_VIEW,
+          RpcViewConstants.GRPC_CLIENT_RECEIVED_MESSAGES_PER_METHOD_VIEW,
+          RpcViewConstants.GRPC_SERVER_SENT_BYTES_PER_METHOD_VIEW,
+          RpcViewConstants.GRPC_SERVER_RECEIVED_BYTES_PER_METHOD_VIEW,
+          RpcViewConstants.GRPC_SERVER_SENT_MESSAGES_PER_METHOD_VIEW,
+          RpcViewConstants.GRPC_SERVER_RECEIVED_MESSAGES_PER_METHOD_VIEW);
+
+  @VisibleForTesting
+  static final ImmutableSet<View> GRPC_CLIENT_BASIC_VIEWS_SET =
+      ImmutableSet.of(
+          RpcViewConstants.GRPC_CLIENT_ROUNDTRIP_LATENCY_VIEW,
+          RpcViewConstants.GRPC_CLIENT_STARTED_RPC_VIEW);
+
+  @VisibleForTesting
+  static final ImmutableSet<View> GRPC_CLIENT_RETRY_VIEWS_SET =
+      ImmutableSet.of(
+          RpcViewConstants.GRPC_CLIENT_RETRIES_PER_CALL_VIEW,
+          RpcViewConstants.GRPC_CLIENT_RETRIES_VIEW,
+          RpcViewConstants.GRPC_CLIENT_TRANSPARENT_RETRIES_PER_CALL_VIEW,
+          RpcViewConstants.GRPC_CLIENT_TRANSPARENT_RETRIES_VIEW,
+          RpcViewConstants.GRPC_CLIENT_RETRY_DELAY_PER_CALL_VIEW);
+
+  @VisibleForTesting
+  static final ImmutableSet<View> GRPC_SERVER_BASIC_VIEWS_SET =
+      ImmutableSet.of(
+          RpcViewConstants.GRPC_SERVER_SERVER_LATENCY_VIEW,
+          RpcViewConstants.GRPC_SERVER_STARTED_RPC_VIEW);
+
   /**
    * Registers all standard gRPC views.
    *
@@ -165,6 +198,24 @@
   }
 
   /**
+   * Registers client retry gRPC views.
+   *
+   * <p>It is recommended to call this method before doing any RPC call to avoid missing stats.
+   *
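+   * <p>A minimal usage sketch; call once during application startup, before issuing any RPCs:
+   *
+   * <pre>{@code
+   * RpcViews.registerClientRetryGrpcViews();
+   * }</pre>
+   *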
+   * @since 0.31.0
+   */
+  public static void registerClientRetryGrpcViews() {
+    registerClientRetryGrpcViews(Stats.getViewManager());
+  }
+
+  @VisibleForTesting
+  static void registerClientRetryGrpcViews(ViewManager viewManager) {
+    for (View view : GRPC_CLIENT_RETRY_VIEWS_SET) {
+      viewManager.registerView(view);
+    }
+  }
+
+  /**
    * Registers all standard server gRPC views.
    *
    * <p>It is recommended to call this method before doing any RPC call to avoid missing stats.
@@ -183,6 +234,62 @@
   }
 
   /**
+   * Registers all basic gRPC views.
+   *
+   * <p>It is recommended to call this method before doing any RPC call to avoid missing stats.
+   *
+   * <p>This is equivalent to calling {@link #registerClientGrpcBasicViews()} and {@link
+   * #registerServerGrpcBasicViews()}.
+   *
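+   * <p>A minimal usage sketch; typically called once at startup:
+   *
+   * <pre>{@code
+   * RpcViews.registerAllGrpcBasicViews();
+   * }</pre>
+   *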
+   * @since 0.19
+   */
+  public static void registerAllGrpcBasicViews() {
+    registerAllGrpcBasicViews(Stats.getViewManager());
+  }
+
+  @VisibleForTesting
+  static void registerAllGrpcBasicViews(ViewManager viewManager) {
+    registerClientGrpcBasicViews(viewManager);
+    registerServerGrpcBasicViews(viewManager);
+  }
+
+  /**
+   * Registers basic client gRPC views.
+   *
+   * <p>It is recommended to call this method before doing any RPC call to avoid missing stats.
+   *
+   * @since 0.19
+   */
+  public static void registerClientGrpcBasicViews() {
+    registerClientGrpcBasicViews(Stats.getViewManager());
+  }
+
+  @VisibleForTesting
+  static void registerClientGrpcBasicViews(ViewManager viewManager) {
+    for (View view : GRPC_CLIENT_BASIC_VIEWS_SET) {
+      viewManager.registerView(view);
+    }
+  }
+
+  /**
+   * Registers basic server gRPC views.
+   *
+   * <p>It is recommended to call this method before doing any RPC call to avoid missing stats.
+   *
+   * @since 0.19
+   */
+  public static void registerServerGrpcBasicViews() {
+    registerServerGrpcBasicViews(Stats.getViewManager());
+  }
+
+  @VisibleForTesting
+  static void registerServerGrpcBasicViews(ViewManager viewManager) {
+    for (View view : GRPC_SERVER_BASIC_VIEWS_SET) {
+      viewManager.registerView(view);
+    }
+  }
+
+  /**
    * Registers all standard cumulative views.
    *
    * <p>It is recommended to call this method before doing any RPC call to avoid missing stats.
@@ -246,5 +353,22 @@
     registerAllIntervalViews(viewManager);
   }
 
+  /**
+   * Registers views for real-time metrics reporting for streaming RPCs. These views produce data
+   * only for streaming gRPC calls.
+   *
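+   * <p>A minimal usage sketch; call once before starting any streaming RPCs:
+   *
+   * <pre>{@code
+   * RpcViews.registerRealTimeMetricsViews();
+   * }</pre>
+   *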
+   * @since 0.18
+   */
+  public static void registerRealTimeMetricsViews() {
+    registerRealTimeMetricsViews(Stats.getViewManager());
+  }
+
+  @VisibleForTesting
+  static void registerRealTimeMetricsViews(ViewManager viewManager) {
+    for (View view : GRPC_REAL_TIME_METRICS_VIEWS_SET) {
+      viewManager.registerView(view);
+    }
+  }
+
   private RpcViews() {}
 }
diff --git a/contrib/grpc_metrics/src/test/java/io/opencensus/contrib/grpc/metrics/RpcMeasureConstantsTest.java b/contrib/grpc_metrics/src/test/java/io/opencensus/contrib/grpc/metrics/RpcMeasureConstantsTest.java
index 107f0fe..1814613 100644
--- a/contrib/grpc_metrics/src/test/java/io/opencensus/contrib/grpc/metrics/RpcMeasureConstantsTest.java
+++ b/contrib/grpc_metrics/src/test/java/io/opencensus/contrib/grpc/metrics/RpcMeasureConstantsTest.java
@@ -52,9 +52,14 @@
     assertThat(RpcMeasureConstants.GRPC_CLIENT_SENT_MESSAGES_PER_RPC).isNotNull();
     assertThat(RpcMeasureConstants.GRPC_CLIENT_RECEIVED_BYTES_PER_RPC).isNotNull();
     assertThat(RpcMeasureConstants.GRPC_CLIENT_RECEIVED_MESSAGES_PER_RPC).isNotNull();
+    assertThat(RpcMeasureConstants.GRPC_CLIENT_SENT_BYTES_PER_METHOD).isNotNull();
+    assertThat(RpcMeasureConstants.GRPC_CLIENT_RECEIVED_BYTES_PER_METHOD).isNotNull();
     assertThat(RpcMeasureConstants.GRPC_CLIENT_SERVER_LATENCY).isNotNull();
     assertThat(RpcMeasureConstants.GRPC_CLIENT_ROUNDTRIP_LATENCY).isNotNull();
     assertThat(RpcMeasureConstants.GRPC_CLIENT_STARTED_RPCS).isNotNull();
+    assertThat(RpcMeasureConstants.GRPC_CLIENT_RETRIES_PER_CALL).isNotNull();
+    assertThat(RpcMeasureConstants.GRPC_CLIENT_TRANSPARENT_RETRIES_PER_CALL).isNotNull();
+    assertThat(RpcMeasureConstants.GRPC_CLIENT_RETRY_DELAY_PER_CALL).isNotNull();
 
     // Test server measurement descriptors.
     assertThat(RpcMeasureConstants.RPC_SERVER_ERROR_COUNT).isNotNull();
@@ -72,6 +77,8 @@
     assertThat(RpcMeasureConstants.GRPC_SERVER_SENT_MESSAGES_PER_RPC).isNotNull();
     assertThat(RpcMeasureConstants.GRPC_SERVER_RECEIVED_BYTES_PER_RPC).isNotNull();
     assertThat(RpcMeasureConstants.GRPC_SERVER_RECEIVED_MESSAGES_PER_RPC).isNotNull();
+    assertThat(RpcMeasureConstants.GRPC_SERVER_SENT_BYTES_PER_METHOD).isNotNull();
+    assertThat(RpcMeasureConstants.GRPC_SERVER_RECEIVED_BYTES_PER_METHOD).isNotNull();
     assertThat(RpcMeasureConstants.GRPC_SERVER_SERVER_LATENCY).isNotNull();
     assertThat(RpcMeasureConstants.GRPC_SERVER_STARTED_RPCS).isNotNull();
   }
diff --git a/contrib/grpc_metrics/src/test/java/io/opencensus/contrib/grpc/metrics/RpcViewConstantsTest.java b/contrib/grpc_metrics/src/test/java/io/opencensus/contrib/grpc/metrics/RpcViewConstantsTest.java
index 6f8b516..0212487 100644
--- a/contrib/grpc_metrics/src/test/java/io/opencensus/contrib/grpc/metrics/RpcViewConstantsTest.java
+++ b/contrib/grpc_metrics/src/test/java/io/opencensus/contrib/grpc/metrics/RpcViewConstantsTest.java
@@ -66,6 +66,9 @@
             0.0, 1.0, 2.0, 4.0, 8.0, 16.0, 32.0, 64.0, 128.0, 256.0, 512.0, 1024.0, 2048.0, 4096.0,
             8192.0, 16384.0, 32768.0, 65536.0)
         .inOrder();
+    assertThat(RpcViewConstants.RETRY_COUNT_PER_CALL_BUCKET_BOUNDARIES)
+        .containsExactly(1.0, 2.0, 3.0, 4.0, 5.0)
+        .inOrder();
 
     // Test Aggregations
     assertThat(RpcViewConstants.MEAN).isEqualTo(Mean.create());
@@ -82,7 +85,10 @@
         .isEqualTo(
             Distribution.create(
                 BucketBoundaries.create(RpcViewConstants.RPC_COUNT_BUCKET_BOUNDARIES)));
-
+    assertThat(RpcViewConstants.AGGREGATION_WITH_COUNT_RETRY_HISTOGRAM)
+        .isEqualTo(
+            Distribution.create(
+                BucketBoundaries.create(RpcViewConstants.RETRY_COUNT_PER_CALL_BUCKET_BOUNDARIES)));
     // Test Duration and Window
     assertThat(RpcViewConstants.MINUTE).isEqualTo(Duration.create(60, 0));
     assertThat(RpcViewConstants.HOUR).isEqualTo(Duration.create(60 * 60, 0));
@@ -107,8 +113,16 @@
     assertThat(RpcViewConstants.GRPC_CLIENT_RECEIVED_BYTES_PER_RPC_VIEW).isNotNull();
     assertThat(RpcViewConstants.GRPC_CLIENT_SENT_MESSAGES_PER_RPC_VIEW).isNotNull();
     assertThat(RpcViewConstants.GRPC_CLIENT_RECEIVED_MESSAGES_PER_RPC_VIEW).isNotNull();
+    assertThat(RpcViewConstants.GRPC_CLIENT_SENT_BYTES_PER_METHOD_VIEW).isNotNull();
+    assertThat(RpcViewConstants.GRPC_CLIENT_RECEIVED_BYTES_PER_METHOD_VIEW).isNotNull();
+    assertThat(RpcViewConstants.GRPC_CLIENT_SENT_MESSAGES_PER_METHOD_VIEW).isNotNull();
+    assertThat(RpcViewConstants.GRPC_CLIENT_RECEIVED_MESSAGES_PER_METHOD_VIEW).isNotNull();
     assertThat(RpcViewConstants.GRPC_CLIENT_SERVER_LATENCY_VIEW).isNotNull();
-    assertThat(RpcViewConstants.GRPC_CLIENT_STARTED_RPC_VIEW).isNotNull();
+    assertThat(RpcViewConstants.GRPC_CLIENT_RETRIES_PER_CALL_VIEW).isNotNull();
+    assertThat(RpcViewConstants.GRPC_CLIENT_RETRIES_VIEW).isNotNull();
+    assertThat(RpcViewConstants.GRPC_CLIENT_TRANSPARENT_RETRIES_PER_CALL_VIEW).isNotNull();
+    assertThat(RpcViewConstants.GRPC_CLIENT_TRANSPARENT_RETRIES_VIEW).isNotNull();
+    assertThat(RpcViewConstants.GRPC_CLIENT_RETRY_DELAY_PER_CALL_VIEW).isNotNull();
 
     // Test server distribution view descriptors.
     assertThat(RpcViewConstants.RPC_SERVER_ERROR_COUNT_VIEW).isNotNull();
@@ -124,6 +138,10 @@
     assertThat(RpcViewConstants.GRPC_SERVER_RECEIVED_BYTES_PER_RPC_VIEW).isNotNull();
     assertThat(RpcViewConstants.GRPC_SERVER_SENT_MESSAGES_PER_RPC_VIEW).isNotNull();
     assertThat(RpcViewConstants.GRPC_SERVER_RECEIVED_MESSAGES_PER_RPC_VIEW).isNotNull();
+    assertThat(RpcViewConstants.GRPC_SERVER_SENT_BYTES_PER_METHOD_VIEW).isNotNull();
+    assertThat(RpcViewConstants.GRPC_SERVER_RECEIVED_BYTES_PER_METHOD_VIEW).isNotNull();
+    assertThat(RpcViewConstants.GRPC_SERVER_SENT_MESSAGES_PER_METHOD_VIEW).isNotNull();
+    assertThat(RpcViewConstants.GRPC_SERVER_RECEIVED_MESSAGES_PER_METHOD_VIEW).isNotNull();
     assertThat(RpcViewConstants.GRPC_SERVER_SERVER_LATENCY_VIEW).isNotNull();
     assertThat(RpcViewConstants.GRPC_SERVER_STARTED_RPC_VIEW).isNotNull();
 
diff --git a/contrib/grpc_metrics/src/test/java/io/opencensus/contrib/grpc/metrics/RpcViewsTest.java b/contrib/grpc_metrics/src/test/java/io/opencensus/contrib/grpc/metrics/RpcViewsTest.java
index a908629..5052ab1 100644
--- a/contrib/grpc_metrics/src/test/java/io/opencensus/contrib/grpc/metrics/RpcViewsTest.java
+++ b/contrib/grpc_metrics/src/test/java/io/opencensus/contrib/grpc/metrics/RpcViewsTest.java
@@ -84,6 +84,14 @@
   }
 
   @Test
+  public void registerClientRetryGrpcViews() {
+    FakeViewManager fakeViewManager = new FakeViewManager();
+    RpcViews.registerClientRetryGrpcViews(fakeViewManager);
+    assertThat(fakeViewManager.getRegisteredViews())
+        .containsExactlyElementsIn(RpcViews.GRPC_CLIENT_RETRY_VIEWS_SET);
+  }
+
+  @Test
   public void registerServerGrpcViews() {
     FakeViewManager fakeViewManager = new FakeViewManager();
     RpcViews.registerServerGrpcViews(fakeViewManager);
@@ -91,6 +99,42 @@
         .containsExactlyElementsIn(RpcViews.GRPC_SERVER_VIEWS_SET);
   }
 
+  @Test
+  public void registerAllGrpcBasicViews() {
+    FakeViewManager fakeViewManager = new FakeViewManager();
+    RpcViews.registerAllGrpcBasicViews(fakeViewManager);
+    assertThat(fakeViewManager.getRegisteredViews())
+        .containsExactlyElementsIn(
+            ImmutableSet.builder()
+                .addAll(RpcViews.GRPC_CLIENT_BASIC_VIEWS_SET)
+                .addAll(RpcViews.GRPC_SERVER_BASIC_VIEWS_SET)
+                .build());
+  }
+
+  @Test
+  public void registerClientGrpcBasicViews() {
+    FakeViewManager fakeViewManager = new FakeViewManager();
+    RpcViews.registerClientGrpcBasicViews(fakeViewManager);
+    assertThat(fakeViewManager.getRegisteredViews())
+        .containsExactlyElementsIn(RpcViews.GRPC_CLIENT_BASIC_VIEWS_SET);
+  }
+
+  @Test
+  public void registerServerGrpcBasicViews() {
+    FakeViewManager fakeViewManager = new FakeViewManager();
+    RpcViews.registerServerGrpcBasicViews(fakeViewManager);
+    assertThat(fakeViewManager.getRegisteredViews())
+        .containsExactlyElementsIn(RpcViews.GRPC_SERVER_BASIC_VIEWS_SET);
+  }
+
+  @Test
+  public void registerRealTimeMetricsViews() {
+    FakeViewManager fakeViewManager = new FakeViewManager();
+    RpcViews.registerRealTimeMetricsViews(fakeViewManager);
+    assertThat(fakeViewManager.getRegisteredViews())
+        .containsExactlyElementsIn(RpcViews.GRPC_REAL_TIME_METRICS_VIEWS_SET);
+  }
+
   // TODO(bdrutu): Test with reflection that all defined gRPC views are registered.
 
   private static final class FakeViewManager extends ViewManager {
diff --git a/contrib/grpc_util/README.md b/contrib/grpc_util/README.md
index 7c5c7b9..235d899 100644
--- a/contrib/grpc_util/README.md
+++ b/contrib/grpc_util/README.md
@@ -16,14 +16,14 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-contrib-grpc-util</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
 </dependencies>
 ```
 
 For Gradle add to your dependencies:
-```gradle
-compile 'io.opencensus:opencensus-contrib-grpc-util:0.16.1'
+```groovy
+compile 'io.opencensus:opencensus-contrib-grpc-util:0.28.3'
 ```
 
 [travis-image]: https://travis-ci.org/census-instrumentation/opencensus-java.svg?branch=master
diff --git a/contrib/http_jaxrs/README.md b/contrib/http_jaxrs/README.md
new file mode 100644
index 0000000..07aa74d
--- /dev/null
+++ b/contrib/http_jaxrs/README.md
@@ -0,0 +1,108 @@
+# OpenCensus JAX-RS
+[![Build Status][travis-image]][travis-url]
+[![Windows Build Status][appveyor-image]][appveyor-url]
+[![Maven Central][maven-image]][maven-url]
+
+The *OpenCensus JAX-RS for Java* provides container and client filters for trace instrumentation when using JAX-RS for REST implementations in Java.
+
+## Quickstart
+
+### Add the dependencies to your project
+
+For Maven add to your `pom.xml`:
+```xml
+<dependencies>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-api</artifactId>
+    <version>0.28.3</version>
+  </dependency>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-contrib-http-jaxrs</artifactId>
+    <version>0.28.3</version>
+  </dependency>
+</dependencies>
+```
+
+For Gradle add to your dependencies:
+```groovy
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-contrib-http-jaxrs:0.28.3'
+```
+
+### Usage
+
+#### Container Filter
+
+The container filter should be added to the JAX-RS `Application` class, and endpoints should be annotated
+with the `@Metrics` annotation.
+
+```java
+class MyApplication extends Application {
+  @Override
+  public Set<Class<?>> getClasses() {
+      Set<Class<?>> providers = new HashSet<>(super.getClasses());
+      providers.add(JaxrsContainerFilter.class);
+      return providers;
+  }
+}
+```
+
+The filter can be customized by using the custom constructor. The example below
+uses the `B3Format` for context propagation instead of the W3C trace context format.
+
+```java
+class MyApplication extends Application {
+  @Override
+  public Set<Object> getSingletons() {
+    Set<Object> singletons = new HashSet<>(super.getSingletons());
+    singletons.add(new JaxrsContainerFilter(
+        new JaxrsContainerExtractor(),
+        Tracing.getPropagationComponent().getB3Format(),
+        /* publicEndpoint= */ true));
+    return singletons;
+  }
+}
+```
+
+```java
+@Metrics
+@Path("/resource")
+class MyResource {
+  @GET
+  public Response resource() {
+    ...
+  }
+}
+```
+
+The annotation may also be applied at the method level.
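+For example, a sketch of the same resource with the annotation moved from the class to the method:
+
+```java
+@Path("/resource")
+class MyResource {
+  @Metrics
+  @GET
+  public Response resource() {
+    ...
+  }
+}
+```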
+
+#### Client Filter
+
+The filter should be added to the `WebTarget` instance when using JAX-RS as a client.
+
+```java
+WebTarget target = ClientBuilder.newClient().target("endpoint");
+target.register(JaxrsClientFilter.class);
+```
+
+The filter can be customized using the custom constructor. The example
+below uses the `B3Format` for context propagation instead of the default W3C
+trace context format.
+
+```java
+WebTarget target = ClientBuilder.newClient().target("endpoint");
+target.register(new JaxrsClientFilter(
+    new JaxrsClientExtractor(),
+    Tracing.getPropagationComponent().getB3Format()));
+```
+
+
+[travis-image]: https://travis-ci.org/census-instrumentation/opencensus-java.svg?branch=master
+[travis-url]: https://travis-ci.org/census-instrumentation/opencensus-java
+[appveyor-image]: https://ci.appveyor.com/api/projects/status/hxthmpkxar4jq4be/branch/master?svg=true
+[appveyor-url]: https://ci.appveyor.com/project/opencensusjavateam/opencensus-java/branch/master
+[maven-image]: https://maven-badges.herokuapp.com/maven-central/io.opencensus/opencensus-contrib-http-jaxrs/badge.svg
+[maven-url]: https://maven-badges.herokuapp.com/maven-central/io.opencensus/opencensus-contrib-http-jaxrs
diff --git a/contrib/http_jaxrs/build.gradle b/contrib/http_jaxrs/build.gradle
new file mode 100644
index 0000000..ec12bd8
--- /dev/null
+++ b/contrib/http_jaxrs/build.gradle
@@ -0,0 +1,24 @@
+plugins {
+    id 'java'
+}
+
+description = 'OpenCensus Http JAX-RS Plugin'
+
+[compileJava, compileTestJava].each() {
+    it.sourceCompatibility = 1.8
+    it.targetCompatibility = 1.8
+}
+
+dependencies {
+    compile project(':opencensus-api')
+    compile project(':opencensus-contrib-http-util')
+
+    // Will be provided from elsewhere at runtime
+    compileOnly('javax.ws.rs:javax.ws.rs-api:2.1.1')
+    compileOnly('javax.annotation:javax.annotation-api:1.3.2')
+
+    testCompile('javax.ws.rs:javax.ws.rs-api:2.1.1')
+    testCompile('javax.annotation:javax.annotation-api:1.3.2')
+
+    signature "org.codehaus.mojo.signature:java17:1.0@signature"
+}
diff --git a/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/ExtendedContainerRequest.java b/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/ExtendedContainerRequest.java
new file mode 100644
index 0000000..c5877e1
--- /dev/null
+++ b/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/ExtendedContainerRequest.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jaxrs;
+
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ResourceInfo;
+
+/** Contains both {@link ContainerRequestContext} and {@link ResourceInfo}. */
+class ExtendedContainerRequest {
+  private final ContainerRequestContext requestContext;
+  private final ResourceInfo resourceInfo;
+
+  public ExtendedContainerRequest(
+      ContainerRequestContext requestContext, ResourceInfo resourceInfo) {
+    this.requestContext = requestContext;
+    this.resourceInfo = resourceInfo;
+  }
+
+  public ContainerRequestContext getRequestContext() {
+    return requestContext;
+  }
+
+  public ResourceInfo getResourceInfo() {
+    return resourceInfo;
+  }
+}
diff --git a/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/JaxrsClientExtractor.java b/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/JaxrsClientExtractor.java
new file mode 100644
index 0000000..d2a45fb
--- /dev/null
+++ b/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/JaxrsClientExtractor.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jaxrs;
+
+import io.opencensus.contrib.http.HttpExtractor;
+import javax.annotation.Nullable;
+import javax.ws.rs.client.ClientRequestContext;
+import javax.ws.rs.client.ClientResponseContext;
+
+/**
+ * Extracts information from JAX-RS client request and response.
+ *
+ * @since 0.19
+ */
+public class JaxrsClientExtractor
+    extends HttpExtractor<ClientRequestContext, ClientResponseContext> {
+
+  @Nullable
+  @Override
+  public String getRoute(ClientRequestContext request) {
+    return null;
+  }
+
+  @Nullable
+  @Override
+  public String getUrl(ClientRequestContext request) {
+    return request.getUri().toString();
+  }
+
+  @Nullable
+  @Override
+  public String getHost(ClientRequestContext request) {
+    if (request == null) {
+      return "null_request";
+    } else if (request.getUri() == null) {
+      return "null_uri";
+    } else {
+      return request.getUri().getHost();
+    }
+  }
+
+  @Nullable
+  @Override
+  public String getMethod(ClientRequestContext request) {
+    return request.getMethod();
+  }
+
+  @Nullable
+  @Override
+  public String getPath(ClientRequestContext request) {
+    return request.getUri().getPath();
+  }
+
+  @Nullable
+  @Override
+  public String getUserAgent(ClientRequestContext request) {
+    return request.getHeaderString("user-agent");
+  }
+
+  @Override
+  public int getStatusCode(@Nullable ClientResponseContext response) {
+    return response != null ? response.getStatus() : 0;
+  }
+}
diff --git a/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/JaxrsClientFilter.java b/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/JaxrsClientFilter.java
new file mode 100644
index 0000000..340eb54
--- /dev/null
+++ b/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/JaxrsClientFilter.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jaxrs;
+
+import io.opencensus.contrib.http.HttpClientHandler;
+import io.opencensus.contrib.http.HttpExtractor;
+import io.opencensus.contrib.http.HttpRequestContext;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.propagation.TextFormat;
+import io.opencensus.trace.propagation.TextFormat.Setter;
+import javax.ws.rs.client.ClientRequestContext;
+import javax.ws.rs.client.ClientRequestFilter;
+import javax.ws.rs.client.ClientResponseContext;
+import javax.ws.rs.client.ClientResponseFilter;
+import javax.ws.rs.ext.Provider;
+
+/**
+ * JAX-RS client request and response filter to provide instrumentation of client calls with
+ * OpenCensus.
+ *
+ * @since 0.19
+ */
+@Provider
+public class JaxrsClientFilter implements ClientRequestFilter, ClientResponseFilter {
+
+  private static final String OPENCENSUS_CONTEXT = "opencensus.context";
+  private static final Setter<ClientRequestContext> SETTER =
+      new Setter<ClientRequestContext>() {
+        @Override
+        public void put(ClientRequestContext carrier, String key, String value) {
+          carrier.getHeaders().putSingle(key, value);
+        }
+      };
+
+  private final HttpClientHandler<ClientRequestContext, ClientResponseContext, ClientRequestContext>
+      handler;
+
+  /** Constructs a new client filter with default configuration. */
+  public JaxrsClientFilter() {
+    this(new JaxrsClientExtractor(), Tracing.getPropagationComponent().getTraceContextFormat());
+  }
+
+  /**
+   * Constructs a new client filter with custom configuration.
+   *
+   * @param extractor the {@code HttpExtractor} used to extract information from the
+   *     request/response.
+   * @param propagationFormat the {@code TextFormat} used in HTTP propagation.
+   */
+  public JaxrsClientFilter(
+      HttpExtractor<ClientRequestContext, ClientResponseContext> extractor,
+      TextFormat propagationFormat) {
+    handler = new HttpClientHandler<>(Tracing.getTracer(), extractor, propagationFormat, SETTER);
+  }
+
+  @Override
+  public void filter(ClientRequestContext requestContext) {
+    HttpRequestContext context = handler.handleStart(null, requestContext, requestContext);
+    requestContext.setProperty(OPENCENSUS_CONTEXT, context);
+  }
+
+  @Override
+  public void filter(ClientRequestContext requestContext, ClientResponseContext responseContext) {
+    HttpRequestContext context =
+        (HttpRequestContext) requestContext.getProperty(OPENCENSUS_CONTEXT);
+    handler.handleEnd(context, requestContext, responseContext, null);
+  }
+}
diff --git a/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/JaxrsContainerExtractor.java b/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/JaxrsContainerExtractor.java
new file mode 100644
index 0000000..8b930f6
--- /dev/null
+++ b/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/JaxrsContainerExtractor.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jaxrs;
+
+import io.opencensus.contrib.http.HttpExtractor;
+import java.lang.reflect.Method;
+import javax.annotation.Nullable;
+import javax.ws.rs.Path;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ResourceInfo;
+
+/**
+ * Extracts information from JAX-RS container request and response.
+ *
+ * @since 0.19
+ */
+public class JaxrsContainerExtractor
+    extends HttpExtractor<ExtendedContainerRequest, ContainerResponseContext> {
+
+  @Nullable
+  @Override
+  public String getRoute(ExtendedContainerRequest request) {
+    return resolveRoute(request.getResourceInfo());
+  }
+
+  @Nullable
+  @Override
+  public String getUrl(ExtendedContainerRequest request) {
+    return request.getRequestContext().getUriInfo().getRequestUri().toString();
+  }
+
+  @Nullable
+  @Override
+  public String getHost(ExtendedContainerRequest request) {
+    return request.getRequestContext().getHeaderString("host");
+  }
+
+  @Nullable
+  @Override
+  public String getMethod(ExtendedContainerRequest request) {
+    return request.getRequestContext().getMethod();
+  }
+
+  @Nullable
+  @Override
+  public String getPath(ExtendedContainerRequest request) {
+    return request.getRequestContext().getUriInfo().getPath();
+  }
+
+  @Nullable
+  @Override
+  public String getUserAgent(ExtendedContainerRequest request) {
+    return request.getRequestContext().getHeaderString("user-agent");
+  }
+
+  @Override
+  public int getStatusCode(@Nullable ContainerResponseContext response) {
+    return response != null ? response.getStatus() : 0;
+  }
+
+  @Nullable
+  @SuppressWarnings("dereference.of.nullable") // The annotations are checked
+  private static String resolveRoute(ResourceInfo info) {
+    StringBuilder path = new StringBuilder();
+
+    Class<?> c = info.getResourceClass();
+    if (c != null && c.isAnnotationPresent(Path.class)) {
+      Path p = c.getAnnotation(Path.class);
+      path.append(p.value());
+    }
+
+    Method m = info.getResourceMethod();
+    if (m != null && m.isAnnotationPresent(Path.class)) {
+      Path p = m.getAnnotation(Path.class);
+      if (!endsWithSlash(path) && !p.value().startsWith("/")) {
+        path.append("/");
+      }
+      if (endsWithSlash(path) && p.value().startsWith("/")) {
+        path.deleteCharAt(path.lastIndexOf("/"));
+      }
+      path.append(p.value());
+    }
+
+    return path.length() == 0 ? null : path.toString();
+  }
+
+  private static boolean endsWithSlash(StringBuilder path) {
+    return path.lastIndexOf("/") == (path.length() - 1);
+  }
+}
diff --git a/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/JaxrsContainerFilter.java b/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/JaxrsContainerFilter.java
new file mode 100644
index 0000000..41dbfcd
--- /dev/null
+++ b/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/JaxrsContainerFilter.java
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jaxrs;
+
+import io.opencensus.common.Scope;
+import io.opencensus.contrib.http.HttpExtractor;
+import io.opencensus.contrib.http.HttpRequestContext;
+import io.opencensus.contrib.http.HttpServerHandler;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.propagation.TextFormat;
+import io.opencensus.trace.propagation.TextFormat.Getter;
+import java.io.IOException;
+import javax.annotation.Priority;
+import javax.ws.rs.Priorities;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ContainerResponseFilter;
+import javax.ws.rs.container.ResourceInfo;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.ext.Provider;
+
+/**
+ * JAX-RS request and response filter to provide instrumentation of JAX-RS based endpoint with
+ * OpenCensus. Filter will instrument any endpoint marked with the {@link Metrics} annotation.
+ * Filter will also start a scoped span so that child spans may be added.
+ *
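+ * <p>A minimal registration sketch (assuming a JAX-RS {@code Application} subclass):
+ *
+ * <pre>{@code
+ * public Set<Class<?>> getClasses() {
+ *   Set<Class<?>> providers = new HashSet<>(super.getClasses());
+ *   providers.add(JaxrsContainerFilter.class);
+ *   return providers;
+ * }
+ * }</pre>
+ *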
+ * @since 0.19
+ */
+@Metrics
+@Provider
+@Priority(Priorities.USER - 100)
+public class JaxrsContainerFilter implements ContainerRequestFilter, ContainerResponseFilter {
+
+  private static final String CONTEXT_PROPERTY = "opencensus.context";
+  private static final String SPAN_PROPERTY = "opencensus.span";
+  private static final Getter<ContainerRequestContext> GETTER =
+      new Getter<ContainerRequestContext>() {
+        @Override
+        public String get(ContainerRequestContext request, String key) {
+          return request.getHeaderString(key);
+        }
+      };
+
+  private final HttpServerHandler<
+          ExtendedContainerRequest, ContainerResponseContext, ContainerRequestContext>
+      handler;
+
+  @SuppressWarnings("initialization.fields.uninitialized") // Will be injected by JAX-RS
+  @Context
+  private ResourceInfo info;
+
+  /**
+   * Default constructor; constructs a new instance with {@link JaxrsContainerExtractor}, {@link
+   * io.opencensus.trace.propagation.PropagationComponent#getTraceContextFormat()}, and treats the
+   * endpoint as public.
+   *
+   * @see #JaxrsContainerFilter(HttpExtractor, TextFormat, Boolean)
+   */
+  public JaxrsContainerFilter() {
+    this(
+        new JaxrsContainerExtractor(),
+        Tracing.getPropagationComponent().getTraceContextFormat(),
+        /* publicEndpoint= */ true);
+  }
+
+  /**
+   * Constructs an instance with custom configuration.
+   *
+   * @param extractor the {@code HttpExtractor} used to extract information from the
+   *     request/response.
+   * @param propagationFormat the {@code TextFormat} used in HTTP propagation.
+   * @param publicEndpoint set to true for a publicly accessible HTTP(S) server. If true, the
+   *     incoming trace context will be added as a link instead of as a parent.
+   */
+  public JaxrsContainerFilter(
+      HttpExtractor<ExtendedContainerRequest, ContainerResponseContext> extractor,
+      TextFormat propagationFormat,
+      Boolean publicEndpoint) {
+    this.handler =
+        new HttpServerHandler<>(
+            Tracing.getTracer(), extractor, propagationFormat, GETTER, publicEndpoint);
+  }
+
+  @Override
+  @SuppressWarnings("MustBeClosedChecker") // Close will happen in response filter method
+  public void filter(ContainerRequestContext requestContext) throws IOException {
+    ExtendedContainerRequest extendedRequest = new ExtendedContainerRequest(requestContext, info);
+    HttpRequestContext context = handler.handleStart(requestContext, extendedRequest);
+    requestContext.setProperty(CONTEXT_PROPERTY, context);
+    if (requestContext.getLength() > 0) {
+      handler.handleMessageReceived(context, requestContext.getLength());
+    }
+    requestContext.setProperty(
+        SPAN_PROPERTY, Tracing.getTracer().withSpan(handler.getSpanFromContext(context)));
+  }
+
+  @Override
+  public void filter(
+      ContainerRequestContext requestContext, ContainerResponseContext responseContext)
+      throws IOException {
+    HttpRequestContext context = (HttpRequestContext) requestContext.getProperty(CONTEXT_PROPERTY);
+    if (context == null) {
+      // JAX-RS response filters are always invoked - we only want to record something if the
+      // request came through this filter.
+      return;
+    }
+    Scope scope = (Scope) requestContext.getProperty(SPAN_PROPERTY);
+    if (scope != null) {
+      scope.close();
+    }
+    if (responseContext.getLength() > 0) {
+      handler.handleMessageSent(context, responseContext.getLength());
+    }
+    ExtendedContainerRequest extendedRequest = new ExtendedContainerRequest(requestContext, info);
+    handler.handleEnd(context, extendedRequest, responseContext, null);
+  }
+}
diff --git a/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/Metrics.java b/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/Metrics.java
new file mode 100644
index 0000000..acf1650
--- /dev/null
+++ b/contrib/http_jaxrs/src/main/java/io/opencensus/contrib/http/jaxrs/Metrics.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jaxrs;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import javax.ws.rs.NameBinding;
+
+/**
+ * Annotation to mark JAX-RS endpoint or method for metrics collection.
+ *
+ * @since 0.19
+ */
+@NameBinding
+@Target({ElementType.TYPE, ElementType.METHOD})
+@Retention(RetentionPolicy.RUNTIME)
+public @interface Metrics {}
diff --git a/contrib/http_jaxrs/src/test/java/io/opencensus/contrib/http/jaxrs/JaxrsClientExtractorTest.java b/contrib/http_jaxrs/src/test/java/io/opencensus/contrib/http/jaxrs/JaxrsClientExtractorTest.java
new file mode 100644
index 0000000..219e75f
--- /dev/null
+++ b/contrib/http_jaxrs/src/test/java/io/opencensus/contrib/http/jaxrs/JaxrsClientExtractorTest.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jaxrs;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.net.URI;
+import javax.ws.rs.client.ClientRequestContext;
+import javax.ws.rs.client.ClientResponseContext;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.junit.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.Silent.class)
+public class JaxrsClientExtractorTest {
+
+  @Before
+  public void setUp() {
+    // Mockito in this test depends on some class that's only available on JDK 1.8:
+    // TypeDescription$Generic$AnnotationReader$Dispatcher$ForJava8CapableVm
+    Assume.assumeTrue(System.getProperty("java.version").startsWith("1.8"));
+  }
+
+  @Test
+  public void testExtraction() {
+    URI uri = URI.create("https://myhost/resource");
+
+    ClientRequestContext requestContext = mock(ClientRequestContext.class);
+    when(requestContext.getUri()).thenReturn(uri);
+    when(requestContext.getMethod()).thenReturn("GET");
+    when(requestContext.getHeaderString("user-agent")).thenReturn("java/1.8");
+
+    ClientResponseContext responseContext = mock(ClientResponseContext.class);
+    when(responseContext.getStatus()).thenReturn(200);
+
+    JaxrsClientExtractor extractor = new JaxrsClientExtractor();
+    assertEquals("myhost", extractor.getHost(requestContext));
+    assertEquals("GET", extractor.getMethod(requestContext));
+    assertEquals("/resource", extractor.getPath(requestContext));
+    assertNull(extractor.getRoute(requestContext));
+    assertEquals("https://myhost/resource", extractor.getUrl(requestContext));
+    assertEquals("java/1.8", extractor.getUserAgent(requestContext));
+    assertEquals(200, extractor.getStatusCode(responseContext));
+  }
+}
diff --git a/contrib/http_jaxrs/src/test/java/io/opencensus/contrib/http/jaxrs/JaxrsClientFilterTest.java b/contrib/http_jaxrs/src/test/java/io/opencensus/contrib/http/jaxrs/JaxrsClientFilterTest.java
new file mode 100644
index 0000000..30893cb
--- /dev/null
+++ b/contrib/http_jaxrs/src/test/java/io/opencensus/contrib/http/jaxrs/JaxrsClientFilterTest.java
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jaxrs;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import io.opencensus.contrib.http.HttpRequestContext;
+import io.opencensus.tags.TagContext;
+import io.opencensus.trace.Annotation;
+import io.opencensus.trace.AttributeValue;
+import io.opencensus.trace.EndSpanOptions;
+import io.opencensus.trace.Link;
+import io.opencensus.trace.MessageEvent;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.Status;
+import java.lang.reflect.Constructor;
+import java.net.URI;
+import java.util.EnumSet;
+import java.util.Map;
+import javax.ws.rs.client.ClientRequestContext;
+import javax.ws.rs.client.ClientResponseContext;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.junit.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.Silent.class)
+public class JaxrsClientFilterTest {
+
+  JaxrsClientFilter filter = new JaxrsClientFilter();
+
+  @Before
+  public void setUp() {
+    // Mockito in this test depends on some class that's only available on JDK 1.8:
+    // TypeDescription$Generic$AnnotationReader$Dispatcher$ForJava8CapableVm
+    Assume.assumeTrue(System.getProperty("java.version").startsWith("1.8"));
+  }
+
+  @Test
+  public void testRequestFilter() throws Exception {
+    URI uri = URI.create("https://mydomain/myresource");
+    ClientRequestContext requestContext = mock(ClientRequestContext.class);
+    when(requestContext.getUri()).thenReturn(uri);
+    filter.filter(requestContext);
+    verify(requestContext).getUri();
+  }
+
+  @Test
+  public void testResponseFilter() throws Exception {
+    Span span = new FakeSpan(SpanContext.INVALID, null);
+    TagContext tagContext = mock(TagContext.class);
+
+    HttpRequestContext context = createHttpRequestContext(span, tagContext);
+
+    ClientRequestContext requestContext = mock(ClientRequestContext.class);
+    when(requestContext.getProperty("opencensus.context")).thenReturn(context);
+
+    ClientResponseContext responseContext = mock(ClientResponseContext.class);
+
+    filter.filter(requestContext, responseContext);
+
+    verify(requestContext).getProperty("opencensus.context");
+    verify(responseContext, times(1)).getStatus();
+  }
+
+  static HttpRequestContext createHttpRequestContext(Span span, TagContext tagContext)
+      throws Exception {
+    Constructor<HttpRequestContext> constructor =
+        HttpRequestContext.class.getDeclaredConstructor(Span.class, TagContext.class);
+    constructor.setAccessible(true);
+    return constructor.newInstance(span, tagContext);
+  }
+
+  static class FakeSpan extends Span {
+
+    public FakeSpan(SpanContext context, EnumSet<Options> options) {
+      super(context, options);
+    }
+
+    @Override
+    public void putAttribute(String key, AttributeValue value) {}
+
+    @Override
+    public void putAttributes(Map<String, AttributeValue> attributes) {}
+
+    @Override
+    public void addAnnotation(String description, Map<String, AttributeValue> attributes) {}
+
+    @Override
+    public void addAnnotation(Annotation annotation) {}
+
+    @Override
+    public void addMessageEvent(MessageEvent messageEvent) {}
+
+    @Override
+    public void addLink(Link link) {}
+
+    @Override
+    public void setStatus(Status status) {}
+
+    @Override
+    public void end(EndSpanOptions options) {}
+  }
+}
diff --git a/contrib/http_jaxrs/src/test/java/io/opencensus/contrib/http/jaxrs/JaxrsContainerExtractorTest.java b/contrib/http_jaxrs/src/test/java/io/opencensus/contrib/http/jaxrs/JaxrsContainerExtractorTest.java
new file mode 100644
index 0000000..8614f89
--- /dev/null
+++ b/contrib/http_jaxrs/src/test/java/io/opencensus/contrib/http/jaxrs/JaxrsContainerExtractorTest.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jaxrs;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.net.URI;
+import java.util.Collections;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ResourceInfo;
+import javax.ws.rs.core.UriInfo;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.junit.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.Silent.class)
+public class JaxrsContainerExtractorTest {
+
+  @Before
+  public void setUp() {
+    // Mockito in this test depends on some class that's only available on JDK 1.8:
+    // TypeDescription$Generic$AnnotationReader$Dispatcher$ForJava8CapableVm
+    Assume.assumeTrue(System.getProperty("java.version").startsWith("1.8"));
+  }
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testExtraction() throws Exception {
+    UriInfo uriInfo = mock(UriInfo.class);
+    when(uriInfo.getPath()).thenReturn("mypath");
+    when(uriInfo.getMatchedURIs()).thenReturn(Collections.singletonList("/resource/{route}"));
+    when(uriInfo.getRequestUri()).thenReturn(URI.create("https://myhost/resource/1"));
+
+    ContainerRequestContext requestContext = mock(ContainerRequestContext.class);
+    when(requestContext.getHeaderString("host")).thenReturn("myhost");
+    when(requestContext.getMethod()).thenReturn("GET");
+    when(requestContext.getUriInfo()).thenReturn(uriInfo);
+    when(requestContext.getHeaderString("user-agent")).thenReturn("java/1.8");
+
+    ResourceInfo info = mock(ResourceInfo.class);
+    when(info.getResourceClass()).thenReturn((Class) MyResource.class);
+    when(info.getResourceMethod()).thenReturn(MyResource.class.getMethod("route"));
+
+    ExtendedContainerRequest extendedRequest = new ExtendedContainerRequest(requestContext, info);
+
+    ContainerResponseContext responseContext = mock(ContainerResponseContext.class);
+    when(responseContext.getStatus()).thenReturn(200);
+
+    JaxrsContainerExtractor extractor = new JaxrsContainerExtractor();
+    assertEquals("myhost", extractor.getHost(extendedRequest));
+    assertEquals("GET", extractor.getMethod(extendedRequest));
+    assertEquals("mypath", extractor.getPath(extendedRequest));
+    assertEquals("/resource/{route}", extractor.getRoute(extendedRequest));
+    assertEquals("https://myhost/resource/1", extractor.getUrl(extendedRequest));
+    assertEquals("java/1.8", extractor.getUserAgent(extendedRequest));
+    assertEquals(200, extractor.getStatusCode(responseContext));
+  }
+
+  @Path("/resource")
+  static class MyResource {
+
+    @GET
+    @Path("{route}")
+    public String route() {
+      return "OK";
+    }
+  }
+}
diff --git a/contrib/http_jaxrs/src/test/java/io/opencensus/contrib/http/jaxrs/JaxrsContainerFilterTest.java b/contrib/http_jaxrs/src/test/java/io/opencensus/contrib/http/jaxrs/JaxrsContainerFilterTest.java
new file mode 100644
index 0000000..cac1263
--- /dev/null
+++ b/contrib/http_jaxrs/src/test/java/io/opencensus/contrib/http/jaxrs/JaxrsContainerFilterTest.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jaxrs;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import io.opencensus.contrib.http.HttpRequestContext;
+import io.opencensus.contrib.http.jaxrs.JaxrsClientFilterTest.FakeSpan;
+import io.opencensus.tags.TagContext;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.SpanContext;
+import java.util.Collections;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ResourceInfo;
+import javax.ws.rs.core.UriInfo;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.Silent.class)
+public class JaxrsContainerFilterTest {
+
+  @Mock ResourceInfo info;
+
+  @InjectMocks JaxrsContainerFilter filter = new JaxrsContainerFilter();
+
+  @Before
+  public void setUp() {
+    // Mockito in this test depends on some class that's only available on JDK 1.8:
+    // TypeDescription$Generic$AnnotationReader$Dispatcher$ForJava8CapableVm
+    Assume.assumeTrue(System.getProperty("java.version").startsWith("1.8"));
+  }
+
+  @Test
+  public void testRequestFilter() throws Exception {
+    UriInfo uriInfo = mock(UriInfo.class);
+    ContainerRequestContext requestContext = mock(ContainerRequestContext.class);
+    when(requestContext.getUriInfo()).thenReturn(uriInfo);
+    filter.filter(requestContext);
+    verify(requestContext).setProperty(eq("opencensus.context"), any());
+  }
+
+  @Test
+  public void testResponseFilter() throws Exception {
+    Span span = new FakeSpan(SpanContext.INVALID, null);
+    TagContext tagContext = mock(TagContext.class);
+
+    HttpRequestContext context = JaxrsClientFilterTest.createHttpRequestContext(span, tagContext);
+
+    UriInfo uriInfo = mock(UriInfo.class);
+    when(uriInfo.getMatchedURIs()).thenReturn(Collections.singletonList("/resource/{route}"));
+
+    ContainerRequestContext requestContext = mock(ContainerRequestContext.class);
+    when(requestContext.getProperty("opencensus.context")).thenReturn(context);
+    when(requestContext.getUriInfo()).thenReturn(uriInfo);
+
+    ContainerResponseContext responseContext = mock(ContainerResponseContext.class);
+    filter.filter(requestContext, responseContext);
+    verify(requestContext).getProperty("opencensus.context");
+    verify(responseContext, times(1)).getStatus();
+  }
+}
diff --git a/contrib/http_jetty_client/README.md b/contrib/http_jetty_client/README.md
new file mode 100644
index 0000000..da0ea70
--- /dev/null
+++ b/contrib/http_jetty_client/README.md
@@ -0,0 +1,46 @@
+# OpenCensus Jetty HttpClient
+[![Build Status][travis-image]][travis-url]
+[![Windows Build Status][appveyor-image]][appveyor-url]
+[![Maven Central][maven-image]][maven-url]
+
+The *OpenCensus Jetty HttpClient for Java* is a wrapper that provides trace instrumentation when using Jetty as an HTTP client.
+
+## Quickstart
+
+### Add the dependencies to your project
+
+For Maven add to your `pom.xml`:
+```xml
+<dependencies>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-api</artifactId>
+    <version>0.28.3</version>
+  </dependency>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-contrib-http-jetty-client</artifactId>
+    <version>0.28.3</version>
+  </dependency>
+</dependencies>
+```
+
+For Gradle add to your dependencies:
+```groovy
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-contrib-http-jetty-client:0.28.3'
+```
+
+## Instrumenting Jetty Http Client
+
+See the [http-client][httpclient-code] example. For build and run instructions, click [here][httpclient-run].
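+
+A minimal usage sketch (the endpoint URL below is a placeholder and exception handling is omitted); `OcJettyHttpClient` extends Jetty's `HttpClient`, so it can be used in its place:
+
+```java
+OcJettyHttpClient client = new OcJettyHttpClient();
+client.start();
+ContentResponse response = client.GET("http://localhost:8080/hello");
+client.stop();
+```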
+
+
+[travis-image]: https://travis-ci.org/census-instrumentation/opencensus-java.svg?branch=master
+[travis-url]: https://travis-ci.org/census-instrumentation/opencensus-java
+[appveyor-image]: https://ci.appveyor.com/api/projects/status/hxthmpkxar4jq4be/branch/master?svg=true
+[appveyor-url]: https://ci.appveyor.com/project/opencensusjavateam/opencensus-java/branch/master
+[maven-image]: https://maven-badges.herokuapp.com/maven-central/io.opencensus/opencensus-contrib-http-jetty-client/badge.svg
+[maven-url]: https://maven-badges.herokuapp.com/maven-central/io.opencensus/opencensus-contrib-http-jetty-client
+[httpclient-code]: https://github.com/census-instrumentation/opencensus-java/blob/master/examples/src/main/java/io/opencensus/examples/http/jetty/client/HelloWorldClient.java
+[httpclient-run]: https://github.com/census-instrumentation/opencensus-java/blob/master/examples/README.md#to-run-http-server-and-client
diff --git a/contrib/http_jetty_client/build.gradle b/contrib/http_jetty_client/build.gradle
new file mode 100644
index 0000000..dcf2522
--- /dev/null
+++ b/contrib/http_jetty_client/build.gradle
@@ -0,0 +1,25 @@
+plugins {
+    id 'java'
+}
+
+description = 'OpenCensus Http Jetty Client Plugin'
+
+[compileJava, compileTestJava].each() {
+    it.sourceCompatibility = 1.6
+    it.targetCompatibility = 1.6
+}
+
+// TODO[rghetia]: jetty 9.3+ requires jdk 8. Http2.0 is supported in jetty 9.3
+// May require creating separate artifact for jetty 9.3 and above.
+//def jettyVersion = "9.4.12.v20180830"
+def jettyVersion = "9.2.25.v20180606"
+
+dependencies {
+    compile project(':opencensus-api')
+    compile project(':opencensus-contrib-http-util')
+
+    compile "org.eclipse.jetty:jetty-client:${jettyVersion}"
+
+    signature "org.codehaus.mojo.signature:java17:1.0@signature"
+    signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
+}
diff --git a/contrib/http_jetty_client/src/main/java/io/opencensus/contrib/http/jetty/client/HttpRequestListener.java b/contrib/http_jetty_client/src/main/java/io/opencensus/contrib/http/jetty/client/HttpRequestListener.java
new file mode 100644
index 0000000..181304c
--- /dev/null
+++ b/contrib/http_jetty_client/src/main/java/io/opencensus/contrib/http/jetty/client/HttpRequestListener.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jetty.client;
+
+import com.google.common.annotations.VisibleForTesting;
+import io.opencensus.common.ExperimentalApi;
+import io.opencensus.contrib.http.HttpClientHandler;
+import io.opencensus.contrib.http.HttpRequestContext;
+import io.opencensus.trace.Span;
+import java.nio.ByteBuffer;
+import javax.annotation.Nullable;
+import org.eclipse.jetty.client.api.Request;
+import org.eclipse.jetty.client.api.Response;
+import org.eclipse.jetty.client.api.Result;
+
+/** This class extracts attributes from Http Client Request and Response. */
+@ExperimentalApi
+public final class HttpRequestListener
+    implements Request.Listener, Response.ContentListener, Response.CompleteListener {
+
+  private final Span parent;
+  @VisibleForTesting final HttpClientHandler<Request, Response, Request> handler;
+  @VisibleForTesting @Nullable HttpRequestContext context;
+
+  HttpRequestListener(Span parent, HttpClientHandler<Request, Response, Request> handler) {
+    this.parent = parent;
+    this.handler = handler;
+    this.context = null;
+  }
+
+  @Override
+  public void onComplete(Result result) {
+    if (context == null) {
+      return;
+    }
+    if (result != null) {
+      handler.handleEnd(context, result.getRequest(), result.getResponse(), result.getFailure());
+    } else {
+      handler.handleEnd(context, null, null, null);
+    }
+  }
+
+  @Override
+  public void onBegin(Request request) {
+    context = handler.handleStart(parent, request, request);
+  }
+
+  @Override
+  public void onContent(Request request, ByteBuffer content) {
+    if (context != null) {
+      handler.handleMessageSent(context, content.capacity());
+    }
+  }
+
+  @Override
+  public void onContent(Response response, ByteBuffer content) {
+    if (context != null) {
+      handler.handleMessageReceived(context, content.capacity());
+    }
+  }
+
+  @Override
+  public void onCommit(Request request) {}
+
+  @Override
+  public void onFailure(Request request, Throwable failure) {}
+
+  @Override
+  public void onHeaders(Request request) {}
+
+  @Override
+  public void onQueued(Request request) {}
+
+  @Override
+  public void onSuccess(Request request) {}
+}
diff --git a/contrib/http_jetty_client/src/main/java/io/opencensus/contrib/http/jetty/client/OcJettyHttpClient.java b/contrib/http_jetty_client/src/main/java/io/opencensus/contrib/http/jetty/client/OcJettyHttpClient.java
new file mode 100644
index 0000000..e36d313
--- /dev/null
+++ b/contrib/http_jetty_client/src/main/java/io/opencensus/contrib/http/jetty/client/OcJettyHttpClient.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jetty.client;
+
+import com.google.common.annotations.VisibleForTesting;
+import io.opencensus.common.ExperimentalApi;
+import io.opencensus.contrib.http.HttpClientHandler;
+import io.opencensus.contrib.http.HttpExtractor;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.propagation.TextFormat;
+import io.opencensus.trace.propagation.TextFormat.Setter;
+import java.net.URI;
+import javax.annotation.Nullable;
+import org.eclipse.jetty.client.HttpClient;
+import org.eclipse.jetty.client.HttpClientTransport;
+import org.eclipse.jetty.client.api.Request;
+import org.eclipse.jetty.client.api.Response;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+
+/**
+ * This class is a wrapper around {@link HttpClient}. It enables tracing for every {@link Request}
+ * created using this client.
+ *
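+ * <p>A minimal usage sketch (the target URL is illustrative; the client must be started before
+ * requests are sent):
+ *
+ * <pre>{@code
+ * OcJettyHttpClient client = new OcJettyHttpClient();
+ * client.start();
+ * ContentResponse response = client.newRequest("http://example.com/").send();
+ * client.stop();
+ * }</pre>
+ *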
+ * @since 0.19
+ */
+@ExperimentalApi
+public final class OcJettyHttpClient extends HttpClient {
+  private static final Setter<Request> setter =
+      new Setter<Request>() {
+        @Override
+        public void put(Request carrier, String key, String value) {
+          carrier.header(key, value);
+        }
+      };
+
+  private static final Tracer tracer = Tracing.getTracer();
+  @VisibleForTesting final HttpClientHandler<Request, Response, Request> handler;
+
+  /** Create a new {@code OcJettyHttpClient}. */
+  public OcJettyHttpClient() {
+    super();
+    handler = buildHandler(null, null);
+  }
+
+  /**
+   * Create a new {@code OcJettyHttpClient} with a custom transport, SSL context factory,
+   * extractor, and propagator.
+   *
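+   * <p>A minimal construction sketch (the transport and SSL context factory shown are illustrative
+   * choices, not requirements):
+   *
+   * <pre>{@code
+   * OcJettyHttpClient client =
+   *     new OcJettyHttpClient(
+   *         new HttpClientTransportOverHTTP(), new SslContextFactory(), null, null);
+   * }</pre>
+   *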
+   * @param transport {@link HttpClientTransport} The transport implementation.
+   * @param sslContextFactory {@link SslContextFactory} Used to configure SSL connectors.
+   * @param extractor {@link HttpExtractor} to extract request- and response-specific attributes.
+   *     If it is null then the default extractor is used.
+   * @param propagator {@link TextFormat} to propagate the trace context to the remote peer. If it
+   *     is null then the default propagator ({@code TraceContextFormat}) is used.
+   * @since 0.20
+   */
+  public OcJettyHttpClient(
+      HttpClientTransport transport,
+      SslContextFactory sslContextFactory,
+      @Nullable HttpExtractor<Request, Response> extractor,
+      @Nullable TextFormat propagator) {
+    super(transport, sslContextFactory);
+    handler = buildHandler(extractor, propagator);
+  }
+
+  private static HttpClientHandler<Request, Response, Request> buildHandler(
+      @Nullable HttpExtractor<Request, Response> extractor, @Nullable TextFormat propagator) {
+    if (extractor == null) {
+      extractor = new OcJettyHttpClientExtractor();
+    }
+
+    if (propagator == null) {
+      propagator = Tracing.getPropagationComponent().getTraceContextFormat();
+    }
+
+    return new HttpClientHandler<Request, Response, Request>(
+        Tracing.getTracer(), extractor, propagator, setter);
+  }
+
+  /**
+   * Returns a new request created from a given {@link URI}.
+   *
+   * @param uri {@link URI} to create new request.
+   * @return {@link Request}
+   */
+  @Override
+  public Request newRequest(URI uri) {
+    Request request = super.newRequest(uri);
+    Request.Listener listener = new HttpRequestListener(tracer.getCurrentSpan(), handler);
+    request.listener(listener);
+    request.onComplete((Response.CompleteListener) listener);
+    request.onResponseContent((Response.ContentListener) listener);
+    return request;
+  }
+}
diff --git a/contrib/http_jetty_client/src/main/java/io/opencensus/contrib/http/jetty/client/OcJettyHttpClientExtractor.java b/contrib/http_jetty_client/src/main/java/io/opencensus/contrib/http/jetty/client/OcJettyHttpClientExtractor.java
new file mode 100644
index 0000000..b9478ac
--- /dev/null
+++ b/contrib/http_jetty_client/src/main/java/io/opencensus/contrib/http/jetty/client/OcJettyHttpClientExtractor.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jetty.client;
+
+import io.opencensus.common.ExperimentalApi;
+import io.opencensus.contrib.http.HttpExtractor;
+import java.net.MalformedURLException;
+import javax.annotation.Nullable;
+import org.eclipse.jetty.client.api.Request;
+import org.eclipse.jetty.client.api.Response;
+
+/** This class extracts attributes from {@link Request} and {@link Response}. */
+@ExperimentalApi
+final class OcJettyHttpClientExtractor extends HttpExtractor<Request, Response> {
+  @Override
+  public String getHost(Request request) {
+    return request.getHost();
+  }
+
+  @Override
+  public String getMethod(Request request) {
+    return request.getMethod();
+  }
+
+  @Override
+  public String getPath(Request request) {
+    return request.getPath();
+  }
+
+  @Override
+  public String getUserAgent(Request request) {
+    return request.getHeaders().get("User-Agent");
+  }
+
+  @Override
+  public int getStatusCode(@Nullable Response response) {
+    if (response != null) {
+      return response.getStatus();
+    }
+    return 0;
+  }
+
+  // TODO[rghetia]: make this configurable for the user.
+  @Override
+  public String getRoute(Request request) {
+    return "";
+  }
+
+  @Override
+  public String getUrl(Request request) {
+    try {
+      return request.getURI().toURL().toString();
+    } catch (MalformedURLException e) {
+      return "";
+    }
+  }
+}
diff --git a/contrib/http_jetty_client/src/test/java/io/opencensus/contrib/http/jetty/client/HttpRequestListenerTest.java b/contrib/http_jetty_client/src/test/java/io/opencensus/contrib/http/jetty/client/HttpRequestListenerTest.java
new file mode 100644
index 0000000..ecb1d0a
--- /dev/null
+++ b/contrib/http_jetty_client/src/test/java/io/opencensus/contrib/http/jetty/client/HttpRequestListenerTest.java
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jetty.client;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import io.opencensus.contrib.http.HttpClientHandler;
+import io.opencensus.contrib.http.HttpExtractor;
+import io.opencensus.contrib.http.HttpRequestContext;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.propagation.TextFormat.Setter;
+import javax.annotation.Nullable;
+import org.eclipse.jetty.client.api.Request;
+import org.eclipse.jetty.client.api.Response;
+import org.eclipse.jetty.client.api.Result;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+/** Unit tests for {@link HttpRequestListener}. */
+@RunWith(JUnit4.class)
+public class HttpRequestListenerTest {
+  private Request request;
+  private HttpRequestListener listener;
+  private HttpRequestListener listenerWithMockhandler;
+  private HttpRequestContext context;
+  private final Object requestObj = new Object();
+  private final Object responseObj = new Object();
+  static final Setter<Object> setter =
+      new Setter<Object>() {
+        @Override
+        public void put(Object carrier, String key, String val) {}
+      };
+
+  private final HttpExtractor<Object, Object> extractor =
+      new HttpExtractor<Object, Object>() {
+        @Nullable
+        @Override
+        public String getRoute(Object request) {
+          return "";
+        }
+
+        @Nullable
+        @Override
+        public String getUrl(Object request) {
+          return "";
+        }
+
+        @Nullable
+        @Override
+        public String getHost(Object request) {
+          return "";
+        }
+
+        @Nullable
+        @Override
+        public String getMethod(Object request) {
+          return "";
+        }
+
+        @Nullable
+        @Override
+        public String getPath(Object request) {
+          return "";
+        }
+
+        @Nullable
+        @Override
+        public String getUserAgent(Object request) {
+          return "";
+        }
+
+        @Override
+        public int getStatusCode(@Nullable Object response) {
+          return 0;
+        }
+      };
+
+  private final HttpClientHandler<Object, Object, Object> handler =
+      new HttpClientHandler<Object, Object, Object>(
+          Tracing.getTracer(),
+          extractor,
+          Tracing.getPropagationComponent().getTraceContextFormat(),
+          setter) {};
+  @Mock private HttpClientHandler<Request, Response, Request> mockHandler;
+  @Mock private Result mockResult;
+
+  @Before
+  public void setUp() {
+    MockitoAnnotations.initMocks(this);
+    OcJettyHttpClient client = new OcJettyHttpClient();
+    request = client.newRequest("http://www.example.com/foo");
+    listener = request.getRequestListeners(HttpRequestListener.class).get(0);
+
+    // for onComplete() test
+    context = handler.handleStart(null, requestObj, responseObj);
+    listenerWithMockhandler = new HttpRequestListener(null, mockHandler);
+  }
+
+  @Test
+  public void testOnBegin() {
+    listener.onBegin(request);
+    assertThat(listener.context).isNotNull();
+  }
+
+  @Test
+  public void testOnCompleteWithNullResult() {
+    listenerWithMockhandler.context = context;
+    listenerWithMockhandler.onComplete(null);
+    verify(mockHandler).handleEnd(context, null, null, null);
+  }
+
+  @Test
+  public void testOnComplete() {
+    listenerWithMockhandler.context = context;
+    when(mockResult.getFailure()).thenReturn(null);
+    when(mockResult.getRequest()).thenReturn(null);
+    when(mockResult.getResponse()).thenReturn(null);
+    listenerWithMockhandler.onComplete(mockResult);
+    verify(mockHandler).handleEnd(context, null, null, null);
+  }
+}
diff --git a/contrib/http_jetty_client/src/test/java/io/opencensus/contrib/http/jetty/client/OcJettyHttpClientExtractorTest.java b/contrib/http_jetty_client/src/test/java/io/opencensus/contrib/http/jetty/client/OcJettyHttpClientExtractorTest.java
new file mode 100644
index 0000000..12914d0
--- /dev/null
+++ b/contrib/http_jetty_client/src/test/java/io/opencensus/contrib/http/jetty/client/OcJettyHttpClientExtractorTest.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jetty.client;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import org.eclipse.jetty.client.api.Request;
+import org.eclipse.jetty.client.api.Response;
+import org.eclipse.jetty.http.HttpField;
+import org.eclipse.jetty.http.HttpFields;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link OcJettyHttpClientExtractor}. */
+@RunWith(JUnit4.class)
+public class OcJettyHttpClientExtractorTest {
+  private static final String URI_STR = "http://localhost/test/foo";
+  private URI uri;
+
+  @Before
+  public void setUp() throws URISyntaxException {
+    uri = new URI(URI_STR);
+  }
+
+  @Test
+  public void testExtraction() {
+    HttpFields fields = new HttpFields();
+    fields.add(new HttpField("User-Agent", "Test 1.0"));
+
+    Request request = mock(Request.class);
+    Response response = mock(Response.class);
+    OcJettyHttpClientExtractor extractor = new OcJettyHttpClientExtractor();
+    when(request.getHost()).thenReturn("localhost");
+    when(request.getMethod()).thenReturn("GET");
+    when(request.getHeaders()).thenReturn(fields);
+    when(request.getPath()).thenReturn("/test");
+    when(request.getURI()).thenReturn(uri);
+    when(response.getStatus()).thenReturn(0);
+
+    assertThat(extractor.getHost(request)).contains("localhost");
+    assertThat(extractor.getMethod(request)).contains("GET");
+    assertThat(extractor.getPath(request)).contains("/test");
+    assertThat(extractor.getUrl(request)).contains(URI_STR);
+    assertThat(extractor.getRoute(request)).contains("");
+    assertThat(extractor.getUserAgent(request)).contains("Test 1.0");
+    assertThat(extractor.getStatusCode(response)).isEqualTo(0);
+  }
+}
diff --git a/contrib/http_jetty_client/src/test/java/io/opencensus/contrib/http/jetty/client/OcJettyHttpClientTest.java b/contrib/http_jetty_client/src/test/java/io/opencensus/contrib/http/jetty/client/OcJettyHttpClientTest.java
new file mode 100644
index 0000000..d177024
--- /dev/null
+++ b/contrib/http_jetty_client/src/test/java/io/opencensus/contrib/http/jetty/client/OcJettyHttpClientTest.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.jetty.client;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.trace.Tracing;
+import java.net.URI;
+import java.net.URISyntaxException;
+import org.eclipse.jetty.client.api.Request;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link OcJettyHttpClient}. */
+@RunWith(JUnit4.class)
+public class OcJettyHttpClientTest {
+  private static final String URI_STR = "http://localhost/test/foo";
+  private OcJettyHttpClient client;
+
+  @Before
+  public void setUp() {
+    client = new OcJettyHttpClient();
+  }
+
+  @Test
+  public void testOcJettyHttpClientDefault() {
+    OcJettyHttpClient defaultClient = new OcJettyHttpClient();
+    assertThat(defaultClient.handler).isNotNull();
+  }
+
+  @Test
+  public void testOcJettyHttpClientNonDefault() {
+    OcJettyHttpClient defaultClient =
+        new OcJettyHttpClient(
+            null,
+            null,
+            new OcJettyHttpClientExtractor(),
+            Tracing.getPropagationComponent().getB3Format());
+    assertThat(defaultClient.handler).isNotNull();
+  }
+
+  @Test
+  public void testOcJettyHttpClientNullExtractor() {
+    OcJettyHttpClient defaultClient =
+        new OcJettyHttpClient(null, null, null, Tracing.getPropagationComponent().getB3Format());
+    assertThat(defaultClient.handler).isNotNull();
+  }
+
+  @Test
+  public void testOcJettyHttpClientNullPropagator() {
+    OcJettyHttpClient defaultClient =
+        new OcJettyHttpClient(null, null, new OcJettyHttpClientExtractor(), null);
+    assertThat(defaultClient.handler).isNotNull();
+  }
+
+  @Test
+  public void testListenerWithUrlString() {
+    Request request = client.newRequest(URI_STR);
+    assertThat(request).isNotNull();
+  }
+
+  @Test
+  public void testListenerWithUri() throws URISyntaxException {
+    URI uri = new URI(URI_STR);
+    Request request = client.newRequest(uri);
+    assertThat(request).isNotNull();
+  }
+}
diff --git a/contrib/http_servlet/README.md b/contrib/http_servlet/README.md
new file mode 100644
index 0000000..953730e
--- /dev/null
+++ b/contrib/http_servlet/README.md
@@ -0,0 +1,45 @@
+# OpenCensus Http Servlet Plugin
+[![Build Status][travis-image]][travis-url]
+[![Windows Build Status][appveyor-image]][appveyor-url]
+[![Maven Central][maven-image]][maven-url]
+
+The *OpenCensus Http Servlet Plugin for Java* is a plugin for trace instrumentation when using HTTP Servlet 3.0.
+
+## Quickstart
+
+### Add the dependencies to your project
+
+For Maven add to your `pom.xml`:
+```xml
+<dependencies>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-api</artifactId>
+    <version>0.28.3</version>
+  </dependency>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-contrib-http-servlet</artifactId>
+    <version>0.28.3</version>
+  </dependency>
+</dependencies>
+```
+
+For Gradle add to your dependencies:
+```groovy
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-contrib-http-servlet:0.28.3'
+```
+
+## Instrumenting HTTP Servlets
+
+See the [http-server][httpservlet-code] example. Instructions to build and run the example are [here][httpservlet-run].
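+
+As a minimal sketch (assuming a Servlet 3.0+ container; the listener class name is illustrative),
+the filter can also be registered programmatically:
+
+```java
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+import javax.servlet.annotation.WebListener;
+
+import io.opencensus.contrib.http.servlet.OcHttpServletFilter;
+
+@WebListener
+public class TracingFilterRegistration implements ServletContextListener {
+  @Override
+  public void contextInitialized(ServletContextEvent sce) {
+    // Trace every request handled by this web application.
+    sce.getServletContext()
+        .addFilter("OcHttpServletFilter", new OcHttpServletFilter())
+        .addMappingForUrlPatterns(null, false, "/*");
+  }
+
+  @Override
+  public void contextDestroyed(ServletContextEvent sce) {}
+}
+```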
+
+[travis-image]: https://travis-ci.org/census-instrumentation/opencensus-java.svg?branch=master
+[travis-url]: https://travis-ci.org/census-instrumentation/opencensus-java
+[appveyor-image]: https://ci.appveyor.com/api/projects/status/hxthmpkxar4jq4be/branch/master?svg=true
+[appveyor-url]: https://ci.appveyor.com/project/opencensusjavateam/opencensus-java/branch/master
+[maven-image]: https://maven-badges.herokuapp.com/maven-central/io.opencensus/opencensus-contrib-http-servlet/badge.svg
+[maven-url]: https://maven-badges.herokuapp.com/maven-central/io.opencensus/opencensus-contrib-http-servlet
+[httpservlet-run]: https://github.com/census-instrumentation/opencensus-java/tree/master/examples#to-run-http-server-and-client
+[httpservlet-code]: https://github.com/census-instrumentation/opencensus-java/blob/master/examples/src/main/java/io/opencensus/examples/http/jetty/server/HelloWorldServer.java
diff --git a/contrib/http_servlet/build.gradle b/contrib/http_servlet/build.gradle
new file mode 100644
index 0000000..8d51272
--- /dev/null
+++ b/contrib/http_servlet/build.gradle
@@ -0,0 +1,21 @@
+plugins {
+    id 'java'
+}
+
+description = 'OpenCensus Http Servlet Plugin'
+
+[compileJava, compileTestJava].each() {
+    it.sourceCompatibility = 1.6
+    it.targetCompatibility = 1.6
+}
+
+dependencies {
+    compile libraries.grpc_context
+    compile project(':opencensus-api')
+    compile project(':opencensus-contrib-http-util')
+
+    compile "javax.servlet:javax.servlet-api:3.1.0"
+
+    signature "org.codehaus.mojo.signature:java17:1.0@signature"
+    signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
+}
diff --git a/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/OcHttpServletExtractor.java b/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/OcHttpServletExtractor.java
new file mode 100644
index 0000000..af35e04
--- /dev/null
+++ b/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/OcHttpServletExtractor.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.servlet;
+
+import io.opencensus.common.ExperimentalApi;
+import io.opencensus.contrib.http.HttpExtractor;
+import javax.annotation.Nullable;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+/**
+ * This class extracts attributes from {@link HttpServletRequest} and {@link HttpServletResponse}.
+ */
+@ExperimentalApi
+class OcHttpServletExtractor extends HttpExtractor<HttpServletRequest, HttpServletResponse> {
+  @Override
+  public String getHost(HttpServletRequest request) {
+    return request.getServerName();
+  }
+
+  @Override
+  public String getMethod(HttpServletRequest request) {
+    return request.getMethod();
+  }
+
+  @Override
+  public String getPath(HttpServletRequest request) {
+    // Path defined in the spec at
+    // https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/HTTP.md#attributes
+    // is equivalent of URI in HttpServlet.
+    return request.getRequestURI();
+  }
+
+  @Override
+  public String getUserAgent(HttpServletRequest request) {
+    return request.getHeader("User-Agent");
+  }
+
+  @Override
+  public int getStatusCode(@Nullable HttpServletResponse response) {
+    if (response != null) {
+      return response.getStatus();
+    }
+    return 0;
+  }
+
+  @Override
+  public String getUrl(HttpServletRequest request) {
+    // Url defined in the spec at
+    // https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/HTTP.md#attributes
+    // is equivalent of URL + QueryString in HttpServlet.
+    return request.getRequestURL().toString() + "?" + request.getQueryString();
+  }
+
+  @Override
+  public String getRoute(HttpServletRequest request) {
+    return "";
+  }
+}
diff --git a/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/OcHttpServletFilter.java b/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/OcHttpServletFilter.java
new file mode 100644
index 0000000..2a87f67
--- /dev/null
+++ b/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/OcHttpServletFilter.java
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.servlet;
+
+import com.google.common.annotations.VisibleForTesting;
+import io.opencensus.common.ExperimentalApi;
+import io.opencensus.common.Scope;
+import io.opencensus.contrib.http.HttpExtractor;
+import io.opencensus.contrib.http.HttpRequestContext;
+import io.opencensus.contrib.http.HttpServerHandler;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.config.TraceConfig;
+import io.opencensus.trace.propagation.TextFormat;
+import io.opencensus.trace.propagation.TextFormat.Getter;
+import java.io.IOException;
+import javax.annotation.Nullable;
+import javax.servlet.AsyncContext;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+/**
+ * This filter class implements the {@link Filter} interface called by the web container. The
+ * filter is used as an interceptor to enable tracing of HTTP requests.
+ *
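+ * <p>A minimal configuration sketch ({@code MyCustomExtractor} is a hypothetical extractor); the
+ * optional attributes may be set before the filter is initialized, for example from a
+ * {@code ServletContextListener}:
+ *
+ * <pre>{@code
+ * ServletContext context = servletContextEvent.getServletContext();
+ * context.setAttribute(OcHttpServletFilter.OC_TRACE_PROPAGATOR,
+ *     Tracing.getPropagationComponent().getB3Format());
+ * context.setAttribute(OcHttpServletFilter.OC_EXTRACTOR, new MyCustomExtractor());
+ * }</pre>
+ *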
+ * @since 0.19
+ */
+@ExperimentalApi
+public class OcHttpServletFilter implements Filter {
+
+  /**
+   * Set the optional OC_TRACE_PROPAGATOR attribute in {@link ServletContext} to a {@link
+   * TextFormat} propagator. By default {@code TraceContextFormat} is used to propagate the trace
+   * context.
+   *
+   * @since 0.20
+   */
+  public static final String OC_TRACE_PROPAGATOR = "opencensus.trace_propagator";
+
+  /**
+   * Set the optional OC_EXTRACTOR attribute in {@link ServletContext} to a custom {@link
+   * HttpExtractor}. The default extractor is used if a custom extractor is not provided.
+   *
+   * @since 0.20
+   */
+  public static final String OC_EXTRACTOR = "opencensus.extractor";
+
+  /**
+   * Set the optional OC_PUBLIC_ENDPOINT init parameter in {@link ServletContext} to
+   * {@code "true"} for a publicly accessible HTTP(S) server. If true, the incoming trace context
+   * is added as a link instead of as a parent. By default it is false.
+   *
+   * @since 0.20
+   */
+  public static final String OC_PUBLIC_ENDPOINT = "opencensus.public_endpoint";
+
+  static final String EXCEPTION_MESSAGE = "Invalid value for attribute ";
+
+  @VisibleForTesting
+  static final Getter<HttpServletRequest> getter =
+      new Getter<HttpServletRequest>() {
+        @Nullable
+        @Override
+        public String get(HttpServletRequest carrier, String key) {
+          return carrier.getHeader(key);
+        }
+      };
+
+  @VisibleForTesting
+  HttpServerHandler<HttpServletRequest, HttpServletResponse, HttpServletRequest> handler;
+
+  /** Creates a new {@code OcHttpServletFilter}. */
+  public OcHttpServletFilter() {
+    TraceConfig traceConfig = Tracing.getTraceConfig();
+    traceConfig.updateActiveTraceParams(traceConfig.getActiveTraceParams().toBuilder().build());
+    handler = buildHttpServerHandler();
+  }
+
+  static HttpServerHandler<HttpServletRequest, HttpServletResponse, HttpServletRequest>
+      buildHttpServerHandler() {
+    return buildHttpServerHandlerWithOptions(
+        new OcHttpServletExtractor(),
+        Tracing.getPropagationComponent().getTraceContextFormat(),
+        /* publicEndpoint= */ false);
+  }
+
+  static HttpServerHandler<HttpServletRequest, HttpServletResponse, HttpServletRequest>
+      buildHttpServerHandlerWithOptions(
+          HttpExtractor<HttpServletRequest, HttpServletResponse> extractor,
+          TextFormat propagator,
+          Boolean publicEndpoint) {
+    return new HttpServerHandler<HttpServletRequest, HttpServletResponse, HttpServletRequest>(
+        Tracing.getTracer(), extractor, propagator, getter, publicEndpoint);
+  }
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public void init(FilterConfig filterConfig) throws ServletException {
+    if (handler == null) {
+      throw new ServletException("Failed to build HttpServerHandler");
+    }
+    TextFormat propagator = null;
+    HttpExtractor<HttpServletRequest, HttpServletResponse> extractor = null;
+    Boolean publicEndpoint = null;
+
+    ServletContext context = filterConfig.getServletContext();
+    Object obj = context.getAttribute(OC_TRACE_PROPAGATOR);
+    if (obj != null) {
+      if (obj instanceof TextFormat) {
+        propagator = (TextFormat) obj;
+      } else {
+        throw new ServletException(EXCEPTION_MESSAGE + OC_TRACE_PROPAGATOR);
+      }
+    } else {
+      propagator = Tracing.getPropagationComponent().getTraceContextFormat();
+    }
+
+    obj = context.getAttribute(OC_EXTRACTOR);
+    if (obj != null) {
+      if (obj instanceof HttpExtractor) {
+        extractor = (HttpExtractor<HttpServletRequest, HttpServletResponse>) obj;
+      } else {
+        throw new ServletException(EXCEPTION_MESSAGE + OC_EXTRACTOR);
+      }
+    } else {
+      extractor = new OcHttpServletExtractor();
+    }
+
+    String publicEndVal = context.getInitParameter(OC_PUBLIC_ENDPOINT);
+    if (publicEndVal != null) {
+      publicEndpoint = Boolean.parseBoolean(publicEndVal);
+    } else {
+      publicEndpoint = false;
+    }
+    handler = buildHttpServerHandlerWithOptions(extractor, propagator, publicEndpoint);
+  }
+
+  @Override
+  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
+      throws IOException, ServletException {
+    // only interested in http requests
+    if ((request instanceof HttpServletRequest) && (response instanceof HttpServletResponse)) {
+      HttpServletRequest httpReq = (HttpServletRequest) request;
+      HttpServletResponse httpResp = (HttpServletResponse) response;
+
+      HttpRequestContext context = handler.handleStart(httpReq, httpReq);
+      OcHttpServletListener listener = new OcHttpServletListener(handler, context);
+      httpReq.setAttribute(OcHttpServletUtil.OPENCENSUS_SERVLET_LISTENER, listener);
+
+      int length = httpReq.getContentLength();
+      if (length > 0) {
+        handler.handleMessageReceived(context, length);
+      }
+
+      Scope scope = Tracing.getTracer().withSpan(handler.getSpanFromContext(context));
+      try {
+        chain.doFilter(httpReq, httpResp);
+      } finally {
+        scope.close();
+      }
+
+      if (httpReq.isAsyncStarted()) {
+        AsyncContext async = httpReq.getAsyncContext();
+        async.addListener(listener, httpReq, httpResp);
+      } else {
+        OcHttpServletUtil.recordMessageSentEvent(handler, context, httpResp);
+        handler.handleEnd(context, httpReq, httpResp, null);
+      }
+    } else {
+      // pass request through unchanged
+      chain.doFilter(request, response);
+    }
+  }
+
+  @Override
+  public void destroy() {}
+}
diff --git a/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/OcHttpServletListener.java b/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/OcHttpServletListener.java
new file mode 100644
index 0000000..0c128b9
--- /dev/null
+++ b/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/OcHttpServletListener.java
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.servlet;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.errorprone.annotations.MustBeClosed;
+import io.opencensus.common.ExperimentalApi;
+import io.opencensus.common.Scope;
+import io.opencensus.contrib.http.HttpRequestContext;
+import io.opencensus.contrib.http.HttpServerHandler;
+import io.opencensus.trace.Tracing;
+import java.io.Closeable;
+import javax.servlet.AsyncContext;
+import javax.servlet.AsyncEvent;
+import javax.servlet.AsyncListener;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+/**
+ * This class implements {@link AsyncListener} to handle span completion for async request handling.
+ */
+@ExperimentalApi
+public final class OcHttpServletListener implements Closeable, AsyncListener {
+  private final HttpRequestContext context;
+  private final HttpServerHandler<HttpServletRequest, HttpServletResponse, HttpServletRequest>
+      handler;
+
+  OcHttpServletListener(
+      HttpServerHandler<HttpServletRequest, HttpServletResponse, HttpServletRequest> handler,
+      HttpRequestContext context) {
+    checkNotNull(context, "context");
+    checkNotNull(handler, "handler");
+    this.context = context;
+    this.handler = handler;
+  }
+
+  @Override
+  public void close() {}
+
+  @Override
+  public void onComplete(AsyncEvent event) {
+    ServletResponse response = event.getSuppliedResponse();
+    if (response instanceof HttpServletResponse) {
+      OcHttpServletUtil.recordMessageSentEvent(handler, context, (HttpServletResponse) response);
+    }
+    handler.handleEnd(
+        context,
+        (HttpServletRequest) event.getSuppliedRequest(),
+        (HttpServletResponse) event.getSuppliedResponse(),
+        null);
+    this.close();
+  }
+
+  @Override
+  public void onError(AsyncEvent event) {
+    handler.handleEnd(
+        context,
+        (HttpServletRequest) event.getSuppliedRequest(),
+        (HttpServletResponse) event.getSuppliedResponse(),
+        event.getThrowable());
+  }
+
+  @Override
+  public void onStartAsync(AsyncEvent event) {
+    AsyncContext eventAsyncContext = event.getAsyncContext();
+    if (eventAsyncContext != null) {
+      eventAsyncContext.addListener(this, event.getSuppliedRequest(), event.getSuppliedResponse());
+    }
+  }
+
+  @Override
+  public void onTimeout(AsyncEvent event) {
+    handler.handleEnd(
+        context,
+        (HttpServletRequest) event.getSuppliedRequest(),
+        (HttpServletResponse) event.getSuppliedResponse(),
+        null);
+  }
+
+  @MustBeClosed
+  Scope withSpan() {
+    return Tracing.getTracer().withSpan(handler.getSpanFromContext(context));
+  }
+}
diff --git a/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/OcHttpServletUtil.java b/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/OcHttpServletUtil.java
new file mode 100644
index 0000000..b1247ba
--- /dev/null
+++ b/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/OcHttpServletUtil.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.servlet;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.errorprone.annotations.MustBeClosed;
+import io.opencensus.common.Scope;
+import io.opencensus.contrib.http.HttpRequestContext;
+import io.opencensus.contrib.http.HttpServerHandler;
+import io.opencensus.trace.BlankSpan;
+import io.opencensus.trace.Tracing;
+import javax.servlet.ServletRequest;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class OcHttpServletUtil {
+  static final String CONTENT_LENGTH = "Content-Length";
+  static final String OPENCENSUS_SERVLET_LISTENER = "opencensus.servlet.listener";
+
+  private OcHttpServletUtil() {}
+
+  static void recordMessageSentEvent(
+      HttpServerHandler<HttpServletRequest, HttpServletResponse, HttpServletRequest> handler,
+      HttpRequestContext context,
+      HttpServletResponse response) {
+    if (response != null) {
+      String length = response.getHeader(CONTENT_LENGTH);
+      if (length != null && !length.isEmpty()) {
+        try {
+          handler.handleMessageSent(context, Integer.parseInt(length));
+        } catch (NumberFormatException e) {
+          return;
+        }
+      }
+    }
+  }
+
+  /**
+   * Enters the scope of code where the given {@link ServletRequest} will be processed and returns
+   * an object that represents the scope. The scope is exited when the returned object is closed. A
+   * span created for the {@link ServletRequest} is set to the current Context.
+   *
+   * <p>Supports try-with-resource idiom.
+   *
+   * <p>Example of usage:
+   *
+   * <pre>{@code
+   * void AsyncRequestProcessor(AsyncContext asyncCtx) {
+   *   try (Scope ws = OcHttpServletUtil.withScope(asyncCtx.getRequest())) {
+   *     tracer.getCurrentSpan().addAnnotation("my annotation");
+   *     doSomeOtherWork();  // Here "span" is the current Span.
+   *   }
+   * }
+   * }</pre>
+   *
+   * <p>Prior to Java SE 7, you can use a finally block to ensure that a resource is closed
+   * regardless of whether the try statement completes normally or abruptly.
+   *
+   * <p>Example of usage prior to Java SE7:
+   *
+   * <pre>{@code
+   * void AsyncRequestProcessor(AsyncContext asyncCtx) {
+   *   Scope ws = OcHttpServletUtil.withScope(asyncCtx.getRequest());
+   *   try {
+   *     tracer.getCurrentSpan().addAnnotation("my annotation");
+   *     doSomeOtherWork();  // Here "span" is the current Span.
+   *   } finally {
+   *     ws.close();
+   *   }
+   * }
+   * }</pre>
+   *
+   * @param request The {@link ServletRequest} request that is about to be processed.
+   * @return an object that defines a scope where the span associated with the given {@link
+   *     ServletRequest} will be set to the current Context.
+   * @throws NullPointerException if {@code request} is {@code null}.
+   * @since 0.20.0
+   */
+  @MustBeClosed
+  public static Scope withScope(ServletRequest request) {
+    checkNotNull(request, "request");
+    OcHttpServletListener listener =
+        (OcHttpServletListener) request.getAttribute(OPENCENSUS_SERVLET_LISTENER);
+    if (listener != null) {
+      return listener.withSpan();
+    }
+    return Tracing.getTracer().withSpan(BlankSpan.INSTANCE);
+  }
+}
diff --git a/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/WriteListenerWrapper.java b/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/WriteListenerWrapper.java
new file mode 100644
index 0000000..591f7a5
--- /dev/null
+++ b/contrib/http_servlet/src/main/java/io/opencensus/contrib/http/servlet/WriteListenerWrapper.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.servlet;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import io.grpc.Context;
+import io.opencensus.common.ExperimentalApi;
+import java.io.IOException;
+import javax.servlet.WriteListener;
+
+/**
+ * This class is a wrapper for {@link WriteListener}. It ensures that the asynchronous
+ * {@code onWritePossible} method executes in the context that was current when the listener was
+ * created.
+ *
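+ * <p>A minimal usage sketch (assumes an async servlet whose {@code ServletOutputStream} is named
+ * {@code out}; {@code MyWriteListener} is a hypothetical listener implementation):
+ *
+ * <pre>{@code
+ * out.setWriteListener(new WriteListenerWrapper(new MyWriteListener(out)));
+ * }</pre>
+ *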
+ * @since 0.25.0
+ */
+@ExperimentalApi
+public class WriteListenerWrapper implements WriteListener {
+  private final io.grpc.Context context;
+  private final WriteListener writeListener;
+
+  /**
+   * Creates an instance of {@code WriteListenerWrapper}. It saves the current {@link Context} at
+   * the time of creation.
+   *
+   * @param writeListener {@link WriteListener} object being wrapped.
+   * @since 0.25.0
+   */
+  public WriteListenerWrapper(WriteListener writeListener) {
+    checkNotNull(writeListener, "WriteListener is null");
+    context = io.grpc.Context.current();
+    this.writeListener = writeListener;
+  }
+
+  /**
+   * Executes the {@code onWritePossible()} method of the wrapped object in the saved context. The
+   * saved context is attached before the method executes and the previous context is restored once
+   * it finishes.
+   *
+   * @since 0.25.0
+   */
+  @Override
+  public void onWritePossible() throws IOException {
+    Context previousContext = context.attach();
+    try {
+      writeListener.onWritePossible();
+    } finally {
+      context.detach(previousContext);
+    }
+  }
+
+  @Override
+  public void onError(final Throwable t) {
+    writeListener.onError(t);
+  }
+}
diff --git a/contrib/http_servlet/src/test/java/io/opencensus/contrib/http/servlet/OcHttpServletExtractorTest.java b/contrib/http_servlet/src/test/java/io/opencensus/contrib/http/servlet/OcHttpServletExtractorTest.java
new file mode 100644
index 0000000..037594e
--- /dev/null
+++ b/contrib/http_servlet/src/test/java/io/opencensus/contrib/http/servlet/OcHttpServletExtractorTest.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.servlet;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link OcHttpServletExtractor}. */
+@RunWith(JUnit4.class)
+public class OcHttpServletExtractorTest {
+
+  @SuppressWarnings("JdkObsolete")
+  private final StringBuffer urlBuffer = new StringBuffer("http://example.com:8080/user/foo");
+
+  @Test
+  public void testExtraction() {
+    HttpServletRequest request = mock(HttpServletRequest.class);
+    HttpServletResponse response = mock(HttpServletResponse.class);
+    OcHttpServletExtractor extractor = new OcHttpServletExtractor();
+    when(request.getServerName()).thenReturn("example.com");
+    when(request.getMethod()).thenReturn("GET");
+    when(request.getHeader("User-Agent")).thenReturn("Test 1.0");
+    when(request.getRequestURI()).thenReturn("/user/foo");
+    when(request.getQueryString()).thenReturn("a=b");
+    when(response.getStatus()).thenReturn(0);
+    when(request.getRequestURL()).thenReturn(urlBuffer);
+
+    assertThat(extractor.getHost(request)).contains("example.com");
+    assertThat(extractor.getMethod(request)).contains("GET");
+    assertThat(extractor.getPath(request)).contains("/user/foo");
+    assertThat(extractor.getUserAgent(request)).contains("Test 1.0");
+    assertThat(extractor.getStatusCode(response)).isEqualTo(0);
+    assertThat(extractor.getRoute(request)).contains("");
+    assertThat(extractor.getUrl(request)).contains(urlBuffer.toString() + "?" + "a=b");
+  }
+}
diff --git a/contrib/http_servlet/src/test/java/io/opencensus/contrib/http/servlet/OcHttpServletFilterTest.java b/contrib/http_servlet/src/test/java/io/opencensus/contrib/http/servlet/OcHttpServletFilterTest.java
new file mode 100644
index 0000000..90fa4cf
--- /dev/null
+++ b/contrib/http_servlet/src/test/java/io/opencensus/contrib/http/servlet/OcHttpServletFilterTest.java
@@ -0,0 +1,192 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.servlet;
+
+import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.contrib.http.servlet.OcHttpServletFilter.EXCEPTION_MESSAGE;
+import static io.opencensus.contrib.http.servlet.OcHttpServletFilter.OC_EXTRACTOR;
+import static io.opencensus.contrib.http.servlet.OcHttpServletFilter.OC_PUBLIC_ENDPOINT;
+import static io.opencensus.contrib.http.servlet.OcHttpServletFilter.OC_TRACE_PROPAGATOR;
+import static io.opencensus.contrib.http.servlet.OcHttpServletUtil.CONTENT_LENGTH;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import io.opencensus.contrib.http.HttpExtractor;
+import io.opencensus.contrib.http.HttpRequestContext;
+import io.opencensus.contrib.http.HttpServerHandler;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.propagation.TextFormat;
+import java.io.IOException;
+import java.util.List;
+import javax.servlet.AsyncContext;
+import javax.servlet.AsyncListener;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.mockito.Spy;
+
+/** Unit tests for {@link OcHttpServletFilter}. */
+@RunWith(JUnit4.class)
+public class OcHttpServletFilterTest {
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  @Mock HttpServletResponse mockResponse;
+  @Mock HttpServletRequest mockRequest;
+  @Mock FilterChain mockChain;
+  @Mock FilterConfig mockConfig;
+  @Mock HttpServerHandler<HttpServletRequest, HttpServletResponse, HttpServletRequest> mockHandler;
+  @Mock Span mockSpan;
+  @Mock AsyncContext mockAsyncContext;
+  @Mock HttpRequestContext mockContext;
+  @Mock ServletContext mockServletContext;
+  TextFormat b3Propagator;
+  HttpExtractor<HttpServletRequest, HttpServletResponse> customExtractor;
+  @Spy OcHttpServletFilter filter = new OcHttpServletFilter();
+  @Captor ArgumentCaptor<String> stringArgumentCaptor;
+  Object dummyAttr = new Object();
+
+  @Before
+  public void setUp() {
+    MockitoAnnotations.initMocks(this);
+    b3Propagator = Tracing.getPropagationComponent().getB3Format();
+    customExtractor = new OcHttpServletExtractor();
+  }
+
+  @Test
+  public void testInit() throws IOException, ServletException {
+
+    HttpServerHandler<HttpServletRequest, HttpServletResponse, HttpServletRequest> oldHandler =
+        filter.handler;
+    when(mockConfig.getServletContext()).thenReturn(mockServletContext);
+    when(mockServletContext.getAttribute(OC_TRACE_PROPAGATOR)).thenReturn(null);
+    when(mockServletContext.getAttribute(OC_EXTRACTOR)).thenReturn(null);
+    when(mockServletContext.getInitParameter(OC_PUBLIC_ENDPOINT)).thenReturn(null);
+
+    filter.init(mockConfig);
+
+    verify(mockConfig).getServletContext();
+    verify(mockServletContext, times(2)).getAttribute(stringArgumentCaptor.capture());
+    verify(mockServletContext).getInitParameter(stringArgumentCaptor.capture());
+
+    List<String> attributes = stringArgumentCaptor.getAllValues();
+    assertThat(attributes.contains(OC_TRACE_PROPAGATOR)).isTrue();
+    assertThat(attributes.contains(OC_EXTRACTOR)).isTrue();
+    assertThat(filter.handler).isNotEqualTo(oldHandler);
+    assertThat(filter.handler).isNotNull();
+  }
+
+  @Test
+  public void testInitWithOptions() throws IOException, ServletException {
+
+    HttpServerHandler<HttpServletRequest, HttpServletResponse, HttpServletRequest> oldHandler =
+        filter.handler;
+    when(mockConfig.getServletContext()).thenReturn(mockServletContext);
+    when(mockServletContext.getAttribute(OC_TRACE_PROPAGATOR)).thenReturn(b3Propagator);
+    when(mockServletContext.getAttribute(OC_EXTRACTOR)).thenReturn(customExtractor);
+    when(mockServletContext.getInitParameter(OC_PUBLIC_ENDPOINT)).thenReturn("false");
+
+    filter.init(mockConfig);
+
+    verify(mockConfig).getServletContext();
+    verify(mockServletContext, times(2)).getAttribute(stringArgumentCaptor.capture());
+    verify(mockServletContext).getInitParameter(stringArgumentCaptor.capture());
+
+    List<String> attributes = stringArgumentCaptor.getAllValues();
+    assertThat(attributes.contains(OC_TRACE_PROPAGATOR)).isTrue();
+    assertThat(attributes.contains(OC_EXTRACTOR)).isTrue();
+    assertThat(filter.handler).isNotEqualTo(oldHandler);
+    assertThat(filter.handler).isNotNull();
+  }
+
+  private void testInitInvalidAttr(String attr) throws ServletException {
+    when(mockConfig.getServletContext()).thenReturn(mockServletContext);
+    when(mockServletContext.getAttribute(attr)).thenReturn(dummyAttr);
+    thrown.expect(ServletException.class);
+    thrown.expectMessage(EXCEPTION_MESSAGE + attr);
+
+    filter.init(mockConfig);
+  }
+
+  @Test
+  public void testInitInvalidExtractor() throws ServletException {
+    testInitInvalidAttr(OC_EXTRACTOR);
+  }
+
+  @Test
+  public void testInitInvalidPropagator() throws ServletException {
+    testInitInvalidAttr(OC_TRACE_PROPAGATOR);
+  }
+
+  @Test
+  public void testDoFilterSync() throws IOException, ServletException {
+
+    OcHttpServletFilter filter = new OcHttpServletFilter();
+
+    when(mockRequest.isAsyncStarted()).thenReturn(false);
+    when(mockResponse.getHeader("")).thenReturn("");
+    when(mockResponse.getHeader(CONTENT_LENGTH)).thenReturn("10");
+    when(mockRequest.getContentLength()).thenReturn(10);
+
+    filter.doFilter(mockRequest, mockResponse, mockChain);
+    verify(mockRequest).getContentLength();
+  }
+
+  @Test
+  public void testDoFilterAsync() throws IOException, ServletException {
+
+    OcHttpServletFilter filter = new OcHttpServletFilter();
+
+    when(mockRequest.isAsyncStarted()).thenReturn(true);
+    when(mockResponse.getHeader("")).thenReturn("");
+    when(mockResponse.getHeader(CONTENT_LENGTH)).thenReturn("10");
+    when(mockRequest.getContentLength()).thenReturn(10);
+    when(mockRequest.getAsyncContext()).thenReturn(mockAsyncContext);
+    doNothing()
+        .when(mockAsyncContext)
+        .addListener(
+            any(AsyncListener.class),
+            any(HttpServletRequest.class),
+            any(HttpServletResponse.class));
+
+    filter.doFilter(mockRequest, mockResponse, mockChain);
+    verify(mockResponse, never()).getHeader(CONTENT_LENGTH);
+    verify(mockRequest).getContentLength();
+    verify(mockAsyncContext)
+        .addListener(
+            any(AsyncListener.class),
+            any(HttpServletRequest.class),
+            any(HttpServletResponse.class));
+  }
+}
diff --git a/contrib/http_servlet/src/test/java/io/opencensus/contrib/http/servlet/OcHttpServletListenerTest.java b/contrib/http_servlet/src/test/java/io/opencensus/contrib/http/servlet/OcHttpServletListenerTest.java
new file mode 100644
index 0000000..d60e519
--- /dev/null
+++ b/contrib/http_servlet/src/test/java/io/opencensus/contrib/http/servlet/OcHttpServletListenerTest.java
@@ -0,0 +1,167 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.servlet;
+
+import static io.opencensus.contrib.http.servlet.OcHttpServletUtil.CONTENT_LENGTH;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import io.opencensus.contrib.http.HttpExtractor;
+import io.opencensus.contrib.http.HttpRequestContext;
+import io.opencensus.contrib.http.HttpServerHandler;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.propagation.TextFormat.Getter;
+import javax.annotation.Nullable;
+import javax.servlet.AsyncContext;
+import javax.servlet.AsyncEvent;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+/** Unit tests for {@link OcHttpServletListener}. */
+@RunWith(JUnit4.class)
+public class OcHttpServletListenerTest {
+
+  static final Getter<Object> getter =
+      new Getter<Object>() {
+        @Nullable
+        @Override
+        public String get(Object carrier, String key) {
+          return "";
+        }
+      };
+  private final Object request = new Object();
+  private final Object response = new Object();
+  private final HttpExtractor<Object, Object> extractor =
+      new HttpExtractor<Object, Object>() {
+        @Nullable
+        @Override
+        public String getRoute(Object request) {
+          return "";
+        }
+
+        @Nullable
+        @Override
+        public String getUrl(Object request) {
+          return "";
+        }
+
+        @Nullable
+        @Override
+        public String getHost(Object request) {
+          return "";
+        }
+
+        @Nullable
+        @Override
+        public String getMethod(Object request) {
+          return "";
+        }
+
+        @Nullable
+        @Override
+        public String getPath(Object request) {
+          return "";
+        }
+
+        @Nullable
+        @Override
+        public String getUserAgent(Object request) {
+          return "";
+        }
+
+        @Override
+        public int getStatusCode(@Nullable Object response) {
+          return 0;
+        }
+      };
+  private final HttpServerHandler<Object, Object, Object> handler =
+      new HttpServerHandler<Object, Object, Object>(
+          Tracing.getTracer(),
+          extractor,
+          Tracing.getPropagationComponent().getTraceContextFormat(),
+          getter,
+          true) {};
+  @Mock HttpRequestContext mockContext;
+  @Mock HttpServletResponse mockResponse;
+  @Mock HttpServletRequest mockRequest;
+  @Mock HttpServerHandler<HttpServletRequest, HttpServletResponse, HttpServletRequest> mockHandler;
+  @Mock AsyncEvent mockAsyncEvent;
+  @Mock Throwable mockThrowable;
+  @Mock AsyncContext mockAsyncContext;
+  OcHttpServletListener listener;
+  private HttpRequestContext context;
+
+  @Before
+  public void setUp() {
+    MockitoAnnotations.initMocks(this);
+    context = handler.handleStart(request, response);
+
+    listener = new OcHttpServletListener(mockHandler, context);
+
+    when(mockHandler.handleStart(mockRequest, mockRequest)).thenReturn(context);
+    when(mockAsyncEvent.getThrowable()).thenReturn(mockThrowable);
+    when(mockAsyncEvent.getSuppliedResponse()).thenReturn(mockResponse);
+    when(mockAsyncEvent.getSuppliedRequest()).thenReturn(mockRequest);
+    when(mockAsyncEvent.getAsyncContext()).thenReturn(mockAsyncContext);
+    when(mockResponse.getHeader(CONTENT_LENGTH)).thenReturn("10");
+    when(mockRequest.getContentLength()).thenReturn(10);
+
+    doNothing().when(mockHandler).handleEnd(mockContext, mockRequest, mockResponse, null);
+  }
+
+  @Test
+  public void testOnComplete() {
+    listener.onComplete(mockAsyncEvent);
+    verify(mockHandler).handleEnd(context, mockRequest, mockResponse, null);
+  }
+
+  @Test
+  public void testOnTimeout() {
+    listener.onTimeout(mockAsyncEvent);
+    verify(mockHandler).handleEnd(context, mockRequest, mockResponse, null);
+  }
+
+  @Test
+  public void testOnError() {
+    doNothing().when(mockHandler).handleEnd(mockContext, mockRequest, mockResponse, mockThrowable);
+    listener.onError(mockAsyncEvent);
+    verify(mockHandler).handleEnd(context, mockRequest, mockResponse, mockThrowable);
+  }
+
+  @Test
+  public void testOnStartAsync() {
+    doNothing().when(mockHandler).handleEnd(mockContext, mockRequest, mockResponse, null);
+    listener.onStartAsync(mockAsyncEvent);
+    verify(mockHandler, never()).handleEnd(mockContext, mockRequest, mockResponse, null);
+    verify(mockAsyncContext)
+        .addListener(
+            any(OcHttpServletListener.class),
+            any(ServletRequest.class),
+            any(ServletResponse.class));
+  }
+}
diff --git a/contrib/http_servlet/src/test/java/io/opencensus/contrib/http/servlet/WriteListenerWrapperTest.java b/contrib/http_servlet/src/test/java/io/opencensus/contrib/http/servlet/WriteListenerWrapperTest.java
new file mode 100644
index 0000000..17f3d84
--- /dev/null
+++ b/contrib/http_servlet/src/test/java/io/opencensus/contrib/http/servlet/WriteListenerWrapperTest.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.servlet;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.grpc.Context;
+import io.grpc.Context.Key;
+import java.io.IOException;
+import javax.servlet.ServletException;
+import javax.servlet.WriteListener;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link WriteListenerWrapper}. */
+@RunWith(JUnit4.class)
+public class WriteListenerWrapperTest {
+
+  @Test
+  public void testWriteOnPossibleWithContext() throws IOException, ServletException {
+
+    Key<String> key = Context.<String>key("test-key");
+    Context curr = Context.current();
+    assertThat(curr).isNotNull();
+    final Context parentContext = curr.withValue(key, "parent");
+    assertThat(parentContext).isNotNull();
+
+    Context prev = parentContext.attach();
+    try {
+      WriteListenerWrapper writeListener =
+          new WriteListenerWrapper(
+              new WriteListener() {
+                @Override
+                public void onWritePossible() throws IOException {
+                  Context curr = Context.current();
+                  assertThat(curr).isNotNull();
+                  assertThat(curr).isEqualTo(parentContext);
+                }
+
+                @Override
+                public void onError(Throwable t) {}
+              });
+
+      Context childContext = parentContext.withValue(key, "child");
+      assertThat(childContext).isNotNull();
+      assertThat(childContext.attach()).isNotNull();
+      try {
+        writeListener.onWritePossible();
+      } finally {
+        childContext.detach(parentContext);
+      }
+    } finally {
+      parentContext.detach(prev);
+    }
+  }
+}
diff --git a/contrib/http_util/README.md b/contrib/http_util/README.md
index 9678fcb..b13915f 100644
--- a/contrib/http_util/README.md
+++ b/contrib/http_util/README.md
@@ -16,22 +16,160 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-api</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-contrib-http-util</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
 </dependencies>
 ```
 
 For Gradle add to your dependencies:
-```gradle
-compile 'io.opencensus:opencensus-api:0.16.1'
-compile 'io.opencensus:opencensus-contrib-http-util:0.16.1'
+```groovy
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-contrib-http-util:0.28.3'
 ```
 
+## Instrumenting HTTP libraries/frameworks
+
+### Customization for libraries/frameworks
+
+Users can implement `HttpExtractor` to customize what information is extracted from the HTTP
+request/response entity.
+
+If context propagation is enabled, users need to provide a framework-specific `TextFormat.Setter`
+and `TextFormat.Getter`. They are used to inject/extract information into/from the `Carrier` of
+the request. The `Carrier` can be the request itself or another object, as long as it supports
+setting and getting HTTP attributes.
+
+Below is an example of how the customization for libraries/frameworks should be done:
+
+```java
+// // Http request entity in the library/framework.
+// public class HttpRequest {
+//   ...
+// }
+//
+// // Http response entity in the library/framework.
+// public class HttpResponse {
+//   ...
+// }
+
+// use the HttpRequest itself as Carrier.
+TextFormat.Setter<HttpRequest> myTextFormatSetter =
+    new TextFormat.Setter<HttpRequest>() {
+      @Override
+      public void put(HttpRequest carrier, String key, String value) {
+        carrier.setHeader(key, value);
+      }
+    };
+TextFormat.Getter<HttpRequest> myTextFormatGetter =
+    new TextFormat.Getter<HttpRequest>() {
+      @Override
+      public String get(HttpRequest carrier, String key) {
+        return carrier.getHeader(key);
+      }
+    };
+HttpExtractor<HttpRequest, HttpResponse> extractor =
+    new HttpExtractor<HttpRequest, HttpResponse>() {
+      @Override
+      public int getStatusCode(@Nullable HttpResponse response) {
+        return response == null ? 0 : response.getStatusCode();
+      }
+
+      // other methods that need to be overridden
+      // ...
+    };
+```
+
+### Client
+
+Users can create an `HttpClientHandler` to help instrument client-side HTTP requests and responses.
+
+An example usage of the handler would be:
+
+```java
+HttpClientHandler<HttpRequest, HttpResponse, HttpRequest> handler =
+    new HttpClientHandler<HttpRequest, HttpResponse, HttpRequest>(
+        tracer, extractor, myTextFormat, myTextFormatSetter);
+
+// Use #handleStart in client to start a new span.
+// Use `null` if you want to use current Span as the parent Span.
+HttpRequestContext context = handler.handleStart(null, request, request);
+HttpResponse response = null;
+Throwable error = null;
+try {
+  // Do something to send the request, and get response code from the server
+  response = getResponse(request);
+
+  // Optionally, use #handleMessageSent in client to log a SENT event and its size.
+  handler.handleMessageSent(context, request.getContentLength());
+
+  // Optionally, use #handleMessageReceived in client to log a RECEIVED event and message size.
+  handler.handleMessageReceived(context, response.getContentLength());
+} catch (Throwable e) {
+  error = e;
+} finally {
+  // Use #handleEnd in client to close the span.
+  handler.handleEnd(context, request, response, error);
+}
+```
+
+### Server
+
+Users can create an `HttpServerHandler` to help instrument server-side HTTP requests and responses.
+
+An example usage of the handler would be:
+
+```java
+HttpServerHandler<HttpRequest, HttpResponse, HttpRequest> handler =
+    new HttpServerHandler<HttpRequest, HttpResponse, HttpRequest>(
+        tracer, extractor, myTextFormat, myTextFormatGetter,
+        false /* true if it is public endpoint */);
+
+// Use #handleStart in server to start a new span.
+HttpRequestContext context = handler.handleStart(request, request);
+HttpResponse response = constructResponse();
+Throwable error = null;
+try (Scope scope = tracer.withSpan(handler.getSpanFromContext(context))) {
+  // Do something to decide whether to serve the request or exit early.
+  // For example, the client may expect a 100 Continue before sending the message body.
+  if (request.getContentLength() > REQUEST_LIMIT) {
+    response.setStatus(413);
+  } else {
+    response.setStatus(100);
+    String content = request.getContent();
+
+    // Optionally, use #handleMessageReceived in server to log a RECEIVED event and its size.
+    handler.handleMessageReceived(context, request.getContentLength());
+
+    // Do something to prepare the response or exception.
+    response.setStatus(201);
+    response.write("OK");
+    response.flush();
+
+    // Optionally, use #handleMessageSent in server to log a SENT event and its size.
+    handler.handleMessageSent(context, response.getContentLength());
+  }
+} catch (Throwable e) {
+  error = e;
+} finally {
+  // Use #handleEnd in server to close the span.
+  handler.handleEnd(context, request, response, error);
+}
+```
+
+### Handling async calls
+
+In asynchronous HTTP calls, message receiving and sending may happen on different
+threads. Users need to ensure that the started span (as well as the scope, if any) is
+closed or ended regardless of whether the call succeeds.
+
+To do that, store the current scope and span somewhere, e.g. in the context of the channel,
+and close them before the channel exits, as in the sketch below.
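+
+A minimal sketch of this pattern, assuming a hypothetical `channel` object that can carry
+per-call attributes and exposes a completion callback (the `attr`/`onComplete` methods are
+illustrative, not part of this library):
+
+```java
+// On the thread that starts the call.
+HttpRequestContext context = handler.handleStart(request, request);
+Scope scope = tracer.withSpan(handler.getSpanFromContext(context));
+channel.attr("oc-context").set(context);
+channel.attr("oc-scope").set(scope);
+
+// Later, possibly on a different thread, when the call completes or fails.
+channel.onComplete((HttpResponse response, Throwable error) -> {
+  ((Scope) channel.attr("oc-scope").get()).close();
+  handler.handleEnd((HttpRequestContext) channel.attr("oc-context").get(), request, response, error);
+});
+```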
+
 [travis-image]: https://travis-ci.org/census-instrumentation/opencensus-java.svg?branch=master
 [travis-url]: https://travis-ci.org/census-instrumentation/opencensus-java
 [appveyor-image]: https://ci.appveyor.com/api/projects/status/hxthmpkxar4jq4be/branch/master?svg=true
diff --git a/contrib/http_util/src/main/java/io/opencensus/contrib/http/AbstractHttpHandler.java b/contrib/http_util/src/main/java/io/opencensus/contrib/http/AbstractHttpHandler.java
new file mode 100644
index 0000000..dc5ad4d
--- /dev/null
+++ b/contrib/http_util/src/main/java/io/opencensus/contrib/http/AbstractHttpHandler.java
@@ -0,0 +1,156 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.common.annotations.VisibleForTesting;
+import io.opencensus.contrib.http.util.HttpTraceAttributeConstants;
+import io.opencensus.contrib.http.util.HttpTraceUtil;
+import io.opencensus.tags.TagContext;
+import io.opencensus.trace.AttributeValue;
+import io.opencensus.trace.MessageEvent;
+import io.opencensus.trace.MessageEvent.Type;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Span.Options;
+import javax.annotation.Nullable;
+
+/** Base class for handling requests on HTTP clients and servers. */
+abstract class AbstractHttpHandler<Q, P> {
+  /** The {@link HttpExtractor} used to extract information from request/response. */
+  @VisibleForTesting final HttpExtractor<Q, P> extractor;
+
+  /** Constructor to allow access from same package subclasses only. */
+  AbstractHttpHandler(HttpExtractor<Q, P> extractor) {
+    checkNotNull(extractor, "extractor");
+    this.extractor = extractor;
+  }
+
+  /**
+   * A convenience method to record a {@link MessageEvent} with the given parameters.
+   *
+   * @param span the span which this {@code MessageEvent} will be added to.
+   * @param id the id of the event.
+   * @param type the {@code MessageEvent.Type} of the event.
+   * @param uncompressedMessageSize size of the message before compression (optional).
+   * @param compressedMessageSize size of the message after compression (optional).
+   * @since 0.19
+   */
+  static void recordMessageEvent(
+      Span span, long id, Type type, long uncompressedMessageSize, long compressedMessageSize) {
+    MessageEvent messageEvent =
+        MessageEvent.builder(type, id)
+            .setUncompressedMessageSize(uncompressedMessageSize)
+            .setCompressedMessageSize(compressedMessageSize)
+            .build();
+    span.addMessageEvent(messageEvent);
+  }
+
+  private static void putAttributeIfNotEmptyOrNull(Span span, String key, @Nullable String value) {
+    if (value != null && !value.isEmpty()) {
+      span.putAttribute(key, AttributeValue.stringAttributeValue(value));
+    }
+  }
+
+  /**
+   * Instrument an HTTP span after a message is sent. Typically called for every chunk of the
+   * request or response that is sent.
+   *
+   * @param context request specific {@link HttpRequestContext}
+   * @param bytes bytes sent.
+   * @since 0.19
+   */
+  public final void handleMessageSent(HttpRequestContext context, long bytes) {
+    checkNotNull(context, "context");
+    context.sentMessageSize.addAndGet(bytes);
+    if (context.span.getOptions().contains(Options.RECORD_EVENTS)) {
+      // record bytes as the uncompressed size; the compressed size is not tracked here
+      recordMessageEvent(context.span, context.sentSeqId.addAndGet(1L), Type.SENT, bytes, 0L);
+    }
+  }
+
+  /**
+   * Instrument an HTTP span after a message is received. Typically called for every chunk of the
+   * request or response that is received.
+   *
+   * @param context request specific {@link HttpRequestContext}
+   * @param bytes bytes received.
+   * @since 0.19
+   */
+  public final void handleMessageReceived(HttpRequestContext context, long bytes) {
+    checkNotNull(context, "context");
+    context.receiveMessageSize.addAndGet(bytes);
+    if (context.span.getOptions().contains(Options.RECORD_EVENTS)) {
+      // record bytes as the uncompressed size; the compressed size is not tracked here
+      recordMessageEvent(
+          context.span, context.receviedSeqId.addAndGet(1L), Type.RECEIVED, bytes, 0L);
+    }
+  }
+
+  void spanEnd(Span span, int httpStatus, @Nullable Throwable error) {
+    if (span.getOptions().contains(Options.RECORD_EVENTS)) {
+      span.putAttribute(
+          HttpTraceAttributeConstants.HTTP_STATUS_CODE,
+          AttributeValue.longAttributeValue(httpStatus));
+      span.setStatus(HttpTraceUtil.parseResponseStatus(httpStatus, error));
+    }
+    span.end();
+  }
+
+  final String getSpanName(Q request, HttpExtractor<Q, P> extractor) {
+    // default span name
+    String path = extractor.getPath(request);
+    if (path == null) {
+      path = "/";
+    }
+    if (!path.startsWith("/")) {
+      path = "/" + path;
+    }
+    return path;
+  }
+
+  final void addSpanRequestAttributes(Span span, Q request, HttpExtractor<Q, P> extractor) {
+    putAttributeIfNotEmptyOrNull(
+        span, HttpTraceAttributeConstants.HTTP_USER_AGENT, extractor.getUserAgent(request));
+    putAttributeIfNotEmptyOrNull(
+        span, HttpTraceAttributeConstants.HTTP_HOST, extractor.getHost(request));
+    putAttributeIfNotEmptyOrNull(
+        span, HttpTraceAttributeConstants.HTTP_METHOD, extractor.getMethod(request));
+    putAttributeIfNotEmptyOrNull(
+        span, HttpTraceAttributeConstants.HTTP_PATH, extractor.getPath(request));
+    putAttributeIfNotEmptyOrNull(
+        span, HttpTraceAttributeConstants.HTTP_ROUTE, extractor.getRoute(request));
+    putAttributeIfNotEmptyOrNull(
+        span, HttpTraceAttributeConstants.HTTP_URL, extractor.getUrl(request));
+  }
+
+  /**
+   * Retrieves {@link Span} from the {@link HttpRequestContext}.
+   *
+   * @param context request specific {@link HttpRequestContext}
+   * @return {@link Span} associated with the request.
+   * @since 0.19
+   */
+  public Span getSpanFromContext(HttpRequestContext context) {
+    checkNotNull(context, "context");
+    return context.span;
+  }
+
+  HttpRequestContext getNewContext(Span span, TagContext tagContext) {
+    return new HttpRequestContext(span, tagContext);
+  }
+}
diff --git a/contrib/http_util/src/main/java/io/opencensus/contrib/http/HttpClientHandler.java b/contrib/http_util/src/main/java/io/opencensus/contrib/http/HttpClientHandler.java
new file mode 100644
index 0000000..44fad19
--- /dev/null
+++ b/contrib/http_util/src/main/java/io/opencensus/contrib/http/HttpClientHandler.java
@@ -0,0 +1,186 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static io.opencensus.contrib.http.HttpRequestContext.METADATA_NO_PROPAGATION;
+import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_CLIENT_HOST;
+import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_CLIENT_METHOD;
+import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_CLIENT_RECEIVED_BYTES;
+import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_CLIENT_ROUNDTRIP_LATENCY;
+import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_CLIENT_SENT_BYTES;
+import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_CLIENT_STATUS;
+import static java.util.concurrent.TimeUnit.NANOSECONDS;
+
+import io.opencensus.common.ExperimentalApi;
+import io.opencensus.stats.Stats;
+import io.opencensus.stats.StatsRecorder;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.TagValue;
+import io.opencensus.tags.Tagger;
+import io.opencensus.tags.Tags;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Span.Kind;
+import io.opencensus.trace.Span.Options;
+import io.opencensus.trace.SpanBuilder;
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.propagation.TextFormat;
+import javax.annotation.Nullable;
+
+/*>>>
+import org.checkerframework.checker.nullness.qual.NonNull;
+*/
+
+/**
+ * This helper class provides routine methods to instrument HTTP clients.
+ *
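+ * <p>A minimal usage sketch (the {@code HttpRequest} and {@code HttpResponse} types, the
+ * {@code extractor}, {@code textFormat} and {@code setter} instances, and {@code sendRequest}
+ * below are illustrative placeholders supplied by the instrumented client):
+ *
+ * <pre>{@code
+ * HttpClientHandler<HttpRequest, HttpResponse, HttpRequest> handler =
+ *     new HttpClientHandler<HttpRequest, HttpResponse, HttpRequest>(
+ *         Tracing.getTracer(), extractor, textFormat, setter);
+ * HttpRequestContext context = handler.handleStart(null, request, request);
+ * HttpResponse response = null;
+ * Throwable error = null;
+ * try {
+ *   response = sendRequest(request);
+ * } catch (Throwable t) {
+ *   error = t;
+ * } finally {
+ *   handler.handleEnd(context, request, response, error);
+ * }
+ * }</pre>
+ *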
+ * @param <Q> the HTTP request entity.
+ * @param <P> the HTTP response entity.
+ * @param <C> the type of the carrier.
+ * @since 0.19
+ */
+@ExperimentalApi
+public class HttpClientHandler<
+        Q /*>>> extends @NonNull Object*/, P, C /*>>> extends @NonNull Object*/>
+    extends AbstractHttpHandler<Q, P> {
+
+  private final TextFormat.Setter<C> setter;
+  private final TextFormat textFormat;
+  private final Tracer tracer;
+  private final StatsRecorder statsRecorder;
+  private final Tagger tagger;
+
+  /**
+   * Creates a {@link HttpClientHandler} with given parameters.
+   *
+   * @param tracer the OpenCensus tracing component.
+   * @param extractor the {@code HttpExtractor} used to extract information from the
+   *     request/response.
+   * @param textFormat the {@code TextFormat} used in HTTP propagation.
+   * @param setter the setter used when injecting information to the {@code carrier}.
+   * @since 0.19
+   */
+  public HttpClientHandler(
+      Tracer tracer,
+      HttpExtractor<Q, P> extractor,
+      TextFormat textFormat,
+      TextFormat.Setter<C> setter) {
+    super(extractor);
+    checkNotNull(setter, "setter");
+    checkNotNull(textFormat, "textFormat");
+    checkNotNull(tracer, "tracer");
+    this.setter = setter;
+    this.textFormat = textFormat;
+    this.tracer = tracer;
+    this.statsRecorder = Stats.getStatsRecorder();
+    this.tagger = Tags.getTagger();
+  }
+
+  /**
+   * Instrument a request for tracing and stats before it is sent.
+   *
+   * <p>This method will create a span in the current context to represent the HTTP call. The created
+   * span will be serialized and propagated to the server.
+   *
+   * <p>The generated span will NOT be set as the current context. Users can control when to enter the
+   * scope of this span. Use {@link AbstractHttpHandler#getSpanFromContext} to retrieve the span.
+   *
+   * @param parent the parent {@link Span}. {@code null} indicates using current span.
+   * @param carrier the entity that holds the HTTP information.
+   * @param request the request entity.
+   * @return the {@link HttpRequestContext} that contains stats and trace data associated with the
+   *     request.
+   * @since 0.19
+   */
+  public HttpRequestContext handleStart(@Nullable Span parent, C carrier, Q request) {
+    checkNotNull(carrier, "carrier");
+    checkNotNull(request, "request");
+    if (parent == null) {
+      parent = tracer.getCurrentSpan();
+    }
+    String spanName = getSpanName(request, extractor);
+    SpanBuilder builder = tracer.spanBuilderWithExplicitParent(spanName, parent);
+    Span span = builder.setSpanKind(Kind.CLIENT).startSpan();
+
+    if (span.getOptions().contains(Options.RECORD_EVENTS)) {
+      addSpanRequestAttributes(span, request, extractor);
+    }
+
+    // inject propagation header
+    SpanContext spanContext = span.getContext();
+    if (!spanContext.equals(SpanContext.INVALID)) {
+      textFormat.inject(spanContext, carrier, setter);
+    }
+    return getNewContext(span, tagger.getCurrentTagContext());
+  }
+
+  /**
+   * Close an HTTP span and record stats specific to the request.
+   *
+   * <p>This method will set the status of the span and end it. Additionally, it will record measurements
+   * associated with the request.
+   *
+   * @param context the {@link HttpRequestContext} returned from {@link
+   *     HttpClientHandler#handleStart(Span, Object, Object)}
+   * @param request the HTTP request entity.
+   * @param response the HTTP response entity. {@code null} means invalid response.
+   * @param error the error that occurred while processing the response.
+   * @since 0.19
+   */
+  public void handleEnd(
+      HttpRequestContext context,
+      @Nullable Q request,
+      @Nullable P response,
+      @Nullable Throwable error) {
+    checkNotNull(context, "context");
+    int httpCode = extractor.getStatusCode(response);
+    recordStats(context, request, httpCode);
+    spanEnd(context.span, httpCode, error);
+  }
+
+  private void recordStats(HttpRequestContext context, @Nullable Q request, int httpCode) {
+    double requestLatency = NANOSECONDS.toMillis(System.nanoTime() - context.requestStartTime);
+
+    String methodStr = request == null ? "" : extractor.getMethod(request);
+    String host = request == null ? "null_request" : extractor.getHost(request);
+
+    TagContext startCtx =
+        tagger
+            .toBuilder(context.tagContext)
+            .put(
+                HTTP_CLIENT_HOST,
+                TagValue.create(host == null ? "null_host" : host),
+                METADATA_NO_PROPAGATION)
+            .put(
+                HTTP_CLIENT_METHOD,
+                TagValue.create(methodStr == null ? "" : methodStr),
+                METADATA_NO_PROPAGATION)
+            .put(
+                HTTP_CLIENT_STATUS,
+                TagValue.create(httpCode == 0 ? "error" : Integer.toString(httpCode)),
+                METADATA_NO_PROPAGATION)
+            .build();
+
+    statsRecorder
+        .newMeasureMap()
+        .put(HTTP_CLIENT_ROUNDTRIP_LATENCY, requestLatency)
+        .put(HTTP_CLIENT_SENT_BYTES, context.sentMessageSize.get())
+        .put(HTTP_CLIENT_RECEIVED_BYTES, context.receiveMessageSize.get())
+        .record(startCtx);
+  }
+}
diff --git a/contrib/http_util/src/main/java/io/opencensus/contrib/http/HttpExtractor.java b/contrib/http_util/src/main/java/io/opencensus/contrib/http/HttpExtractor.java
new file mode 100644
index 0000000..50bcb50
--- /dev/null
+++ b/contrib/http_util/src/main/java/io/opencensus/contrib/http/HttpExtractor.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http;
+
+import io.opencensus.common.ExperimentalApi;
+import javax.annotation.Nullable;
+
+/**
+ * An adaptor to extract information from request and response.
+ *
+ * <p>Implementations must provide framework-specific logic for each extraction method.
+ *
+ * <p>Please refer to this <a
+ * href="https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/HTTP.md">document</a>
+ * for more information about the HTTP attributes recorded in OpenCensus.
+ *
+ * @param <Q> the HTTP request entity.
+ * @param <P> the HTTP response entity.
+ * @since 0.19
+ */
+@ExperimentalApi
+public abstract class HttpExtractor<Q, P> {
+
+  /**
+   * Returns the request route.
+   *
+   * @param request the HTTP request.
+   * @return the request route.
+   * @since 0.19
+   */
+  @Nullable
+  public abstract String getRoute(Q request);
+
+  /**
+   * Returns the request URL.
+   *
+   * @param request the HTTP request.
+   * @return the request URL.
+   * @since 0.19
+   */
+  @Nullable
+  public abstract String getUrl(Q request);
+
+  /**
+   * Returns the request URL host.
+   *
+   * @param request the HTTP request.
+   * @return the request URL host.
+   * @since 0.19
+   */
+  @Nullable
+  public abstract String getHost(Q request);
+
+  /**
+   * Returns the request method.
+   *
+   * @param request the HTTP request.
+   * @return the request method.
+   * @since 0.19
+   */
+  @Nullable
+  public abstract String getMethod(Q request);
+
+  /**
+   * Returns the request URL path.
+   *
+   * @param request the HTTP request.
+   * @return the request URL path.
+   * @since 0.19
+   */
+  @Nullable
+  public abstract String getPath(Q request);
+
+  /**
+   * Returns the request user agent.
+   *
+   * @param request the HTTP request.
+   * @return the request user agent.
+   * @since 0.19
+   */
+  @Nullable
+  public abstract String getUserAgent(Q request);
+
+  /**
+   * Returns the response status code. If the response is null, this method should return {@code 0}.
+   *
+   * @param response the HTTP response.
+   * @return the response status code.
+   * @since 0.19
+   */
+  public abstract int getStatusCode(@Nullable P response);
+}
diff --git a/contrib/http_util/src/main/java/io/opencensus/contrib/http/HttpRequestContext.java b/contrib/http_util/src/main/java/io/opencensus/contrib/http/HttpRequestContext.java
new file mode 100644
index 0000000..4e16101
--- /dev/null
+++ b/contrib/http_util/src/main/java/io/opencensus/contrib/http/HttpRequestContext.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.common.annotations.VisibleForTesting;
+import io.opencensus.common.ExperimentalApi;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.TagMetadata;
+import io.opencensus.tags.TagMetadata.TagTtl;
+import io.opencensus.trace.Span;
+import java.util.concurrent.atomic.AtomicLong;
+
+/**
+ * This class provides per-request storage of the span and tag context for HTTP clients and servers.
+ *
+ * @since 0.19
+ */
+@ExperimentalApi
+public class HttpRequestContext {
+  @VisibleForTesting static final long INVALID_STARTTIME = -1;
+
+  static final TagMetadata METADATA_NO_PROPAGATION = TagMetadata.create(TagTtl.NO_PROPAGATION);
+
+  @VisibleForTesting final long requestStartTime;
+  @VisibleForTesting final Span span;
+  @VisibleForTesting AtomicLong sentMessageSize = new AtomicLong();
+  @VisibleForTesting AtomicLong receiveMessageSize = new AtomicLong();
+  @VisibleForTesting AtomicLong sentSeqId = new AtomicLong();
+  @VisibleForTesting AtomicLong receviedSeqId = new AtomicLong();
+  @VisibleForTesting final TagContext tagContext;
+
+  HttpRequestContext(Span span, TagContext tagContext) {
+    checkNotNull(span, "span");
+    checkNotNull(tagContext, "tagContext");
+    this.span = span;
+    this.tagContext = tagContext;
+    requestStartTime = System.nanoTime();
+  }
+}
diff --git a/contrib/http_util/src/main/java/io/opencensus/contrib/http/HttpServerHandler.java b/contrib/http_util/src/main/java/io/opencensus/contrib/http/HttpServerHandler.java
new file mode 100644
index 0000000..c179eec
--- /dev/null
+++ b/contrib/http_util/src/main/java/io/opencensus/contrib/http/HttpServerHandler.java
@@ -0,0 +1,200 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static io.opencensus.contrib.http.HttpRequestContext.METADATA_NO_PROPAGATION;
+import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_SERVER_LATENCY;
+import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_SERVER_METHOD;
+import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_SERVER_RECEIVED_BYTES;
+import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_SERVER_ROUTE;
+import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_SERVER_SENT_BYTES;
+import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_SERVER_STATUS;
+import static java.util.concurrent.TimeUnit.NANOSECONDS;
+
+import io.opencensus.common.ExperimentalApi;
+import io.opencensus.stats.Stats;
+import io.opencensus.stats.StatsRecorder;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.TagValue;
+import io.opencensus.tags.Tagger;
+import io.opencensus.tags.Tags;
+import io.opencensus.trace.Link;
+import io.opencensus.trace.Link.Type;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Span.Kind;
+import io.opencensus.trace.Span.Options;
+import io.opencensus.trace.SpanBuilder;
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.propagation.SpanContextParseException;
+import io.opencensus.trace.propagation.TextFormat;
+import javax.annotation.Nullable;
+
+/*>>>
+import org.checkerframework.checker.nullness.qual.NonNull;
+*/
+
+/**
+ * This helper class provides routine methods to instrument HTTP servers.
+ *
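+ * <p>A minimal usage sketch (the {@code HttpRequest} and {@code HttpResponse} types, the
+ * {@code extractor}, {@code textFormat} and {@code getter} instances, and {@code serve}
+ * below are illustrative placeholders supplied by the instrumented server):
+ *
+ * <pre>{@code
+ * HttpServerHandler<HttpRequest, HttpResponse, HttpRequest> handler =
+ *     new HttpServerHandler<HttpRequest, HttpResponse, HttpRequest>(
+ *         Tracing.getTracer(), extractor, textFormat, getter, false);
+ * HttpRequestContext context = handler.handleStart(request, request);
+ * HttpResponse response = null;
+ * Throwable error = null;
+ * try (Scope scope = Tracing.getTracer().withSpan(handler.getSpanFromContext(context))) {
+ *   response = serve(request);
+ * } catch (Throwable t) {
+ *   error = t;
+ * } finally {
+ *   handler.handleEnd(context, request, response, error);
+ * }
+ * }</pre>
+ *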
+ * @param <Q> the HTTP request entity.
+ * @param <P> the HTTP response entity.
+ * @param <C> the type of the carrier.
+ * @since 0.19
+ */
+@ExperimentalApi
+public class HttpServerHandler<
+        Q /*>>> extends @NonNull Object*/, P, C /*>>> extends @NonNull Object*/>
+    extends AbstractHttpHandler<Q, P> {
+
+  private final TextFormat.Getter<C> getter;
+  private final TextFormat textFormat;
+  private final Tracer tracer;
+  private final Boolean publicEndpoint;
+  private final StatsRecorder statsRecorder;
+  private final Tagger tagger;
+
+  /**
+   * Creates a {@link HttpServerHandler} with given parameters.
+   *
+   * @param tracer the OpenCensus tracing component.
+   * @param extractor the {@code HttpExtractor} used to extract information from the
+   *     request/response.
+   * @param textFormat the {@code TextFormat} used in HTTP propagation.
+   * @param getter the getter used when extracting information from the {@code carrier}.
+   * @param publicEndpoint set to true for a publicly accessible HTTP(S) server. If true, then the
+   *     incoming trace context will be added as a link instead of as a parent.
+   * @since 0.19
+   */
+  public HttpServerHandler(
+      Tracer tracer,
+      HttpExtractor<Q, P> extractor,
+      TextFormat textFormat,
+      TextFormat.Getter<C> getter,
+      Boolean publicEndpoint) {
+    super(extractor);
+    checkNotNull(tracer, "tracer");
+    checkNotNull(textFormat, "textFormat");
+    checkNotNull(getter, "getter");
+    checkNotNull(publicEndpoint, "publicEndpoint");
+    this.tracer = tracer;
+    this.textFormat = textFormat;
+    this.getter = getter;
+    this.publicEndpoint = publicEndpoint;
+    this.statsRecorder = Stats.getStatsRecorder();
+    this.tagger = Tags.getTagger();
+  }
+
+  /**
+   * Instrument an incoming request before it is handled.
+   *
+   * <p>This method will create a span under the deserialized propagated parent context. If the
+   * parent context is not present, the span will be created under the current context.
+   *
+   * <p>The generated span will NOT be set as the current context. Users can control when to enter the
+   * scope of this span. Use {@link AbstractHttpHandler#getSpanFromContext} to retrieve the span.
+   *
+   * @param carrier the entity that holds the HTTP information.
+   * @param request the request entity.
+   * @return the {@link HttpRequestContext} that contains stats and trace data associated with the
+   *     request.
+   * @since 0.19
+   */
+  public HttpRequestContext handleStart(C carrier, Q request) {
+    checkNotNull(carrier, "carrier");
+    checkNotNull(request, "request");
+    SpanBuilder spanBuilder = null;
+    String spanName = getSpanName(request, extractor);
+    // de-serialize the context
+    SpanContext spanContext = null;
+    try {
+      spanContext = textFormat.extract(carrier, getter);
+    } catch (SpanContextParseException e) {
+      // TODO: Currently we cannot distinguish between context parse error and missing context.
+      // Logging would be annoying so we just ignore this error and do not even log a message.
+    }
+    if (spanContext == null || publicEndpoint) {
+      spanBuilder = tracer.spanBuilder(spanName);
+    } else {
+      spanBuilder = tracer.spanBuilderWithRemoteParent(spanName, spanContext);
+    }
+
+    Span span = spanBuilder.setSpanKind(Kind.SERVER).startSpan();
+    if (publicEndpoint && spanContext != null) {
+      span.addLink(Link.fromSpanContext(spanContext, Type.PARENT_LINKED_SPAN));
+    }
+
+    if (span.getOptions().contains(Options.RECORD_EVENTS)) {
+      addSpanRequestAttributes(span, request, extractor);
+    }
+
+    return getNewContext(span, tagger.getCurrentTagContext());
+  }
+
+  /**
+   * Close an HTTP span and record stats specific to the request.
+   *
+   * <p>This method will set the status of the span and end it. Additionally, it will record message
+   * events for the span and record measurements associated with the request.
+   *
+   * @param context the {@link HttpRequestContext} used with {@link
+   *     HttpServerHandler#handleStart(Object, Object)}
+   * @param request the HTTP request entity.
+   * @param response the HTTP response entity. {@code null} means invalid response.
+   * @param error the error that occurred while processing the response.
+   * @since 0.19
+   */
+  public void handleEnd(
+      HttpRequestContext context, Q request, @Nullable P response, @Nullable Throwable error) {
+    checkNotNull(context, "context");
+    checkNotNull(request, "request");
+    int httpCode = extractor.getStatusCode(response);
+    recordStats(context, request, httpCode);
+    spanEnd(context.span, httpCode, error);
+  }
+
+  private void recordStats(HttpRequestContext context, Q request, int httpCode) {
+    double requestLatency = NANOSECONDS.toMillis(System.nanoTime() - context.requestStartTime);
+
+    String methodStr = extractor.getMethod(request);
+    String routeStr = extractor.getRoute(request);
+    TagContext startCtx =
+        tagger
+            .toBuilder(context.tagContext)
+            .put(
+                HTTP_SERVER_METHOD,
+                TagValue.create(methodStr == null ? "" : methodStr),
+                METADATA_NO_PROPAGATION)
+            .put(
+                HTTP_SERVER_ROUTE,
+                TagValue.create(routeStr == null ? "" : routeStr),
+                METADATA_NO_PROPAGATION)
+            .put(
+                HTTP_SERVER_STATUS,
+                TagValue.create(httpCode == 0 ? "error" : Integer.toString(httpCode)),
+                METADATA_NO_PROPAGATION)
+            .build();
+
+    statsRecorder
+        .newMeasureMap()
+        .put(HTTP_SERVER_LATENCY, requestLatency)
+        .put(HTTP_SERVER_RECEIVED_BYTES, context.receiveMessageSize.get())
+        .put(HTTP_SERVER_SENT_BYTES, context.sentMessageSize.get())
+        .record(startCtx);
+  }
+}
diff --git a/contrib/http_util/src/main/java/io/opencensus/contrib/http/util/HttpMeasureConstants.java b/contrib/http_util/src/main/java/io/opencensus/contrib/http/util/HttpMeasureConstants.java
index fd73b8a..29d5c6c 100644
--- a/contrib/http_util/src/main/java/io/opencensus/contrib/http/util/HttpMeasureConstants.java
+++ b/contrib/http_util/src/main/java/io/opencensus/contrib/http/util/HttpMeasureConstants.java
@@ -33,7 +33,6 @@
 
   private HttpMeasureConstants() {}
 
-  private static final String UNIT_COUNT = "1";
   private static final String UNIT_SIZE_BYTE = "By";
   private static final String UNIT_LATENCY_MS = "ms";
 
@@ -172,4 +171,12 @@
    * @since 0.13
    */
   public static final TagKey HTTP_SERVER_METHOD = TagKey.create("http_server_method");
+
+  /**
+   * {@link TagKey} for the server-side logical route, a pattern that matched the URL, of a handler
+   * that processed the request.
+   *
+   * @since 0.19
+   */
+  public static final TagKey HTTP_SERVER_ROUTE = TagKey.create("http_server_route");
 }
diff --git a/contrib/http_util/src/main/java/io/opencensus/contrib/http/util/HttpTraceAttributeConstants.java b/contrib/http_util/src/main/java/io/opencensus/contrib/http/util/HttpTraceAttributeConstants.java
new file mode 100644
index 0000000..d2d0705
--- /dev/null
+++ b/contrib/http_util/src/main/java/io/opencensus/contrib/http/util/HttpTraceAttributeConstants.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.util;
+
+/**
+ * Constant trace attribute keys for HTTP requests and responses.
+ *
+ * @since 0.18
+ */
+public final class HttpTraceAttributeConstants {
+  public static final String HTTP_HOST = "http.host";
+  public static final String HTTP_ROUTE = "http.route";
+  public static final String HTTP_PATH = "http.path";
+  public static final String HTTP_METHOD = "http.method";
+  public static final String HTTP_USER_AGENT = "http.user_agent";
+  public static final String HTTP_URL = "http.url";
+  public static final String HTTP_STATUS_CODE = "http.status_code";
+
+  private HttpTraceAttributeConstants() {}
+}
diff --git a/contrib/http_util/src/main/java/io/opencensus/contrib/http/util/HttpTraceUtil.java b/contrib/http_util/src/main/java/io/opencensus/contrib/http/util/HttpTraceUtil.java
new file mode 100644
index 0000000..bca7b6d
--- /dev/null
+++ b/contrib/http_util/src/main/java/io/opencensus/contrib/http/util/HttpTraceUtil.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.util;
+
+import io.opencensus.common.ExperimentalApi;
+import io.opencensus.trace.Status;
+import javax.annotation.Nullable;
+
+/**
+ * A helper class to provide convenience methods for tracing.
+ *
+ * @since 0.18
+ */
+@ExperimentalApi
+public final class HttpTraceUtil {
+  private static final Status STATUS_100 = Status.UNKNOWN.withDescription("Continue");
+  private static final Status STATUS_101 = Status.UNKNOWN.withDescription("Switching Protocols");
+  private static final Status STATUS_402 = Status.UNKNOWN.withDescription("Payment Required");
+  private static final Status STATUS_405 = Status.UNKNOWN.withDescription("Method Not Allowed");
+  private static final Status STATUS_406 = Status.UNKNOWN.withDescription("Not Acceptable");
+  private static final Status STATUS_407 =
+      Status.UNKNOWN.withDescription("Proxy Authentication Required");
+  private static final Status STATUS_408 = Status.UNKNOWN.withDescription("Request Time-out");
+  private static final Status STATUS_409 = Status.UNKNOWN.withDescription("Conflict");
+  private static final Status STATUS_410 = Status.UNKNOWN.withDescription("Gone");
+  private static final Status STATUS_411 = Status.UNKNOWN.withDescription("Length Required");
+  private static final Status STATUS_412 = Status.UNKNOWN.withDescription("Precondition Failed");
+  private static final Status STATUS_413 =
+      Status.UNKNOWN.withDescription("Request Entity Too Large");
+  private static final Status STATUS_414 = Status.UNKNOWN.withDescription("Request-URI Too Large");
+  private static final Status STATUS_415 = Status.UNKNOWN.withDescription("Unsupported Media Type");
+  private static final Status STATUS_416 =
+      Status.UNKNOWN.withDescription("Requested range not satisfiable");
+  private static final Status STATUS_417 = Status.UNKNOWN.withDescription("Expectation Failed");
+  private static final Status STATUS_500 = Status.UNKNOWN.withDescription("Internal Server Error");
+  private static final Status STATUS_502 = Status.UNKNOWN.withDescription("Bad Gateway");
+  private static final Status STATUS_505 =
+      Status.UNKNOWN.withDescription("HTTP Version not supported");
+
+  private HttpTraceUtil() {}
+
+  /**
+   * Parse OpenCensus Status from HTTP response status code.
+   *
+   * <p>This method serves as a default routine to map an HTTP status code to an OpenCensus Status. The
+   * mapping is defined in <a
+   * href="https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto">Google API
+   * canonical error code</a>, and the behavior is defined in <a
+   * href="https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/HTTP.md">OpenCensus
+   * Specs</a>.
+   *
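+   * <p>For example, with the mapping implemented here:
+   *
+   * <pre>{@code
+   * HttpTraceUtil.parseResponseStatus(200, null);                    // Status.OK
+   * HttpTraceUtil.parseResponseStatus(404, null);                    // Status.NOT_FOUND
+   * HttpTraceUtil.parseResponseStatus(0, new IOException("reset"));  // Status.UNKNOWN, description "reset"
+   * }</pre>
+   *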
+   * @param statusCode the HTTP response status code. {@code 0} means invalid response.
+   * @param error the error that occurred during response transmission (optional).
+   * @return the corresponding OpenCensus {@code Status}.
+   * @since 0.18
+   */
+  public static final Status parseResponseStatus(int statusCode, @Nullable Throwable error) {
+    String message = null;
+
+    if (error != null) {
+      message = error.getMessage();
+      if (message == null) {
+        message = error.getClass().getSimpleName();
+      }
+    }
+
+    // set status according to response
+    if (statusCode == 0) {
+      return Status.UNKNOWN.withDescription(message);
+    } else {
+      if (statusCode >= 200 && statusCode < 400) {
+        return Status.OK;
+      } else {
+        // error code, try parse it
+        switch (statusCode) {
+          case 100:
+            return STATUS_100;
+          case 101:
+            return STATUS_101;
+          case 400:
+            return Status.INVALID_ARGUMENT.withDescription(message);
+          case 401:
+            return Status.UNAUTHENTICATED.withDescription(message);
+          case 402:
+            return STATUS_402;
+          case 403:
+            return Status.PERMISSION_DENIED.withDescription(message);
+          case 404:
+            return Status.NOT_FOUND.withDescription(message);
+          case 405:
+            return STATUS_405;
+          case 406:
+            return STATUS_406;
+          case 407:
+            return STATUS_407;
+          case 408:
+            return STATUS_408;
+          case 409:
+            return STATUS_409;
+          case 410:
+            return STATUS_410;
+          case 411:
+            return STATUS_411;
+          case 412:
+            return STATUS_412;
+          case 413:
+            return STATUS_413;
+          case 414:
+            return STATUS_414;
+          case 415:
+            return STATUS_415;
+          case 416:
+            return STATUS_416;
+          case 417:
+            return STATUS_417;
+          case 429:
+            return Status.RESOURCE_EXHAUSTED.withDescription(message);
+          case 500:
+            return STATUS_500;
+          case 501:
+            return Status.UNIMPLEMENTED.withDescription(message);
+          case 502:
+            return STATUS_502;
+          case 503:
+            return Status.UNAVAILABLE.withDescription(message);
+          case 504:
+            return Status.DEADLINE_EXCEEDED.withDescription(message);
+          case 505:
+            return STATUS_505;
+          default:
+            return Status.UNKNOWN.withDescription(message);
+        }
+      }
+    }
+  }
+}
diff --git a/contrib/http_util/src/main/java/io/opencensus/contrib/http/util/HttpViewConstants.java b/contrib/http_util/src/main/java/io/opencensus/contrib/http/util/HttpViewConstants.java
index 54ad20c..f5da0a0 100644
--- a/contrib/http_util/src/main/java/io/opencensus/contrib/http/util/HttpViewConstants.java
+++ b/contrib/http_util/src/main/java/io/opencensus/contrib/http/util/HttpViewConstants.java
@@ -17,15 +17,14 @@
 package io.opencensus.contrib.http.util;
 
 import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_CLIENT_METHOD;
-import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_CLIENT_PATH;
 import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_CLIENT_RECEIVED_BYTES;
 import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_CLIENT_ROUNDTRIP_LATENCY;
 import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_CLIENT_SENT_BYTES;
 import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_CLIENT_STATUS;
 import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_SERVER_LATENCY;
 import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_SERVER_METHOD;
-import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_SERVER_PATH;
 import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_SERVER_RECEIVED_BYTES;
+import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_SERVER_ROUTE;
 import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_SERVER_SENT_BYTES;
 import static io.opencensus.contrib.http.util.HttpMeasureConstants.HTTP_SERVER_STATUS;
 
@@ -95,7 +94,7 @@
           "Count of client-side HTTP requests completed",
           HTTP_CLIENT_ROUNDTRIP_LATENCY,
           COUNT,
-          Arrays.asList(HTTP_CLIENT_METHOD, HTTP_CLIENT_PATH));
+          Arrays.asList(HTTP_CLIENT_METHOD, HTTP_CLIENT_STATUS));
 
   /**
    * {@link View} for size distribution of client-side HTTP request body.
@@ -108,7 +107,7 @@
           "Size distribution of client-side HTTP request body",
           HTTP_CLIENT_SENT_BYTES,
           SIZE_DISTRIBUTION,
-          Arrays.asList(HTTP_CLIENT_METHOD, HTTP_CLIENT_PATH));
+          Arrays.asList(HTTP_CLIENT_METHOD, HTTP_CLIENT_STATUS));
 
   /**
    * {@link View} for size distribution of client-side HTTP response body.
@@ -121,7 +120,7 @@
           "Size distribution of client-side HTTP response body",
           HTTP_CLIENT_RECEIVED_BYTES,
           SIZE_DISTRIBUTION,
-          Arrays.asList(HTTP_CLIENT_METHOD, HTTP_CLIENT_PATH));
+          Arrays.asList(HTTP_CLIENT_METHOD, HTTP_CLIENT_STATUS));
 
   /**
    * {@link View} for roundtrip latency distribution of client-side HTTP requests.
@@ -134,7 +133,7 @@
           "Roundtrip latency distribution of client-side HTTP requests",
           HTTP_CLIENT_ROUNDTRIP_LATENCY,
           LATENCY_DISTRIBUTION,
-          Arrays.asList(HTTP_CLIENT_METHOD, HTTP_CLIENT_PATH, HTTP_CLIENT_STATUS));
+          Arrays.asList(HTTP_CLIENT_METHOD, HTTP_CLIENT_STATUS));
 
   /**
    * {@link View} for count of server-side HTTP requests serving completed.
@@ -147,7 +146,7 @@
           "Count of HTTP server-side requests serving completed",
           HTTP_SERVER_LATENCY,
           COUNT,
-          Arrays.asList(HTTP_SERVER_METHOD, HTTP_SERVER_PATH));
+          Arrays.asList(HTTP_SERVER_METHOD, HTTP_SERVER_ROUTE, HTTP_SERVER_STATUS));
 
   /**
    * {@link View} for size distribution of server-side HTTP request body.
@@ -160,7 +159,7 @@
           "Size distribution of server-side HTTP request body",
           HTTP_SERVER_RECEIVED_BYTES,
           SIZE_DISTRIBUTION,
-          Arrays.asList(HTTP_SERVER_METHOD, HTTP_SERVER_PATH));
+          Arrays.asList(HTTP_SERVER_METHOD, HTTP_SERVER_ROUTE, HTTP_SERVER_STATUS));
 
   /**
    * {@link View} for size distribution of server-side HTTP response body.
@@ -173,7 +172,7 @@
           "Size distribution of server-side HTTP response body",
           HTTP_SERVER_SENT_BYTES,
           SIZE_DISTRIBUTION,
-          Arrays.asList(HTTP_SERVER_METHOD, HTTP_SERVER_PATH));
+          Arrays.asList(HTTP_SERVER_METHOD, HTTP_SERVER_ROUTE, HTTP_SERVER_STATUS));
 
   /**
    * {@link View} for latency distribution of server-side HTTP requests serving.
@@ -186,5 +185,5 @@
           "Latency distribution of server-side HTTP requests serving",
           HTTP_SERVER_LATENCY,
           LATENCY_DISTRIBUTION,
-          Arrays.asList(HTTP_SERVER_METHOD, HTTP_SERVER_PATH, HTTP_SERVER_STATUS));
+          Arrays.asList(HTTP_SERVER_METHOD, HTTP_SERVER_ROUTE, HTTP_SERVER_STATUS));
 }
diff --git a/contrib/http_util/src/test/java/io/opencensus/contrib/http/AbstractHttpHandlerTest.java b/contrib/http_util/src/test/java/io/opencensus/contrib/http/AbstractHttpHandlerTest.java
new file mode 100644
index 0000000..9e082cf
--- /dev/null
+++ b/contrib/http_util/src/test/java/io/opencensus/contrib/http/AbstractHttpHandlerTest.java
@@ -0,0 +1,213 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import io.opencensus.contrib.http.util.HttpTraceAttributeConstants;
+import io.opencensus.contrib.http.util.testing.FakeSpan;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.Tags;
+import io.opencensus.trace.AttributeValue;
+import io.opencensus.trace.EndSpanOptions;
+import io.opencensus.trace.MessageEvent;
+import io.opencensus.trace.MessageEvent.Type;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Span.Options;
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.SpanId;
+import io.opencensus.trace.Status;
+import io.opencensus.trace.TraceId;
+import io.opencensus.trace.TraceOptions;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Random;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.mockito.Spy;
+
+/** Unit tests for {@link AbstractHttpHandler}. */
+@RunWith(JUnit4.class)
+public class AbstractHttpHandlerTest {
+
+  @Rule public final ExpectedException thrown = ExpectedException.none();
+  private final Object request = new Object();
+  private final Exception error = new Exception("test");
+  private final Random random = new Random();
+  private final SpanContext spanContext =
+      SpanContext.create(
+          TraceId.generateRandomId(random),
+          SpanId.generateRandomId(random),
+          TraceOptions.DEFAULT,
+          null);
+  Map<String, String> attributeMap = new HashMap<String, String>();
+  @Mock private Span span;
+  @Mock private HttpExtractor<Object, Object> extractor;
+  private AbstractHttpHandler<Object, Object> handler;
+  @Captor private ArgumentCaptor<MessageEvent> captor;
+  @Captor private ArgumentCaptor<AttributeValue> attributeCaptor;
+  @Captor private ArgumentCaptor<Status> statusCaptor;
+  @Captor private ArgumentCaptor<EndSpanOptions> optionsCaptor;
+
+  @Spy private FakeSpan fakeSpan = new FakeSpan(spanContext, EnumSet.of(Options.RECORD_EVENTS));
+  private final TagContext tagContext = Tags.getTagger().getCurrentTagContext();
+
+  @Before
+  public void setUp() {
+    MockitoAnnotations.initMocks(this);
+    handler = new AbstractHttpHandler<Object, Object>(extractor) {};
+    attributeMap.put(HttpTraceAttributeConstants.HTTP_HOST, "example.com");
+    attributeMap.put(HttpTraceAttributeConstants.HTTP_ROUTE, "/get/:name");
+    attributeMap.put(HttpTraceAttributeConstants.HTTP_PATH, "/get/helloworld");
+    attributeMap.put(HttpTraceAttributeConstants.HTTP_METHOD, "GET");
+    attributeMap.put(HttpTraceAttributeConstants.HTTP_USER_AGENT, "test 1.0");
+    attributeMap.put(HttpTraceAttributeConstants.HTTP_URL, "http://example.com/get/helloworld");
+  }
+
+  @Test
+  public void constructorDisallowNullExtractor() {
+    thrown.expect(NullPointerException.class);
+    new AbstractHttpHandler<Object, Object>(null) {};
+  }
+
+  @Test
+  public void handleMessageSent() {
+    Type type = Type.SENT;
+    long uncompressed = 456L;
+    HttpRequestContext context = new HttpRequestContext(fakeSpan, tagContext);
+    handler.handleMessageSent(context, uncompressed);
+    verify(fakeSpan).addMessageEvent(captor.capture());
+
+    MessageEvent messageEvent = captor.getValue();
+    assertThat(messageEvent.getType()).isEqualTo(type);
+    assertThat(messageEvent.getMessageId()).isEqualTo(1L);
+    assertThat(messageEvent.getUncompressedMessageSize()).isEqualTo(uncompressed);
+    assertThat(messageEvent.getCompressedMessageSize()).isEqualTo(0);
+  }
+
+  @Test
+  public void handleMessageReceived() {
+    Type type = Type.RECEIVED;
+    long uncompressed = 456L;
+    HttpRequestContext context = new HttpRequestContext(fakeSpan, tagContext);
+    handler.handleMessageReceived(context, uncompressed);
+    verify(fakeSpan).addMessageEvent(captor.capture());
+
+    MessageEvent messageEvent = captor.getValue();
+    assertThat(messageEvent.getType()).isEqualTo(type);
+    assertThat(messageEvent.getMessageId()).isEqualTo(1L);
+    assertThat(messageEvent.getUncompressedMessageSize()).isEqualTo(uncompressed);
+    assertThat(messageEvent.getCompressedMessageSize()).isEqualTo(0);
+  }
+
+  @Test
+  public void handleEndDisallowNullSpan() {
+    thrown.expect(NullPointerException.class);
+    handler.spanEnd(null, 0, error);
+  }
+
+  @Test
+  public void handleEndAllowZeroCodeAndNullError() {
+    handler.spanEnd(fakeSpan, 0, null);
+    verify(fakeSpan).setStatus(statusCaptor.capture());
+    assertThat(statusCaptor.getValue()).isEqualTo(Status.UNKNOWN);
+  }
+
+  @Test
+  public void handleEndAllowNonZeroCodeAndNullError() {
+    handler.spanEnd(fakeSpan, 200, null);
+    verify(fakeSpan).setStatus(statusCaptor.capture());
+    assertThat(statusCaptor.getValue()).isEqualTo(Status.OK);
+  }
+
+  @Test
+  public void handleEndShouldEndSpan() {
+    when(extractor.getStatusCode(any(Object.class))).thenReturn(0);
+
+    handler.spanEnd(fakeSpan, 0, error);
+    verify(fakeSpan).end(optionsCaptor.capture());
+    assertThat(optionsCaptor.getValue()).isEqualTo(EndSpanOptions.DEFAULT);
+  }
+
+  @Test
+  public void handleEndWithRecordEvents() {
+    when(extractor.getStatusCode(any(Object.class))).thenReturn(0);
+    handler.spanEnd(fakeSpan, 0, error);
+    verify(fakeSpan)
+        .putAttribute(eq(HttpTraceAttributeConstants.HTTP_STATUS_CODE), attributeCaptor.capture());
+    assertThat(attributeCaptor.getValue()).isEqualTo(AttributeValue.longAttributeValue(0));
+  }
+
+  @Test
+  public void testSpanName() {
+    String spanName = handler.getSpanName(request, extractor);
+    assertThat(spanName).isNotNull();
+  }
+
+  private void verifyAttributes(String key) {
+    verify(span).putAttribute(eq(key), attributeCaptor.capture());
+    assertThat(attributeCaptor.getValue().toString()).contains(attributeMap.get(key));
+  }
+
+  @Test
+  public void testSpanRequestAttributes() {
+    when(extractor.getRoute(any(Object.class)))
+        .thenReturn(attributeMap.get(HttpTraceAttributeConstants.HTTP_ROUTE));
+    when(extractor.getHost(any(Object.class)))
+        .thenReturn(attributeMap.get(HttpTraceAttributeConstants.HTTP_HOST));
+    when(extractor.getPath(any(Object.class)))
+        .thenReturn(attributeMap.get(HttpTraceAttributeConstants.HTTP_PATH));
+    when(extractor.getMethod(any(Object.class)))
+        .thenReturn(attributeMap.get(HttpTraceAttributeConstants.HTTP_METHOD));
+    when(extractor.getUserAgent(any(Object.class)))
+        .thenReturn(attributeMap.get(HttpTraceAttributeConstants.HTTP_USER_AGENT));
+    when(extractor.getUrl(any(Object.class)))
+        .thenReturn(attributeMap.get(HttpTraceAttributeConstants.HTTP_URL));
+
+    handler.addSpanRequestAttributes(span, request, extractor);
+
+    for (Entry<String, String> entry : attributeMap.entrySet()) {
+      verifyAttributes(entry.getKey());
+    }
+  }
+
+  @Test
+  public void testGetNewContext() {
+    HttpRequestContext context = handler.getNewContext(fakeSpan, tagContext);
+    assertThat(context).isNotNull();
+  }
+
+  @Test
+  public void testGetSpanFromContext() {
+    HttpRequestContext context = handler.getNewContext(fakeSpan, tagContext);
+    assertThat(handler.getSpanFromContext(context)).isEqualTo(fakeSpan);
+  }
+}
diff --git a/contrib/http_util/src/test/java/io/opencensus/contrib/http/HttpClientHandlerTest.java b/contrib/http_util/src/test/java/io/opencensus/contrib/http/HttpClientHandlerTest.java
new file mode 100644
index 0000000..6875212
--- /dev/null
+++ b/contrib/http_util/src/test/java/io/opencensus/contrib/http/HttpClientHandlerTest.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.same;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import io.opencensus.common.Scope;
+import io.opencensus.contrib.http.util.testing.FakeSpan;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.Tags;
+import io.opencensus.trace.EndSpanOptions;
+import io.opencensus.trace.Span.Kind;
+import io.opencensus.trace.SpanBuilder;
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.SpanId;
+import io.opencensus.trace.TraceId;
+import io.opencensus.trace.TraceOptions;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.propagation.TextFormat;
+import java.util.Random;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.mockito.Spy;
+
+/** Unit tests for {@link HttpClientHandler}. */
+@RunWith(JUnit4.class)
+public class HttpClientHandlerTest {
+
+  @Rule public final ExpectedException thrown = ExpectedException.none();
+  private final Random random = new Random();
+  private final SpanContext spanContext =
+      SpanContext.create(
+          TraceId.generateRandomId(random),
+          SpanId.generateRandomId(random),
+          TraceOptions.DEFAULT,
+          null);
+  private final Object request = new Object();
+  private final Object carrier = new Object();
+  private final Object response = new Object();
+  @Mock private SpanBuilder spanBuilder;
+  @Mock private Tracer tracer;
+  @Mock private TextFormat textFormat;
+  @Mock private TextFormat.Setter<Object> textFormatSetter;
+  @Mock private HttpExtractor<Object, Object> extractor;
+  private HttpClientHandler<Object, Object, Object> handler;
+  @Spy private FakeSpan parentSpan = new FakeSpan(spanContext, null);
+  private final FakeSpan childSpan = new FakeSpan(parentSpan.getContext(), null);
+  @Captor private ArgumentCaptor<EndSpanOptions> optionsCaptor;
+  private final TagContext tagContext = Tags.getTagger().getCurrentTagContext();
+  @Captor private ArgumentCaptor<Kind> kindArgumentCaptor;
+
+  @Before
+  public void setUp() {
+    MockitoAnnotations.initMocks(this);
+    handler =
+        new HttpClientHandler<Object, Object, Object>(
+            tracer, extractor, textFormat, textFormatSetter);
+    when(tracer.spanBuilderWithExplicitParent(any(String.class), same(parentSpan)))
+        .thenReturn(spanBuilder);
+    when(spanBuilder.setSpanKind(any(Kind.class))).thenReturn(spanBuilder);
+    when(spanBuilder.startSpan()).thenReturn(childSpan);
+  }
+
+  @Test
+  public void constructorDisallowNullTextFormatSetter() {
+    thrown.expect(NullPointerException.class);
+    new HttpClientHandler<Object, Object, Object>(tracer, extractor, textFormat, null);
+  }
+
+  @Test
+  public void handleStartWithoutSpanDisallowNullCarrier() {
+    thrown.expect(NullPointerException.class);
+    handler.handleStart(parentSpan, /*carrier=*/ null, request);
+  }
+
+  @Test
+  public void handleStartDisallowNullRequest() {
+    thrown.expect(NullPointerException.class);
+    handler.handleStart(parentSpan, carrier, /*request=*/ null);
+  }
+
+  @Test
+  public void handleStartShouldCreateChildSpanInCurrentContext() {
+    Scope scope = tracer.withSpan(parentSpan);
+    try {
+      HttpRequestContext context = handler.handleStart(null, carrier, request);
+      verify(tracer).spanBuilderWithExplicitParent(any(String.class), same(parentSpan));
+      assertThat(context.span).isEqualTo(childSpan);
+    } finally {
+      scope.close();
+    }
+  }
+
+  @Test
+  public void handleStartCreateChildSpanInSpecifiedContext() {
+    // without scope
+    HttpRequestContext context = handler.handleStart(parentSpan, carrier, request);
+    verify(tracer).spanBuilderWithExplicitParent(any(String.class), same(parentSpan));
+    assertThat(context.span).isEqualTo(childSpan);
+  }
+
+  @Test
+  public void handleStartShouldInjectCarrier() {
+    handler.handleStart(parentSpan, carrier, request);
+    verify(textFormat).inject(same(spanContext), same(carrier), same(textFormatSetter));
+  }
+
+  @Test
+  public void handleStartShouldSetKindToClient() {
+    handler.handleStart(parentSpan, carrier, request);
+    verify(spanBuilder).setSpanKind(kindArgumentCaptor.capture());
+
+    Kind kind = kindArgumentCaptor.getValue();
+    assertThat(kind).isEqualTo(Kind.CLIENT);
+  }
+
+  @Test
+  public void handleEndDisallowNullContext() {
+    thrown.expect(NullPointerException.class);
+    handler.handleEnd(null, request, response, null);
+  }
+
+  @Test
+  public void handleEndShouldEndSpan() {
+    HttpRequestContext context = new HttpRequestContext(parentSpan, tagContext);
+    when(extractor.getStatusCode(any(Object.class))).thenReturn(0);
+    handler.handleEnd(context, request, response, null);
+    verify(parentSpan).end(optionsCaptor.capture());
+    EndSpanOptions options = optionsCaptor.getValue();
+    assertThat(options).isEqualTo(EndSpanOptions.DEFAULT);
+  }
+}
diff --git a/contrib/http_util/src/test/java/io/opencensus/contrib/http/HttpRequestContextTest.java b/contrib/http_util/src/test/java/io/opencensus/contrib/http/HttpRequestContextTest.java
new file mode 100644
index 0000000..695e094
--- /dev/null
+++ b/contrib/http_util/src/test/java/io/opencensus/contrib/http/HttpRequestContextTest.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.Tags;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Tracing;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link HttpRequestContext}. */
+@RunWith(JUnit4.class)
+public class HttpRequestContextTest {
+  @Rule public final ExpectedException thrown = ExpectedException.none();
+  private final Span span = Tracing.getTracer().spanBuilder("testSpan").startSpan();
+  private final TagContext tagContext = Tags.getTagger().getCurrentTagContext();
+  private final HttpRequestContext context = new HttpRequestContext(span, tagContext);
+
+  @Test
+  public void testDisallowNullSpan() {
+    thrown.expect(NullPointerException.class);
+    new HttpRequestContext(null, tagContext);
+  }
+
+  @Test
+  public void testDisallowNullTagContext() {
+    thrown.expect(NullPointerException.class);
+    new HttpRequestContext(span, null);
+  }
+
+  @Test
+  public void testInitValues() {
+    assertThat(context.requestStartTime).isGreaterThan(0L);
+    assertThat(context.sentMessageSize.longValue()).isEqualTo(0L);
+    assertThat(context.receiveMessageSize.longValue()).isEqualTo(0L);
+  }
+}
diff --git a/contrib/http_util/src/test/java/io/opencensus/contrib/http/HttpServerHandlerTest.java b/contrib/http_util/src/test/java/io/opencensus/contrib/http/HttpServerHandlerTest.java
new file mode 100644
index 0000000..705cfa7
--- /dev/null
+++ b/contrib/http_util/src/test/java/io/opencensus/contrib/http/HttpServerHandlerTest.java
@@ -0,0 +1,194 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.same;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import io.opencensus.contrib.http.util.testing.FakeSpan;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.Tags;
+import io.opencensus.trace.EndSpanOptions;
+import io.opencensus.trace.Link;
+import io.opencensus.trace.Link.Type;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Span.Kind;
+import io.opencensus.trace.SpanBuilder;
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.SpanId;
+import io.opencensus.trace.TraceId;
+import io.opencensus.trace.TraceOptions;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.propagation.SpanContextParseException;
+import io.opencensus.trace.propagation.TextFormat;
+import java.util.Random;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.mockito.Spy;
+
+/** Unit tests for {@link HttpServerHandler}. */
+@RunWith(JUnit4.class)
+public class HttpServerHandlerTest {
+
+  @Rule public final ExpectedException thrown = ExpectedException.none();
+  private final Random random = new Random();
+  private final SpanContext spanContextRemote =
+      SpanContext.create(
+          TraceId.generateRandomId(random),
+          SpanId.generateRandomId(random),
+          TraceOptions.DEFAULT,
+          null);
+  private final SpanContext spanContextLocal =
+      SpanContext.create(
+          TraceId.generateRandomId(random),
+          SpanId.generateRandomId(random),
+          TraceOptions.DEFAULT,
+          null);
+  private final Object request = new Object();
+  private final Object response = new Object();
+  private final Object carrier = new Object();
+  @Mock private SpanBuilder spanBuilderWithRemoteParent;
+  @Mock private SpanBuilder spanBuilderWithLocalParent;
+  @Mock private Tracer tracer;
+  @Mock private TextFormat textFormat;
+  @Mock private TextFormat.Getter<Object> textFormatGetter;
+  @Mock private HttpExtractor<Object, Object> extractor;
+  @Captor private ArgumentCaptor<Link> captor;
+  @Captor private ArgumentCaptor<Kind> kindArgumentCaptor;
+  @Captor private ArgumentCaptor<EndSpanOptions> optionsCaptor;
+  private HttpServerHandler<Object, Object, Object> handler;
+  private HttpServerHandler<Object, Object, Object> handlerForPublicEndpoint;
+  // TODO(hailongwen): use `MockableSpan` instead.
+  @Spy private FakeSpan spanWithLocalParent = new FakeSpan(spanContextLocal, null);
+  @Spy private FakeSpan spanWithRemoteParent = new FakeSpan(spanContextRemote, null);
+  private final TagContext tagContext = Tags.getTagger().getCurrentTagContext();
+  private final HttpRequestContext context =
+      new HttpRequestContext(spanWithLocalParent, tagContext);
+
+  @Before
+  public void setUp() throws SpanContextParseException {
+    MockitoAnnotations.initMocks(this);
+    handler =
+        new HttpServerHandler<Object, Object, Object>(
+            tracer, extractor, textFormat, textFormatGetter, false);
+    handlerForPublicEndpoint =
+        new HttpServerHandler<Object, Object, Object>(
+            tracer, extractor, textFormat, textFormatGetter, true);
+
+    when(tracer.spanBuilderWithRemoteParent(any(String.class), same(spanContextRemote)))
+        .thenReturn(spanBuilderWithRemoteParent);
+    when(tracer.spanBuilderWithExplicitParent(any(String.class), any(Span.class)))
+        .thenReturn(spanBuilderWithLocalParent);
+    when(spanBuilderWithRemoteParent.setSpanKind(any(Kind.class)))
+        .thenReturn(spanBuilderWithRemoteParent);
+    when(spanBuilderWithLocalParent.setSpanKind(any(Kind.class)))
+        .thenReturn(spanBuilderWithLocalParent);
+    when(spanBuilderWithRemoteParent.startSpan()).thenReturn(spanWithRemoteParent);
+    when(spanBuilderWithLocalParent.startSpan()).thenReturn(spanWithLocalParent);
+
+    when(textFormat.extract(same(carrier), same(textFormatGetter))).thenReturn(spanContextRemote);
+  }
+
+  @Test
+  public void constructorDisallowNullTextFormatGetter() {
+    thrown.expect(NullPointerException.class);
+    new HttpServerHandler<Object, Object, Object>(tracer, extractor, textFormat, null, false);
+  }
+
+  @Test
+  public void handleStartDisallowNullCarrier() {
+    thrown.expect(NullPointerException.class);
+    handler.handleStart(/*carrier=*/ null, request);
+  }
+
+  @Test
+  public void handleStartDisallowNullRequest() {
+    thrown.expect(NullPointerException.class);
+    handler.handleStart(carrier, /*request=*/ null);
+  }
+
+  @Test
+  public void handleStartShouldCreateChildSpanUnderParent() throws SpanContextParseException {
+    HttpRequestContext context = handler.handleStart(carrier, request);
+    verify(tracer).spanBuilderWithRemoteParent(any(String.class), same(spanContextRemote));
+    assertThat(context.span).isEqualTo(spanWithRemoteParent);
+  }
+
+  @Test
+  public void handleStartShouldIgnoreContextParseException() throws Exception {
+    when(textFormat.extract(same(carrier), same(textFormatGetter)))
+        .thenThrow(new SpanContextParseException("test"));
+    HttpRequestContext context = handler.handleStart(carrier, request);
+    verify(tracer).spanBuilderWithExplicitParent(any(String.class), any(Span.class));
+    assertThat(context.span).isEqualTo(spanWithLocalParent);
+  }
+
+  @Test
+  public void handleStartShouldExtractFromCarrier() throws SpanContextParseException {
+    handler.handleStart(carrier, request);
+    verify(textFormat).extract(same(carrier), same(textFormatGetter));
+  }
+
+  @Test
+  public void handleStartShouldSetKindToServer() throws SpanContextParseException {
+    handler.handleStart(carrier, request);
+    verify(spanBuilderWithRemoteParent).setSpanKind(kindArgumentCaptor.capture());
+
+    Kind kind = kindArgumentCaptor.getValue();
+    assertThat(kind).isEqualTo(Kind.SERVER);
+  }
+
+  @Test
+  public void handleStartWithPublicEndpointShouldAddLink() throws Exception {
+    handlerForPublicEndpoint.handleStart(carrier, request);
+    verify(tracer).spanBuilderWithExplicitParent(any(String.class), any(Span.class));
+    verify(spanWithLocalParent).addLink(captor.capture());
+
+    Link link = captor.getValue();
+    assertThat(link.getSpanId()).isEqualTo(spanContextRemote.getSpanId());
+    assertThat(link.getTraceId()).isEqualTo(spanContextRemote.getTraceId());
+    assertThat(link.getType()).isEqualTo(Type.PARENT_LINKED_SPAN);
+  }
+
+  @Test
+  public void handleEndDisallowNullRequest() {
+    thrown.expect(NullPointerException.class);
+    handler.handleEnd(context, null, response, null);
+  }
+
+  @Test
+  public void handleEndShouldEndSpan() {
+    HttpRequestContext context = new HttpRequestContext(spanWithLocalParent, tagContext);
+    when(extractor.getStatusCode(any(Object.class))).thenReturn(0);
+
+    handler.handleEnd(context, carrier, response, null);
+    verify(spanWithLocalParent).end(optionsCaptor.capture());
+    EndSpanOptions options = optionsCaptor.getValue();
+    assertThat(options).isEqualTo(EndSpanOptions.DEFAULT);
+  }
+}
diff --git a/contrib/http_util/src/test/java/io/opencensus/contrib/http/util/HttpTraceUtilTest.java b/contrib/http_util/src/test/java/io/opencensus/contrib/http/util/HttpTraceUtilTest.java
new file mode 100644
index 0000000..0e783b8
--- /dev/null
+++ b/contrib/http_util/src/test/java/io/opencensus/contrib/http/util/HttpTraceUtilTest.java
@@ -0,0 +1,206 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.util;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.trace.Status;
+import io.opencensus.trace.Status.CanonicalCode;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link HttpTraceUtil}. */
+@RunWith(JUnit4.class)
+public class HttpTraceUtilTest {
+
+  @Test
+  public void parseResponseStatusSucceed() {
+    assertThat(HttpTraceUtil.parseResponseStatus(201, null)).isEqualTo(Status.OK);
+  }
+
+  @Test
+  public void parseResponseStatusNoResponse() {
+    assertThat(HttpTraceUtil.parseResponseStatus(0, null).getDescription()).isEqualTo(null);
+    assertThat(HttpTraceUtil.parseResponseStatus(0, null).getCanonicalCode())
+        .isEqualTo(CanonicalCode.UNKNOWN);
+  }
+
+  @Test
+  public void parseResponseStatusErrorWithMessage() {
+    Throwable error = new Exception("testError");
+    assertThat(HttpTraceUtil.parseResponseStatus(0, error).getDescription()).isEqualTo("testError");
+  }
+
+  @Test
+  public void parseResponseStatusErrorWithoutMessage() {
+    Throwable error = new NullPointerException();
+    assertThat(HttpTraceUtil.parseResponseStatus(0, error).getDescription())
+        .isEqualTo("NullPointerException");
+  }
+
+  private static void parseResponseStatus(
+      int code, CanonicalCode expectedCanonicalCode, String expectedDesc) {
+    Status status = HttpTraceUtil.parseResponseStatus(code, null);
+    assertThat(status.getCanonicalCode()).isEqualTo(expectedCanonicalCode);
+    assertThat(status.getDescription()).isEqualTo(expectedDesc);
+  }
+
+  @Test
+  public void parseResponseStatusCode_100() {
+    parseResponseStatus(100, CanonicalCode.UNKNOWN, "Continue");
+  }
+
+  @Test
+  public void parseResponseStatusCode_101() {
+    parseResponseStatus(101, CanonicalCode.UNKNOWN, "Switching Protocols");
+  }
+
+  @Test
+  public void parseResponseStatusError_400() {
+    parseResponseStatus(400, CanonicalCode.INVALID_ARGUMENT, null);
+  }
+
+  @Test
+  public void parseResponseStatusError_401() {
+    parseResponseStatus(401, CanonicalCode.UNAUTHENTICATED, null);
+  }
+
+  @Test
+  public void parseResponseStatusError_402() {
+    parseResponseStatus(402, CanonicalCode.UNKNOWN, "Payment Required");
+  }
+
+  @Test
+  public void parseResponseStatusError_403() {
+    parseResponseStatus(403, CanonicalCode.PERMISSION_DENIED, null);
+  }
+
+  @Test
+  public void parseResponseStatusError_404() {
+    parseResponseStatus(404, CanonicalCode.NOT_FOUND, null);
+  }
+
+  @Test
+  public void parseResponseStatusError_405() {
+    parseResponseStatus(405, CanonicalCode.UNKNOWN, "Method Not Allowed");
+  }
+
+  @Test
+  public void parseResponseStatusError_406() {
+    parseResponseStatus(406, CanonicalCode.UNKNOWN, "Not Acceptable");
+  }
+
+  @Test
+  public void parseResponseStatusError_407() {
+    parseResponseStatus(407, CanonicalCode.UNKNOWN, "Proxy Authentication Required");
+  }
+
+  @Test
+  public void parseResponseStatusError_408() {
+    parseResponseStatus(408, CanonicalCode.UNKNOWN, "Request Time-out");
+  }
+
+  @Test
+  public void parseResponseStatusError_409() {
+    parseResponseStatus(409, CanonicalCode.UNKNOWN, "Conflict");
+  }
+
+  @Test
+  public void parseResponseStatusError_410() {
+    parseResponseStatus(410, CanonicalCode.UNKNOWN, "Gone");
+  }
+
+  @Test
+  public void parseResponseStatusError_411() {
+    parseResponseStatus(411, CanonicalCode.UNKNOWN, "Length Required");
+  }
+
+  @Test
+  public void parseResponseStatusError_412() {
+    parseResponseStatus(412, CanonicalCode.UNKNOWN, "Precondition Failed");
+  }
+
+  @Test
+  public void parseResponseStatusError_413() {
+    parseResponseStatus(413, CanonicalCode.UNKNOWN, "Request Entity Too Large");
+  }
+
+  @Test
+  public void parseResponseStatusError_414() {
+    parseResponseStatus(414, CanonicalCode.UNKNOWN, "Request-URI Too Large");
+  }
+
+  @Test
+  public void parseResponseStatusError_415() {
+    parseResponseStatus(415, CanonicalCode.UNKNOWN, "Unsupported Media Type");
+  }
+
+  @Test
+  public void parseResponseStatusError_416() {
+    parseResponseStatus(416, CanonicalCode.UNKNOWN, "Requested range not satisfiable");
+  }
+
+  @Test
+  public void parseResponseStatusError_417() {
+    parseResponseStatus(417, CanonicalCode.UNKNOWN, "Expectation Failed");
+  }
+
+  @Test
+  public void parseResponseStatusError_429() {
+    parseResponseStatus(429, CanonicalCode.RESOURCE_EXHAUSTED, null);
+  }
+
+  @Test
+  public void parseResponseStatusError_500() {
+    parseResponseStatus(500, CanonicalCode.UNKNOWN, "Internal Server Error");
+  }
+
+  @Test
+  public void parseResponseStatusError_501() {
+    parseResponseStatus(501, CanonicalCode.UNIMPLEMENTED, null);
+  }
+
+  @Test
+  public void parseResponseStatusError_502() {
+    parseResponseStatus(502, CanonicalCode.UNKNOWN, "Bad Gateway");
+  }
+
+  @Test
+  public void parseResponseStatusError_503() {
+    parseResponseStatus(503, CanonicalCode.UNAVAILABLE, null);
+  }
+
+  @Test
+  public void parseResponseStatusError_504() {
+    parseResponseStatus(504, CanonicalCode.DEADLINE_EXCEEDED, null);
+  }
+
+  @Test
+  public void parseResponseStatusError_505() {
+    parseResponseStatus(505, CanonicalCode.UNKNOWN, "HTTP Version not supported");
+  }
+
+  @Test
+  public void parseResponseStatusError_Others() {
+    // some random status code
+    assertThat(HttpTraceUtil.parseResponseStatus(434, null).getCanonicalCode())
+        .isEqualTo(CanonicalCode.UNKNOWN);
+    assertThat(HttpTraceUtil.parseResponseStatus(517, null).getCanonicalCode())
+        .isEqualTo(CanonicalCode.UNKNOWN);
+  }
+}
diff --git a/contrib/http_util/src/test/java/io/opencensus/contrib/http/util/HttpViewConstantsTest.java b/contrib/http_util/src/test/java/io/opencensus/contrib/http/util/HttpViewConstantsTest.java
index d008348..0df833b 100644
--- a/contrib/http_util/src/test/java/io/opencensus/contrib/http/util/HttpViewConstantsTest.java
+++ b/contrib/http_util/src/test/java/io/opencensus/contrib/http/util/HttpViewConstantsTest.java
@@ -38,7 +38,6 @@
                 .getBucketBoundaries()
                 .getBoundaries())
         .containsExactly(
-            0.0,
             1024.0,
             2048.0,
             4096.0,
@@ -59,9 +58,9 @@
                 .getBucketBoundaries()
                 .getBoundaries())
         .containsExactly(
-            0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 8.0, 10.0, 13.0, 16.0, 20.0, 25.0, 30.0, 40.0, 50.0,
-            65.0, 80.0, 100.0, 130.0, 160.0, 200.0, 250.0, 300.0, 400.0, 500.0, 650.0, 800.0,
-            1000.0, 2000.0, 5000.0, 10000.0, 20000.0, 50000.0, 100000.0)
+            1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 8.0, 10.0, 13.0, 16.0, 20.0, 25.0, 30.0, 40.0, 50.0, 65.0,
+            80.0, 100.0, 130.0, 160.0, 200.0, 250.0, 300.0, 400.0, 500.0, 650.0, 800.0, 1000.0,
+            2000.0, 5000.0, 10000.0, 20000.0, 50000.0, 100000.0)
         .inOrder();
 
     // Test views.
@@ -118,31 +117,35 @@
 
     assertThat(HttpViewConstants.HTTP_CLIENT_COMPLETED_COUNT_VIEW.getColumns())
         .containsExactly(
-            HttpMeasureConstants.HTTP_CLIENT_METHOD, HttpMeasureConstants.HTTP_CLIENT_PATH);
+            HttpMeasureConstants.HTTP_CLIENT_METHOD, HttpMeasureConstants.HTTP_CLIENT_STATUS);
     assertThat(HttpViewConstants.HTTP_CLIENT_SENT_BYTES_VIEW.getColumns())
         .containsExactly(
-            HttpMeasureConstants.HTTP_CLIENT_METHOD, HttpMeasureConstants.HTTP_CLIENT_PATH);
+            HttpMeasureConstants.HTTP_CLIENT_METHOD, HttpMeasureConstants.HTTP_CLIENT_STATUS);
     assertThat(HttpViewConstants.HTTP_CLIENT_RECEIVED_BYTES_VIEW.getColumns())
         .containsExactly(
-            HttpMeasureConstants.HTTP_CLIENT_METHOD, HttpMeasureConstants.HTTP_CLIENT_PATH);
+            HttpMeasureConstants.HTTP_CLIENT_METHOD, HttpMeasureConstants.HTTP_CLIENT_STATUS);
     assertThat(HttpViewConstants.HTTP_CLIENT_ROUNDTRIP_LATENCY_VIEW.getColumns())
         .containsExactly(
-            HttpMeasureConstants.HTTP_CLIENT_METHOD,
-            HttpMeasureConstants.HTTP_CLIENT_PATH,
-            HttpMeasureConstants.HTTP_CLIENT_STATUS);
+            HttpMeasureConstants.HTTP_CLIENT_METHOD, HttpMeasureConstants.HTTP_CLIENT_STATUS);
     assertThat(HttpViewConstants.HTTP_SERVER_COMPLETED_COUNT_VIEW.getColumns())
         .containsExactly(
-            HttpMeasureConstants.HTTP_SERVER_METHOD, HttpMeasureConstants.HTTP_SERVER_PATH);
+            HttpMeasureConstants.HTTP_SERVER_METHOD,
+            HttpMeasureConstants.HTTP_SERVER_ROUTE,
+            HttpMeasureConstants.HTTP_SERVER_STATUS);
     assertThat(HttpViewConstants.HTTP_SERVER_RECEIVED_BYTES_VIEW.getColumns())
         .containsExactly(
-            HttpMeasureConstants.HTTP_SERVER_METHOD, HttpMeasureConstants.HTTP_SERVER_PATH);
+            HttpMeasureConstants.HTTP_SERVER_METHOD,
+            HttpMeasureConstants.HTTP_SERVER_ROUTE,
+            HttpMeasureConstants.HTTP_SERVER_STATUS);
     assertThat(HttpViewConstants.HTTP_SERVER_SENT_BYTES_VIEW.getColumns())
         .containsExactly(
-            HttpMeasureConstants.HTTP_SERVER_METHOD, HttpMeasureConstants.HTTP_SERVER_PATH);
+            HttpMeasureConstants.HTTP_SERVER_METHOD,
+            HttpMeasureConstants.HTTP_SERVER_ROUTE,
+            HttpMeasureConstants.HTTP_SERVER_STATUS);
     assertThat(HttpViewConstants.HTTP_SERVER_LATENCY_VIEW.getColumns())
         .containsExactly(
             HttpMeasureConstants.HTTP_SERVER_METHOD,
-            HttpMeasureConstants.HTTP_SERVER_PATH,
+            HttpMeasureConstants.HTTP_SERVER_ROUTE,
             HttpMeasureConstants.HTTP_SERVER_STATUS);
   }
 }
diff --git a/contrib/http_util/src/test/java/io/opencensus/contrib/http/util/testing/FakeSpan.java b/contrib/http_util/src/test/java/io/opencensus/contrib/http/util/testing/FakeSpan.java
new file mode 100644
index 0000000..2351a2c
--- /dev/null
+++ b/contrib/http_util/src/test/java/io/opencensus/contrib/http/util/testing/FakeSpan.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.http.util.testing;
+
+import io.opencensus.common.Internal;
+import io.opencensus.trace.Annotation;
+import io.opencensus.trace.AttributeValue;
+import io.opencensus.trace.EndSpanOptions;
+import io.opencensus.trace.Link;
+import io.opencensus.trace.MessageEvent;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.Status;
+import java.util.EnumSet;
+import java.util.Map;
+
+/** A fake {@link Span} that allows the user to set the {@link SpanContext} upon initialization. */
+@Internal
+public class FakeSpan extends Span {
+  public FakeSpan(SpanContext context, EnumSet<Options> options) {
+    super(context, options);
+  }
+
+  @Override
+  public void putAttribute(String key, AttributeValue value) {}
+
+  @Override
+  public void putAttributes(Map<String, AttributeValue> attributes) {}
+
+  @Override
+  public void addAnnotation(String description, Map<String, AttributeValue> attributes) {}
+
+  @Override
+  public void addAnnotation(Annotation annotation) {}
+
+  @Override
+  public void addMessageEvent(MessageEvent messageEvent) {}
+
+  @Override
+  public void addLink(Link link) {}
+
+  @Override
+  public void setStatus(Status status) {}
+
+  @Override
+  public void end(EndSpanOptions options) {}
+}
diff --git a/contrib/log_correlation/stackdriver/README.md b/contrib/log_correlation/stackdriver/README.md
index 8d99ff2..17636f7 100644
--- a/contrib/log_correlation/stackdriver/README.md
+++ b/contrib/log_correlation/stackdriver/README.md
@@ -1,18 +1,12 @@
 # OpenCensus Stackdriver Log Correlation
 
-This subproject is currently experimental, so it may be redesigned or removed in the future.  It
-will remain experimental until we have a specification for a log correlation feature in
-[opencensus-specs](https://github.com/census-instrumentation/opencensus-specs/)
-(issue [#123](https://github.com/census-instrumentation/opencensus-specs/issues/123)).
-
 The `opencensus-contrib-log-correlation-stackdriver` artifact provides a
 [Stackdriver Logging](https://cloud.google.com/logging/)
 [`LoggingEnhancer`](http://googlecloudplatform.github.io/google-cloud-java/google-cloud-clients/apidocs/com/google/cloud/logging/LoggingEnhancer.html)
 that automatically adds tracing data to log entries. The class name is
-`OpenCensusTraceLoggingEnhancer`. `OpenCensusTraceLoggingEnhancer` adds the current trace and span
-ID to each log entry, which allows Stackdriver to display the log entries associated with each
-trace, or filter logs based on trace or span ID. It currently also adds the sampling decision using
-the label "`opencensusTraceSampled`".
+`OpenCensusTraceLoggingEnhancer`. `OpenCensusTraceLoggingEnhancer` adds the current trace ID, span
+ID, and sampling decision to each log entry, which allows Stackdriver to display the log entries
+associated with each trace, or filter logs based on trace or span ID.
 
 ## Instructions
 
@@ -35,7 +29,7 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-contrib-log-correlation-stackdriver</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
     <scope>runtime</scope>
   </dependency>
 </dependencies>
@@ -43,7 +37,7 @@
 
 For Gradle add to your dependencies:
 ```groovy
-runtime 'io.opencensus:opencensus-contrib-log-correlation-stackdriver:0.16.1'
+runtime 'io.opencensus:opencensus-contrib-log-correlation-stackdriver:0.28.3'
 ```
 
 ### Configure the `OpenCensusTraceLoggingEnhancer`
@@ -59,21 +53,6 @@
 
 `io.opencensus.contrib.logcorrelation.stackdriver.OpenCensusTraceLoggingEnhancer.projectId`
 
-#### Choosing when to add tracing data to log entries
-
-The following property controls the decision to add tracing data from the current span to a log
-entry:
-
-`io.opencensus.contrib.logcorrelation.stackdriver.OpenCensusTraceLoggingEnhancer.spanSelection`
-
-The allowed values are:
-
-* `ALL_SPANS`: adds tracing data to all log entries (default)
-
-* `NO_SPANS`: disables the log correlation feature
-
-* `SAMPLED_SPANS`: adds tracing data to log entries when the current span is sampled
-
 Other aspects of configuring the `OpenCensusTraceLoggingEnhancer` depend on the logging
 implementation and `google-cloud-logging` adapter in use.
 
@@ -82,16 +61,14 @@
 The `LoggingAppender` should already be configured in `logback.xml` as described in
 https://cloud.google.com/logging/docs/setup/java#logback_appender. Add
 "`io.opencensus.contrib.logcorrelation.stackdriver.OpenCensusTraceLoggingEnhancer`" to the list of
-enhancers. Optionally, set the `spanSelection` and `projectId` properties described above as system
-properties.
+enhancers. Optionally, set the `projectId` property described above as a system property.
 
 Here is an example `logback.xml`, based on the
 [`google-cloud-logging-logback` example](https://github.com/GoogleCloudPlatform/java-docs-samples/blob/a2b04b20d81ee631439a9368fb99b44849519e28/logging/logback/src/main/resources/logback.xml).
-It specifies the `LoggingEnhancer` class and sets both optional properties:
+It specifies the `LoggingEnhancer` class and sets the optional project ID property:
 
 ```xml
 <configuration>
-  <property scope="system" name="io.opencensus.contrib.logcorrelation.stackdriver.OpenCensusTraceLoggingEnhancer.spanSelection" value="SAMPLED_SPANS" />
   <property scope="system" name="io.opencensus.contrib.logcorrelation.stackdriver.OpenCensusTraceLoggingEnhancer.projectId" value="my-project-id" />
   <appender name="CLOUD" class="com.google.cloud.logging.logback.LoggingAppender">
     <enhancer>io.opencensus.contrib.logcorrelation.stackdriver.OpenCensusTraceLoggingEnhancer</enhancer>
@@ -112,12 +89,11 @@
 The `LoggingHandler` should already be configured in a logging `.properties` file, as described in
 https://cloud.google.com/logging/docs/setup/java#jul_handler. Add
 "`io.opencensus.contrib.logcorrelation.stackdriver.OpenCensusTraceLoggingEnhancer`" to the list of
-enhancers. Optionally, set the `spanSelection` and `projectId` properties described above in the
-properties file.
+enhancers. Optionally, set the `projectId` property described above in the properties file.
 
 Here is an example `.properties` file, based on the
 [`google-cloud-logging` example](https://github.com/GoogleCloudPlatform/java-docs-samples/blob/a2b04b20d81ee631439a9368fb99b44849519e28/logging/jul/src/main/resources/logging.properties).
-It specifies the `LoggingEnhancer` class and sets both optional properties:
+It specifies the `LoggingEnhancer` class and sets the optional project ID property:
 
 ```properties
 .level = INFO
@@ -125,7 +101,6 @@
 com.example.MyClass.handlers=com.google.cloud.logging.LoggingHandler
 
 com.google.cloud.logging.LoggingHandler.enhancers=io.opencensus.contrib.logcorrelation.stackdriver.OpenCensusTraceLoggingEnhancer
-io.opencensus.contrib.logcorrelation.stackdriver.OpenCensusTraceLoggingEnhancer.spanSelection=SAMPLED_SPANS
 io.opencensus.contrib.logcorrelation.stackdriver.OpenCensusTraceLoggingEnhancer.projectId=my-project-id
 ```
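Editor's note: as a sanity check, here is a minimal sketch (not part of the patch) of application code emitting a correlated log line once the `java.util.logging` handler and enhancer above are configured. The class name, span name, and log message are illustrative only.

```java
import io.opencensus.common.Scope;
import io.opencensus.trace.Tracer;
import io.opencensus.trace.Tracing;
import io.opencensus.trace.samplers.Samplers;
import java.util.logging.Logger;

public final class CorrelatedLoggingExample {
  private static final Logger logger =
      Logger.getLogger(CorrelatedLoggingExample.class.getName());
  private static final Tracer tracer = Tracing.getTracer();

  public static void main(String[] args) {
    // Entries logged while this span is current are tagged by the enhancer with the
    // trace ID, span ID, and sampling decision, so Stackdriver can group them by trace.
    try (Scope scope =
        tracer
            .spanBuilder("correlated-work")
            .setSampler(Samplers.alwaysSample())
            .startScopedSpan()) {
      logger.info("This entry is correlated with the current trace.");
    }
  }
}
```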
 
diff --git a/contrib/log_correlation/stackdriver/build.gradle b/contrib/log_correlation/stackdriver/build.gradle
index 4d8a298..67cd265 100644
--- a/contrib/log_correlation/stackdriver/build.gradle
+++ b/contrib/log_correlation/stackdriver/build.gradle
@@ -4,7 +4,21 @@
 
 dependencies {
     compile project(':opencensus-api'),
-            libraries.google_cloud_logging
+            libraries.grpc_auth,
+            libraries.grpc_core,
+            libraries.grpc_netty_shaded,
+            libraries.grpc_stub
+
+    compile (libraries.google_cloud_logging) {
+        // Prefer library version.
+        exclude group: 'io.grpc', module: 'grpc-auth'
+        exclude group: 'io.grpc', module: 'grpc-core'
+        exclude group: 'io.grpc', module: 'grpc-netty-shaded'
+        exclude group: 'io.grpc', module: 'grpc-stub'
+
+        // We will always be more up to date.
+        exclude group: 'io.opencensus', module: 'opencensus-api'
+    }
 
     testCompile libraries.guava
 
diff --git a/contrib/log_correlation/stackdriver/src/main/java/io/opencensus/contrib/logcorrelation/stackdriver/OpenCensusTraceLoggingEnhancer.java b/contrib/log_correlation/stackdriver/src/main/java/io/opencensus/contrib/logcorrelation/stackdriver/OpenCensusTraceLoggingEnhancer.java
index 5c3e21f..95f72d0 100644
--- a/contrib/log_correlation/stackdriver/src/main/java/io/opencensus/contrib/logcorrelation/stackdriver/OpenCensusTraceLoggingEnhancer.java
+++ b/contrib/log_correlation/stackdriver/src/main/java/io/opencensus/contrib/logcorrelation/stackdriver/OpenCensusTraceLoggingEnhancer.java
@@ -19,112 +19,57 @@
 import com.google.cloud.ServiceOptions;
 import com.google.cloud.logging.LogEntry;
 import com.google.cloud.logging.LoggingEnhancer;
-import io.opencensus.common.ExperimentalApi;
 import io.opencensus.trace.Span;
 import io.opencensus.trace.SpanContext;
 import io.opencensus.trace.TraceId;
-import io.opencensus.trace.unsafe.ContextUtils;
+import io.opencensus.trace.unsafe.ContextHandleUtils;
 import java.util.logging.LogManager;
 import javax.annotation.Nullable;
 
 /**
  * Stackdriver {@link LoggingEnhancer} that adds OpenCensus tracing data to log entries.
  *
- * <p>This feature is currently experimental.
- *
- * @since 0.15
+ * @since 0.17
  */
-@ExperimentalApi
 public final class OpenCensusTraceLoggingEnhancer implements LoggingEnhancer {
-  private static final String SAMPLED_LABEL_KEY = "opencensusTraceSampled";
-  private static final SpanSelection DEFAULT_SPAN_SELECTION = SpanSelection.ALL_SPANS;
 
   /**
    * Name of the property that overrides the default project ID (overrides the value returned by
    * {@code com.google.cloud.ServiceOptions.getDefaultProjectId()}). The name is {@value}.
    *
-   * @since 0.15
+   * @since 0.17
    */
   public static final String PROJECT_ID_PROPERTY_NAME =
       "io.opencensus.contrib.logcorrelation.stackdriver.OpenCensusTraceLoggingEnhancer.projectId";
 
-  /**
-   * Name of the property that defines the {@link SpanSelection}. The name is {@value}.
-   *
-   * @since 0.15
-   */
-  public static final String SPAN_SELECTION_PROPERTY_NAME =
-      "io.opencensus.contrib.logcorrelation.stackdriver."
-          + "OpenCensusTraceLoggingEnhancer.spanSelection";
-
-  private final String projectId;
-  private final SpanSelection spanSelection;
+  @Nullable private final String projectId;
 
   // This field caches the prefix used for the LogEntry.trace field and is derived from projectId.
   private final String tracePrefix;
 
   /**
-   * How to decide whether to add tracing data from the current span to a log entry.
-   *
-   * @since 0.15
-   */
-  public enum SpanSelection {
-
-    /**
-     * Never add tracing data to log entries. This constant disables the log correlation feature.
-     *
-     * @since 0.15
-     */
-    NO_SPANS,
-
-    /**
-     * Add tracing data to a log entry iff the current span is sampled.
-     *
-     * @since 0.15
-     */
-    SAMPLED_SPANS,
-
-    /**
-     * Always add tracing data to log entries, even when the current span is not sampled. This is
-     * the default.
-     *
-     * @since 0.15
-     */
-    ALL_SPANS
-  }
-
-  /**
    * Constructor to be called by reflection, e.g., by a google-cloud-java {@code LoggingHandler} or
    * google-cloud-logging-logback {@code LoggingAppender}.
    *
-   * <p>This constructor looks up the project ID and {@link SpanSelection SpanSelection} from the
-   * environment. It uses the default project ID (the value returned by {@code
-   * com.google.cloud.ServiceOptions.getDefaultProjectId()}), unless the ID is overridden by the
-   * property {@value #PROJECT_ID_PROPERTY_NAME}. It looks up the {@code SpanSelection} using the
-   * property {@value #SPAN_SELECTION_PROPERTY_NAME}. Each property can be specified with a {@link
-   * java.util.logging} property or a system property, with preference given to the logging
-   * property.
+   * <p>This constructor looks up the project ID from the environment. It uses the default project
+   * ID (the value returned by {@code com.google.cloud.ServiceOptions.getDefaultProjectId()}),
+   * unless the ID is overridden by the property {@value #PROJECT_ID_PROPERTY_NAME}. The property
+   * can be specified with a {@link java.util.logging} property or a system property, with
+   * preference given to the logging property.
    *
-   * @since 0.15
+   * @since 0.17
    */
   public OpenCensusTraceLoggingEnhancer() {
-    this(lookUpProjectId(), lookUpSpanSelectionProperty());
+    this(lookUpProjectId());
   }
 
-  /**
-   * Constructs a {@code OpenCensusTraceLoggingEnhancer} with the given project ID and {@code
-   * SpanSelection}.
-   *
-   * @param projectId the project ID for this instance.
-   * @param spanSelection the {@code SpanSelection} for this instance.
-   * @since 0.15
-   */
-  public OpenCensusTraceLoggingEnhancer(@Nullable String projectId, SpanSelection spanSelection) {
-    this.projectId = projectId == null ? "" : projectId;
-    this.spanSelection = spanSelection;
-    this.tracePrefix = "projects/" + this.projectId + "/traces/";
+  // visible for testing
+  OpenCensusTraceLoggingEnhancer(@Nullable String projectId) {
+    this.projectId = projectId;
+    this.tracePrefix = "projects/" + (projectId == null ? "" : projectId) + "/traces/";
   }
 
+  @Nullable
   private static String lookUpProjectId() {
     String projectIdProperty = lookUpProperty(PROJECT_ID_PROPERTY_NAME);
     return projectIdProperty == null || projectIdProperty.isEmpty()
@@ -132,21 +77,6 @@
         : projectIdProperty;
   }
 
-  private static SpanSelection lookUpSpanSelectionProperty() {
-    String spanSelectionProperty = lookUpProperty(SPAN_SELECTION_PROPERTY_NAME);
-    return spanSelectionProperty == null || spanSelectionProperty.isEmpty()
-        ? DEFAULT_SPAN_SELECTION
-        : parseSpanSelection(spanSelectionProperty);
-  }
-
-  private static SpanSelection parseSpanSelection(String spanSelection) {
-    try {
-      return SpanSelection.valueOf(spanSelection);
-    } catch (IllegalArgumentException e) {
-      return DEFAULT_SPAN_SELECTION;
-    }
-  }
-
   // An OpenCensusTraceLoggingEnhancer property can be set with a logging property or a system
   // property.
   @Nullable
@@ -155,47 +85,20 @@
     return property == null || property.isEmpty() ? System.getProperty(name) : property;
   }
 
-  /**
-   * Returns the project ID setting for this instance.
-   *
-   * @return the project ID setting for this instance.
-   * @since 0.15
-   */
-  public String getProjectId() {
+  // visible for testing
+  @Nullable
+  String getProjectId() {
     return projectId;
   }
 
-  /**
-   * Returns the {@code SpanSelection} setting for this instance.
-   *
-   * @return the {@code SpanSelection} setting for this instance.
-   * @since 0.15
-   */
-  public SpanSelection getSpanSelection() {
-    return spanSelection;
-  }
-
   // This method avoids getting the current span when the feature is disabled, for efficiency.
   @Override
   public void enhanceLogEntry(LogEntry.Builder builder) {
-    switch (spanSelection) {
-      case NO_SPANS:
-        return;
-      case SAMPLED_SPANS:
-        SpanContext span = getCurrentSpanContext();
-        if (span.getTraceOptions().isSampled()) {
-          addTracingData(tracePrefix, span, builder);
-        }
-        return;
-      case ALL_SPANS:
-        addTracingData(tracePrefix, getCurrentSpanContext(), builder);
-        return;
-    }
-    throw new AssertionError("Unknown spanSelection: " + spanSelection);
+    addTracingData(tracePrefix, getCurrentSpanContext(), builder);
   }
 
   private static SpanContext getCurrentSpanContext() {
-    Span span = ContextUtils.CONTEXT_SPAN_KEY.get();
+    Span span = ContextHandleUtils.getValue(ContextHandleUtils.currentContext());
     return span == null ? SpanContext.INVALID : span.getContext();
   }
 
@@ -203,9 +106,7 @@
       String tracePrefix, SpanContext span, LogEntry.Builder builder) {
     builder.setTrace(formatTraceId(tracePrefix, span.getTraceId()));
     builder.setSpanId(span.getSpanId().toLowerBase16());
-
-    // TODO(sebright): Find the correct way to add the sampling decision.
-    builder.addLabel(SAMPLED_LABEL_KEY, Boolean.toString(span.getTraceOptions().isSampled()));
+    builder.setTraceSampled(span.getTraceOptions().isSampled());
   }
 
   private static String formatTraceId(String tracePrefix, TraceId traceId) {
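Editor's note: for orientation, a minimal sketch (not part of the patch) of what the reworked enhancer writes into a log entry. The payload text and the println are illustrative only; the public no-arg constructor resolves the project ID from the environment as documented above.

```java
import com.google.cloud.logging.LogEntry;
import com.google.cloud.logging.Payload.StringPayload;
import io.opencensus.contrib.logcorrelation.stackdriver.OpenCensusTraceLoggingEnhancer;

public final class EnhancerSketch {
  public static void main(String[] args) {
    LogEntry.Builder builder = LogEntry.newBuilder(StringPayload.of("hello"));
    // enhanceLogEntry sets LogEntry.trace to "projects/<projectId>/traces/<traceId>",
    // LogEntry.spanId, and (since this change) LogEntry.traceSampled, based on the
    // span in the current context (SpanContext.INVALID when no span is current).
    new OpenCensusTraceLoggingEnhancer().enhanceLogEntry(builder);
    LogEntry entry = builder.build();
    System.out.println(entry.getTrace() + " " + entry.getSpanId() + " " + entry.getTraceSampled());
  }
}
```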
diff --git a/contrib/log_correlation/stackdriver/src/test/java/io/opencensus/contrib/logcorrelation/stackdriver/OpenCensusTraceLoggingEnhancerTest.java b/contrib/log_correlation/stackdriver/src/test/java/io/opencensus/contrib/logcorrelation/stackdriver/OpenCensusTraceLoggingEnhancerTest.java
index c116f09..ef2a89f 100644
--- a/contrib/log_correlation/stackdriver/src/test/java/io/opencensus/contrib/logcorrelation/stackdriver/OpenCensusTraceLoggingEnhancerTest.java
+++ b/contrib/log_correlation/stackdriver/src/test/java/io/opencensus/contrib/logcorrelation/stackdriver/OpenCensusTraceLoggingEnhancerTest.java
@@ -17,13 +17,14 @@
 package io.opencensus.contrib.logcorrelation.stackdriver;
 
 import static com.google.common.truth.Truth.assertThat;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 import com.google.cloud.logging.LogEntry;
 import com.google.cloud.logging.LoggingEnhancer;
 import com.google.common.base.Charsets;
 import com.google.common.io.CharSource;
 import io.opencensus.common.Scope;
-import io.opencensus.contrib.logcorrelation.stackdriver.OpenCensusTraceLoggingEnhancer.SpanSelection;
 import io.opencensus.trace.Annotation;
 import io.opencensus.trace.AttributeValue;
 import io.opencensus.trace.BlankSpan;
@@ -58,105 +59,45 @@
   private static final Tracer tracer = Tracing.getTracer();
 
   @Test
-  public void enhanceLogEntry_DoNotAddSampledSpanToLogEntryWithNoSpans() {
+  public void enhanceLogEntry_AddSampledSpanToLogEntry() {
     LogEntry logEntry =
         getEnhancedLogEntry(
-            new OpenCensusTraceLoggingEnhancer("my-test-project-1", SpanSelection.NO_SPANS),
-            new TestSpan(
-                SpanContext.create(
-                    TraceId.fromLowerBase16("3da31be987098abb08c71c7700d2680e"),
-                    SpanId.fromLowerBase16("51b109f15e0d3881"),
-                    TraceOptions.builder().setIsSampled(true).build(),
-                    EMPTY_TRACESTATE)));
-    assertContainsNoTracingData(logEntry);
-  }
-
-  @Test
-  public void enhanceLogEntry_AddSampledSpanToLogEntryWithSampledSpans() {
-    LogEntry logEntry =
-        getEnhancedLogEntry(
-            new OpenCensusTraceLoggingEnhancer("my-test-project-2", SpanSelection.SAMPLED_SPANS),
-            new TestSpan(
-                SpanContext.create(
-                    TraceId.fromLowerBase16("4c9874d0b41224cce77ff74ee10f5ee6"),
-                    SpanId.fromLowerBase16("592ae363e92cb3dd"),
-                    TraceOptions.builder().setIsSampled(true).build(),
-                    EMPTY_TRACESTATE)));
-    assertThat(logEntry.getLabels()).containsEntry("opencensusTraceSampled", "true");
-    assertThat(logEntry.getTrace())
-        .isEqualTo("projects/my-test-project-2/traces/4c9874d0b41224cce77ff74ee10f5ee6");
-    assertThat(logEntry.getSpanId()).isEqualTo("592ae363e92cb3dd");
-  }
-
-  @Test
-  public void enhanceLogEntry_AddSampledSpanToLogEntryWithAllSpans() {
-    LogEntry logEntry =
-        getEnhancedLogEntry(
-            new OpenCensusTraceLoggingEnhancer("my-test-project-3", SpanSelection.ALL_SPANS),
+            new OpenCensusTraceLoggingEnhancer("my-test-project-3"),
             new TestSpan(
                 SpanContext.create(
                     TraceId.fromLowerBase16("4c6af40c499951eb7de2777ba1e4fefa"),
                     SpanId.fromLowerBase16("de52e84d13dd232d"),
                     TraceOptions.builder().setIsSampled(true).build(),
                     EMPTY_TRACESTATE)));
-    assertThat(logEntry.getLabels()).containsEntry("opencensusTraceSampled", "true");
+    assertTrue(logEntry.getTraceSampled());
     assertThat(logEntry.getTrace())
         .isEqualTo("projects/my-test-project-3/traces/4c6af40c499951eb7de2777ba1e4fefa");
     assertThat(logEntry.getSpanId()).isEqualTo("de52e84d13dd232d");
   }
 
   @Test
-  public void enhanceLogEntry_DoNotAddNonSampledSpanToLogEntryWithNoSpans() {
+  public void enhanceLogEntry_AddNonSampledSpanToLogEntry() {
     LogEntry logEntry =
         getEnhancedLogEntry(
-            new OpenCensusTraceLoggingEnhancer("my-test-project-4", SpanSelection.NO_SPANS),
-            new TestSpan(
-                SpanContext.create(
-                    TraceId.fromLowerBase16("88ab22b18b97369df065ca830e41cf6a"),
-                    SpanId.fromLowerBase16("8987d372039021fd"),
-                    TraceOptions.builder().setIsSampled(false).build(),
-                    EMPTY_TRACESTATE)));
-    assertContainsNoTracingData(logEntry);
-  }
-
-  @Test
-  public void enhanceLogEntry_DoNotAddNonSampledSpanToLogEntryWithSampledSpans() {
-    LogEntry logEntry =
-        getEnhancedLogEntry(
-            new OpenCensusTraceLoggingEnhancer("my-test-project-5", SpanSelection.SAMPLED_SPANS),
-            new TestSpan(
-                SpanContext.create(
-                    TraceId.fromLowerBase16("7f4703d9bb02f4f2e67fb840103cdd34"),
-                    SpanId.fromLowerBase16("2d7d95a555557434"),
-                    TraceOptions.builder().setIsSampled(false).build(),
-                    EMPTY_TRACESTATE)));
-    assertContainsNoTracingData(logEntry);
-  }
-
-  @Test
-  public void enhanceLogEntry_AddNonSampledSpanToLogEntryWithAllSpans() {
-    LogEntry logEntry =
-        getEnhancedLogEntry(
-            new OpenCensusTraceLoggingEnhancer("my-test-project-6", SpanSelection.ALL_SPANS),
+            new OpenCensusTraceLoggingEnhancer("my-test-project-6"),
             new TestSpan(
                 SpanContext.create(
                     TraceId.fromLowerBase16("72c905c76f99e99974afd84dc053a480"),
                     SpanId.fromLowerBase16("731e102335b7a5a0"),
                     TraceOptions.builder().setIsSampled(false).build(),
                     EMPTY_TRACESTATE)));
-    assertThat(logEntry.getLabels()).containsEntry("opencensusTraceSampled", "false");
+    assertFalse(logEntry.getTraceSampled());
     assertThat(logEntry.getTrace())
         .isEqualTo("projects/my-test-project-6/traces/72c905c76f99e99974afd84dc053a480");
     assertThat(logEntry.getSpanId()).isEqualTo("731e102335b7a5a0");
   }
 
   @Test
-  public void enhanceLogEntry_AddBlankSpanToLogEntryWithAllSpans() {
+  public void enhanceLogEntry_AddBlankSpanToLogEntry() {
     LogEntry logEntry =
         getEnhancedLogEntry(
-            new OpenCensusTraceLoggingEnhancer("my-test-project-7", SpanSelection.ALL_SPANS),
-            BlankSpan.INSTANCE);
-    assertThat(logEntry.getLabels().get("opencensusTraceSampled")).isEqualTo("false");
+            new OpenCensusTraceLoggingEnhancer("my-test-project-7"), BlankSpan.INSTANCE);
+    assertFalse(logEntry.getTraceSampled());
     assertThat(logEntry.getTrace())
         .isEqualTo("projects/my-test-project-7/traces/00000000000000000000000000000000");
     assertThat(logEntry.getSpanId()).isEqualTo("0000000000000000");
@@ -166,7 +107,7 @@
   public void enhanceLogEntry_ConvertNullProjectIdToEmptyString() {
     LogEntry logEntry =
         getEnhancedLogEntry(
-            new OpenCensusTraceLoggingEnhancer(null, SpanSelection.ALL_SPANS),
+            new OpenCensusTraceLoggingEnhancer(null),
             new TestSpan(
                 SpanContext.create(
                     TraceId.fromLowerBase16("bfb4248a24325a905873a1d43001d9a0"),
@@ -187,18 +128,6 @@
     }
   }
 
-  private static void assertContainsNoTracingData(LogEntry logEntry) {
-    assertThat(logEntry.getLabels()).doesNotContainKey("opencensusTraceSampled");
-    assertThat(logEntry.getTrace()).isNull();
-    assertThat(logEntry.getSpanId()).isNull();
-  }
-
-  @Test
-  public void spanSelectionDefaultIsAllSpans() {
-    assertThat(new OpenCensusTraceLoggingEnhancer().getSpanSelection())
-        .isEqualTo(SpanSelection.ALL_SPANS);
-  }
-
   @Test
   @SuppressWarnings("TruthConstantAsserts")
   public void projectIdPropertyName() {
@@ -207,13 +136,6 @@
   }
 
   @Test
-  @SuppressWarnings("TruthConstantAsserts")
-  public void spanSelectionPropertyName() {
-    assertThat(OpenCensusTraceLoggingEnhancer.SPAN_SELECTION_PROPERTY_NAME)
-        .isEqualTo(OpenCensusTraceLoggingEnhancer.class.getName() + ".spanSelection");
-  }
-
-  @Test
   public void setProjectIdWithGoogleCloudJava() {
     try {
       System.setProperty(GOOGLE_CLOUD_PROJECT, "my-project-id");
@@ -241,17 +163,6 @@
   }
 
   @Test
-  public void setSpanSelectionWithSystemProperty() {
-    try {
-      System.setProperty(OpenCensusTraceLoggingEnhancer.SPAN_SELECTION_PROPERTY_NAME, "NO_SPANS");
-      assertThat(new OpenCensusTraceLoggingEnhancer().getSpanSelection())
-          .isEqualTo(SpanSelection.NO_SPANS);
-    } finally {
-      System.clearProperty(OpenCensusTraceLoggingEnhancer.SPAN_SELECTION_PROPERTY_NAME);
-    }
-  }
-
-  @Test
   public void overrideProjectIdWithLoggingProperty() throws IOException {
     try {
       LogManager.getLogManager()
@@ -271,33 +182,19 @@
   }
 
   @Test
-  public void setSpanSelectionWithLoggingProperty() throws IOException {
-    try {
-      LogManager.getLogManager()
-          .readConfiguration(
-              stringToInputStream(
-                  OpenCensusTraceLoggingEnhancer.SPAN_SELECTION_PROPERTY_NAME + "=SAMPLED_SPANS"));
-      assertThat(new OpenCensusTraceLoggingEnhancer().getSpanSelection())
-          .isEqualTo(SpanSelection.SAMPLED_SPANS);
-    } finally {
-      LogManager.getLogManager().reset();
-    }
-  }
-
-  @Test
   public void loggingPropertyTakesPrecedenceOverSystemProperty() throws IOException {
     try {
       LogManager.getLogManager()
           .readConfiguration(
               stringToInputStream(
-                  OpenCensusTraceLoggingEnhancer.SPAN_SELECTION_PROPERTY_NAME + "=NO_SPANS"));
+                  OpenCensusTraceLoggingEnhancer.PROJECT_ID_PROPERTY_NAME + "=logging property"));
       try {
         System.setProperty(
-            OpenCensusTraceLoggingEnhancer.SPAN_SELECTION_PROPERTY_NAME, "SAMPLED_SPANS");
-        assertThat(new OpenCensusTraceLoggingEnhancer().getSpanSelection())
-            .isEqualTo(SpanSelection.NO_SPANS);
+            OpenCensusTraceLoggingEnhancer.PROJECT_ID_PROPERTY_NAME, "system property");
+        assertThat(new OpenCensusTraceLoggingEnhancer().getProjectId())
+            .isEqualTo("logging property");
       } finally {
-        System.clearProperty(OpenCensusTraceLoggingEnhancer.SPAN_SELECTION_PROPERTY_NAME);
+        System.clearProperty(OpenCensusTraceLoggingEnhancer.PROJECT_ID_PROPERTY_NAME);
       }
     } finally {
       LogManager.getLogManager().reset();
@@ -308,18 +205,6 @@
     return CharSource.wrap(contents).asByteSource(Charsets.UTF_8).openBufferedStream();
   }
 
-  @Test
-  public void useDefaultValueForInvalidSpanSelection() {
-    try {
-      System.setProperty(
-          OpenCensusTraceLoggingEnhancer.SPAN_SELECTION_PROPERTY_NAME, "INVALID_SPAN_SELECTION");
-      assertThat(new OpenCensusTraceLoggingEnhancer().getSpanSelection())
-          .isEqualTo(SpanSelection.ALL_SPANS);
-    } finally {
-      System.clearProperty(OpenCensusTraceLoggingEnhancer.SPAN_SELECTION_PROPERTY_NAME);
-    }
-  }
-
   private static final class TestSpan extends Span {
     TestSpan(SpanContext context) {
       super(context, EnumSet.of(Options.RECORD_EVENTS));
diff --git a/contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/MonitoredResource.java b/contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/MonitoredResource.java
deleted file mode 100644
index c828906..0000000
--- a/contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/MonitoredResource.java
+++ /dev/null
@@ -1,305 +0,0 @@
-/*
- * Copyright 2018, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.contrib.monitoredresource.util;
-
-import com.google.auto.value.AutoValue;
-import javax.annotation.Nullable;
-import javax.annotation.concurrent.Immutable;
-
-/**
- * {@link MonitoredResource} represents an auto-detected monitored resource used by application for
- * exporting stats. It has a {@code ResourceType} associated with a mapping from resource labels to
- * values.
- *
- * @since 0.13
- */
-@Immutable
-public abstract class MonitoredResource {
-
-  MonitoredResource() {}
-
-  /**
-   * Returns the {@link ResourceType} of this {@link MonitoredResource}.
-   *
-   * @return the {@code ResourceType}.
-   * @since 0.13
-   */
-  public abstract ResourceType getResourceType();
-
-  /*
-   * Returns the first of two given parameters that is not null, if either is, or otherwise
-   * throws a NullPointerException.
-   */
-  private static <T> T firstNonNull(@Nullable T first, @Nullable T second) {
-    if (first != null) {
-      return first;
-    }
-    if (second != null) {
-      return second;
-    }
-    throw new NullPointerException("Both parameters are null");
-  }
-
-  // TODO(songya): consider using a tagged union match() approach (that will introduce
-  // dependency on opencensus-api).
-
-  /**
-   * {@link MonitoredResource} for AWS EC2 instance.
-   *
-   * @since 0.13
-   */
-  @Immutable
-  @AutoValue
-  public abstract static class AwsEc2InstanceMonitoredResource extends MonitoredResource {
-
-    private static final String AWS_ACCOUNT =
-        firstNonNull(AwsIdentityDocUtils.getValueFromAwsIdentityDocument("accountId"), "");
-    private static final String AWS_INSTANCE_ID =
-        firstNonNull(AwsIdentityDocUtils.getValueFromAwsIdentityDocument("instanceId"), "");
-    private static final String AWS_REGION =
-        firstNonNull(AwsIdentityDocUtils.getValueFromAwsIdentityDocument("region"), "");
-
-    @Override
-    public ResourceType getResourceType() {
-      return ResourceType.AWS_EC2_INSTANCE;
-    }
-
-    /**
-     * Returns the AWS account ID.
-     *
-     * @return the AWS account ID.
-     * @since 0.13
-     */
-    public abstract String getAccount();
-
-    /**
-     * Returns the AWS EC2 instance ID.
-     *
-     * @return the AWS EC2 instance ID.
-     * @since 0.13
-     */
-    public abstract String getInstanceId();
-
-    /**
-     * Returns the AWS region.
-     *
-     * @return the AWS region.
-     * @since 0.13
-     */
-    public abstract String getRegion();
-
-    /**
-     * Returns an {@link AwsEc2InstanceMonitoredResource}.
-     *
-     * @param account the AWS account ID.
-     * @param instanceId the AWS EC2 instance ID.
-     * @param region the AWS region.
-     * @return an {@code AwsEc2InstanceMonitoredResource}.
-     * @since 0.15
-     */
-    public static AwsEc2InstanceMonitoredResource create(
-        String account, String instanceId, String region) {
-      return new AutoValue_MonitoredResource_AwsEc2InstanceMonitoredResource(
-          account, instanceId, region);
-    }
-
-    static AwsEc2InstanceMonitoredResource create() {
-      return create(AWS_ACCOUNT, AWS_INSTANCE_ID, AWS_REGION);
-    }
-  }
-
-  /**
-   * {@link MonitoredResource} for GCP GCE instance.
-   *
-   * @since 0.13
-   */
-  @Immutable
-  @AutoValue
-  public abstract static class GcpGceInstanceMonitoredResource extends MonitoredResource {
-
-    private static final String GCP_ACCOUNT_ID = firstNonNull(GcpMetadataConfig.getProjectId(), "");
-    private static final String GCP_INSTANCE_ID =
-        firstNonNull(GcpMetadataConfig.getInstanceId(), "");
-    private static final String GCP_ZONE = firstNonNull(GcpMetadataConfig.getZone(), "");
-
-    @Override
-    public ResourceType getResourceType() {
-      return ResourceType.GCP_GCE_INSTANCE;
-    }
-
-    /**
-     * Returns the GCP account number for the instance.
-     *
-     * @return the GCP account number for the instance.
-     * @since 0.13
-     */
-    public abstract String getAccount();
-
-    /**
-     * Returns the GCP GCE instance ID.
-     *
-     * @return the GCP GCE instance ID.
-     * @since 0.13
-     */
-    public abstract String getInstanceId();
-
-    /**
-     * Returns the GCP zone.
-     *
-     * @return the GCP zone.
-     * @since 0.13
-     */
-    public abstract String getZone();
-
-    /**
-     * Returns a {@link GcpGceInstanceMonitoredResource}.
-     *
-     * @param account the GCP account number.
-     * @param instanceId the GCP GCE instance ID.
-     * @param zone the GCP zone.
-     * @return a {@code GcpGceInstanceMonitoredResource}.
-     * @since 0.15
-     */
-    public static GcpGceInstanceMonitoredResource create(
-        String account, String instanceId, String zone) {
-      return new AutoValue_MonitoredResource_GcpGceInstanceMonitoredResource(
-          account, instanceId, zone);
-    }
-
-    static GcpGceInstanceMonitoredResource create() {
-      return create(GCP_ACCOUNT_ID, GCP_INSTANCE_ID, GCP_ZONE);
-    }
-  }
-
-  /**
-   * {@link MonitoredResource} for GCP GKE container.
-   *
-   * @since 0.13
-   */
-  @Immutable
-  @AutoValue
-  public abstract static class GcpGkeContainerMonitoredResource extends MonitoredResource {
-
-    private static final String GCP_ACCOUNT_ID = firstNonNull(GcpMetadataConfig.getProjectId(), "");
-    private static final String GCP_CLUSTER_NAME =
-        firstNonNull(GcpMetadataConfig.getClusterName(), "");
-    private static final String GCP_CONTAINER_NAME =
-        firstNonNull(System.getenv("CONTAINER_NAME"), "");
-    private static final String GCP_NAMESPACE_ID = firstNonNull(System.getenv("NAMESPACE"), "");
-    private static final String GCP_INSTANCE_ID =
-        firstNonNull(GcpMetadataConfig.getInstanceId(), "");
-    private static final String GCP_POD_ID = firstNonNull(System.getenv("HOSTNAME"), "");
-    private static final String GCP_ZONE = firstNonNull(GcpMetadataConfig.getZone(), "");
-
-    @Override
-    public ResourceType getResourceType() {
-      return ResourceType.GCP_GKE_CONTAINER;
-    }
-
-    /**
-     * Returns the GCP account number for the instance.
-     *
-     * @return the GCP account number for the instance.
-     * @since 0.13
-     */
-    public abstract String getAccount();
-
-    /**
-     * Returns the GCP GKE cluster name.
-     *
-     * @return the GCP GKE cluster name.
-     * @since 0.13
-     */
-    public abstract String getClusterName();
-
-    /**
-     * Returns the GCP GKE container name.
-     *
-     * @return the GCP GKE container name.
-     * @since 0.13
-     */
-    public abstract String getContainerName();
-
-    /**
-     * Returns the GCP GKE namespace ID.
-     *
-     * @return the GCP GKE namespace ID.
-     * @since 0.13
-     */
-    public abstract String getNamespaceId();
-
-    /**
-     * Returns the GCP GKE instance ID.
-     *
-     * @return the GCP GKE instance ID.
-     * @since 0.13
-     */
-    public abstract String getInstanceId();
-
-    /**
-     * Returns the GCP GKE Pod ID.
-     *
-     * @return the GCP GKE Pod ID.
-     * @since 0.13
-     */
-    public abstract String getPodId();
-
-    /**
-     * Returns the GCP zone.
-     *
-     * @return the GCP zone.
-     * @since 0.13
-     */
-    public abstract String getZone();
-
-    /**
-     * Returns a {@link GcpGkeContainerMonitoredResource}.
-     *
-     * @param account the GCP account number.
-     * @param clusterName the GCP GKE cluster name.
-     * @param containerName the GCP GKE container name.
-     * @param namespaceId the GCP GKE namespace ID.
-     * @param instanceId the GCP GKE instance ID.
-     * @param podId the GCP GKE Pod ID.
-     * @param zone the GCP zone.
-     * @return a {@code GcpGkeContainerMonitoredResource}.
-     * @since 0.15
-     */
-    public static GcpGkeContainerMonitoredResource create(
-        String account,
-        String clusterName,
-        String containerName,
-        String namespaceId,
-        String instanceId,
-        String podId,
-        String zone) {
-      return new AutoValue_MonitoredResource_GcpGkeContainerMonitoredResource(
-          account, clusterName, containerName, namespaceId, instanceId, podId, zone);
-    }
-
-    static GcpGkeContainerMonitoredResource create() {
-      return create(
-          GCP_ACCOUNT_ID,
-          GCP_CLUSTER_NAME,
-          GCP_CONTAINER_NAME,
-          GCP_NAMESPACE_ID,
-          GCP_INSTANCE_ID,
-          GCP_POD_ID,
-          GCP_ZONE);
-    }
-  }
-}
diff --git a/contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/MonitoredResourceUtils.java b/contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/MonitoredResourceUtils.java
deleted file mode 100644
index 8ff0ff9..0000000
--- a/contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/MonitoredResourceUtils.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2018, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.contrib.monitoredresource.util;
-
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.AwsEc2InstanceMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGceInstanceMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGkeContainerMonitoredResource;
-import javax.annotation.Nullable;
-
-/**
- * Utilities for for auto detecting monitored resource based on the environment where the
- * application is running.
- *
- * @since 0.13
- */
-public final class MonitoredResourceUtils {
-
-  /**
-   * Returns a self-configured monitored resource, or {@code null} if the application is not running
-   * on a supported environment.
-   *
-   * @return a {@code MonitoredResource}.
-   * @since 0.13
-   */
-  @Nullable
-  public static MonitoredResource getDefaultResource() {
-    if (System.getenv("KUBERNETES_SERVICE_HOST") != null) {
-      return GcpGkeContainerMonitoredResource.create();
-    }
-    if (GcpMetadataConfig.getInstanceId() != null) {
-      return GcpGceInstanceMonitoredResource.create();
-    }
-    if (AwsIdentityDocUtils.isRunningOnAwsEc2()) {
-      return AwsEc2InstanceMonitoredResource.create();
-    }
-    return null;
-  }
-
-  private MonitoredResourceUtils() {}
-}
diff --git a/contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/ResourceType.java b/contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/ResourceType.java
deleted file mode 100644
index f281667..0000000
--- a/contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/ResourceType.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2018, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.contrib.monitoredresource.util;
-
-/**
- * {@link ResourceType} represents the type of supported monitored resources that can be
- * automatically detected by OpenCensus.
- *
- * @since 0.13
- */
-public enum ResourceType {
-
-  /**
-   * Resource for GCP GKE container.
-   *
-   * @since 0.13
-   */
-  GCP_GKE_CONTAINER,
-
-  /**
-   * Resource for GCP GCE instance.
-   *
-   * @since 0.13
-   */
-  GCP_GCE_INSTANCE,
-
-  /**
-   * Resource for AWS EC2 instance.
-   *
-   * @since 0.13
-   */
-  AWS_EC2_INSTANCE
-}
diff --git a/contrib/monitored_resource_util/src/test/java/io/opencensus/contrib/monitoredresource/util/MonitoredResourceTest.java b/contrib/monitored_resource_util/src/test/java/io/opencensus/contrib/monitoredresource/util/MonitoredResourceTest.java
deleted file mode 100644
index 0defcbd..0000000
--- a/contrib/monitored_resource_util/src/test/java/io/opencensus/contrib/monitoredresource/util/MonitoredResourceTest.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright 2018, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.contrib.monitoredresource.util;
-
-import static com.google.common.truth.Truth.assertThat;
-
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.AwsEc2InstanceMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGceInstanceMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGkeContainerMonitoredResource;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-/** Unit tests for {@link MonitoredResource}. */
-@RunWith(JUnit4.class)
-public class MonitoredResourceTest {
-
-  private static final String AWS_ACCOUNT = "aws-account";
-  private static final String AWS_INSTANCE = "instance";
-  private static final String AWS_REGION = "us-west-2";
-  private static final String GCP_PROJECT = "gcp-project";
-  private static final String GCP_INSTANCE = "instance";
-  private static final String GCP_ZONE = "us-east1";
-  private static final String GCP_GKE_NAMESPACE = "namespace";
-  private static final String GCP_GKE_POD_ID = "pod-id";
-  private static final String GCP_GKE_CONTAINER_NAME = "container";
-  private static final String GCP_GKE_CLUSTER_NAME = "cluster";
-
-  @Test
-  public void testAwsEc2InstanceMonitoredResource() {
-    AwsEc2InstanceMonitoredResource resource =
-        AwsEc2InstanceMonitoredResource.create(AWS_ACCOUNT, AWS_INSTANCE, AWS_REGION);
-    assertThat(resource.getResourceType()).isEqualTo(ResourceType.AWS_EC2_INSTANCE);
-    assertThat(resource.getAccount()).isEqualTo(AWS_ACCOUNT);
-    assertThat(resource.getInstanceId()).isEqualTo(AWS_INSTANCE);
-    assertThat(resource.getRegion()).isEqualTo(AWS_REGION);
-  }
-
-  @Test
-  public void testGcpGceInstanceMonitoredResource() {
-    GcpGceInstanceMonitoredResource resource =
-        GcpGceInstanceMonitoredResource.create(GCP_PROJECT, GCP_INSTANCE, GCP_ZONE);
-    assertThat(resource.getResourceType()).isEqualTo(ResourceType.GCP_GCE_INSTANCE);
-    assertThat(resource.getAccount()).isEqualTo(GCP_PROJECT);
-    assertThat(resource.getInstanceId()).isEqualTo(GCP_INSTANCE);
-    assertThat(resource.getZone()).isEqualTo(GCP_ZONE);
-  }
-
-  @Test
-  public void testGcpGkeContainerMonitoredResource() {
-    GcpGkeContainerMonitoredResource resource =
-        GcpGkeContainerMonitoredResource.create(
-            GCP_PROJECT,
-            GCP_GKE_CLUSTER_NAME,
-            GCP_GKE_CONTAINER_NAME,
-            GCP_GKE_NAMESPACE,
-            GCP_INSTANCE,
-            GCP_GKE_POD_ID,
-            GCP_ZONE);
-    assertThat(resource.getResourceType()).isEqualTo(ResourceType.GCP_GKE_CONTAINER);
-    assertThat(resource.getAccount()).isEqualTo(GCP_PROJECT);
-    assertThat(resource.getClusterName()).isEqualTo(GCP_GKE_CLUSTER_NAME);
-    assertThat(resource.getContainerName()).isEqualTo(GCP_GKE_CONTAINER_NAME);
-    assertThat(resource.getNamespaceId()).isEqualTo(GCP_GKE_NAMESPACE);
-    assertThat(resource.getInstanceId()).isEqualTo(GCP_INSTANCE);
-    assertThat(resource.getPodId()).isEqualTo(GCP_GKE_POD_ID);
-    assertThat(resource.getZone()).isEqualTo(GCP_ZONE);
-  }
-}
diff --git a/contrib/monitored_resource_util/src/test/java/io/opencensus/contrib/monitoredresource/util/MonitoredResourceUtilsTest.java b/contrib/monitored_resource_util/src/test/java/io/opencensus/contrib/monitoredresource/util/MonitoredResourceUtilsTest.java
deleted file mode 100644
index 01927a2..0000000
--- a/contrib/monitored_resource_util/src/test/java/io/opencensus/contrib/monitoredresource/util/MonitoredResourceUtilsTest.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright 2018, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.contrib.monitoredresource.util;
-
-import static com.google.common.truth.Truth.assertThat;
-
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-/** Tests for {@link MonitoredResourceUtils}. */
-@RunWith(JUnit4.class)
-public class MonitoredResourceUtilsTest {
-
-  @Test
-  public void testGetDefaultResource() {
-    MonitoredResource resource = MonitoredResourceUtils.getDefaultResource();
-    if (System.getenv("KUBERNETES_SERVICE_HOST") != null) {
-      assertThat(resource.getResourceType()).isEqualTo(ResourceType.GCP_GKE_CONTAINER);
-    } else if (GcpMetadataConfig.getInstanceId() != null) {
-      assertThat(resource.getResourceType()).isEqualTo(ResourceType.GCP_GCE_INSTANCE);
-    } else if (AwsIdentityDocUtils.isRunningOnAwsEc2()) {
-      assertThat(resource.getResourceType()).isEqualTo(ResourceType.AWS_EC2_INSTANCE);
-    } else {
-      assertThat(resource).isNull();
-    }
-  }
-}
diff --git a/contrib/observability_ready_util/README.md b/contrib/observability_ready_util/README.md
new file mode 100644
index 0000000..f49d898
--- /dev/null
+++ b/contrib/observability_ready_util/README.md
@@ -0,0 +1,58 @@
+# OpenCensus Observability Ready Util for Java
+
+[![Build Status][travis-image]][travis-url]
+[![Windows Build Status][appveyor-image]][appveyor-url]
+
+The *OpenCensus Observability Ready Util for Java* makes it easy to enable OpenCensus in an application with a single call.
+
+It provides a wrapper that
+* Enables [Basic RPC views](https://github.com/census-instrumentation/opencensus-java/blob/2a17c8482ffb04540ea4ac0a5f746ad8d536c996/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcViews.java#L219)
+* Sets the probabilistic sampling rate to `0.0001`
+* Creates and registers the OC-Agent exporters for traces and metrics
+
+## Quickstart
+
+### Add the dependencies to your project
+
+For Maven add to your `pom.xml`:
+```xml
+<dependencies>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-contrib-observability-ready-util</artifactId>
+    <version>0.25.0</version>
+  </dependency>
+</dependencies>
+```
+
+For Gradle add to your dependencies:
+```groovy
+compile 'io.opencensus:opencensus-contrib-observability-ready-util:0.25.0'
+```
+
+### Enable OpenCensus
+
+```java
+import io.opencensus.contrib.observability.ready.util.BasicSetup;
+
+public class YourClass {
+  public static void main(String[] args) {
+    // It is recommended to call this method before doing any RPC call to avoid missing stats.
+    BasicSetup.enableOpenCensus();
+  }
+}
+```
+
+> If the agent is not yet up and running, the exporter will simply retry the connection.
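+
+By default the exporters connect to the agent at `localhost:55678`, traces are sampled with probability `0.0001`, and the service name is read from the `SERVICE_NAME` environment variable. A minimal sketch that overrides these defaults with the `enableOpenCensus(endPoint, probability)` overload (the endpoint and sampling rate below are placeholder values):
+
+```java
+import io.opencensus.contrib.observability.ready.util.BasicSetup;
+
+public class YourClass {
+  public static void main(String[] args) {
+    // Point the exporters at a specific OC-Agent endpoint and sample 1% of traces.
+    BasicSetup.enableOpenCensus("oc-agent.example.com:55678", 0.01);
+  }
+}
+```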
+
+### Deploy the OpenCensus Agent
+
+You will also need to deploy the [OpenCensus Agent](https://github.com/census-instrumentation/opencensus-service#opencensus-agent) in order to export and examine the stats and traces.
+The OpenCensus Agent exporter, aka "ocagent-exporter", enables your applications to send the
+observability data they have collected using OpenCensus to the OpenCensus Agent.
+
+
+[travis-image]: https://travis-ci.org/census-instrumentation/opencensus-java.svg?branch=master
+[travis-url]: https://travis-ci.org/census-instrumentation/opencensus-java
+[appveyor-image]: https://ci.appveyor.com/api/projects/status/hxthmpkxar4jq4be/branch/master?svg=true
+[appveyor-url]: https://ci.appveyor.com/project/opencensusjavateam/opencensus-java/branch/master
diff --git a/contrib/observability_ready_util/build.gradle b/contrib/observability_ready_util/build.gradle
new file mode 100644
index 0000000..1ebe64c
--- /dev/null
+++ b/contrib/observability_ready_util/build.gradle
@@ -0,0 +1,18 @@
+description = 'OpenCensus Observability Ready Util'
+
+apply plugin: 'java'
+
+[compileJava, compileTestJava].each() {
+    it.sourceCompatibility = 1.8
+    it.targetCompatibility = 1.8
+}
+
+dependencies {
+    compile project(':opencensus-api'),
+            project(':opencensus-impl-core'),
+            project(':opencensus-contrib-grpc-metrics'),
+            project(':opencensus-exporter-metrics-ocagent'),
+            project(':opencensus-exporter-trace-ocagent')
+
+    signature "org.codehaus.mojo.signature:java18:+@signature"
+}
diff --git a/contrib/observability_ready_util/src/main/java/io/opencensus/contrib/observability/ready/util/BasicSetup.java b/contrib/observability_ready_util/src/main/java/io/opencensus/contrib/observability/ready/util/BasicSetup.java
new file mode 100644
index 0000000..d61a9f7
--- /dev/null
+++ b/contrib/observability_ready_util/src/main/java/io/opencensus/contrib/observability/ready/util/BasicSetup.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2020, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.observability.ready.util;
+
+import static com.google.common.base.MoreObjects.firstNonNull;
+
+import io.opencensus.common.ExperimentalApi;
+import io.opencensus.contrib.grpc.metrics.RpcViews;
+import io.opencensus.exporter.metrics.ocagent.OcAgentMetricsExporter;
+import io.opencensus.exporter.metrics.ocagent.OcAgentMetricsExporterConfiguration;
+import io.opencensus.exporter.trace.ocagent.OcAgentTraceExporter;
+import io.opencensus.exporter.trace.ocagent.OcAgentTraceExporterConfiguration;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.config.TraceConfig;
+import io.opencensus.trace.config.TraceParams;
+import io.opencensus.trace.samplers.Samplers;
+
+/**
+ * Setup class to easily enable OpenCensus stats, metrics, and trace collection.
+ *
+ * @since 0.25
+ */
+@ExperimentalApi
+public final class BasicSetup {
+  private static final double DEFAULT_SAMPLING_RATE = 0.0001;
+  private static final String DEFAULT_ENDPOINT = "localhost:55678";
+  private static final String DEFAULT_SERVICE_NAME = "OpenCensus";
+
+  private BasicSetup() {}
+
+  /**
+   * Enables OpenCensus metrics and traces.
+   *
+   * <p>This will register all basic {@link io.opencensus.stats.View}s. When coupled with an agent,
+   * it allows users to monitor application behavior.
+   *
+   * <p>Example usage for maven:
+   *
+   * <pre>{@code
+   * <dependency>
+   *   <groupId>io.opencensus</groupId>
+   *   <artifactId>opencensus-contrib-observability-ready-util</artifactId>
+   *   <version>${opencensus.version}</version>
+   * </dependency>
+   * }</pre>
+   *
+   * <p>It is recommended to call this method before doing any RPC call to avoid missing stats.
+   *
+   * <pre>{@code
+   * BasicSetup.enableOpenCensus();
+   * }</pre>
+   *
+   * @param endPoint the end point of OC-Agent.
+   * @param probability the desired probability of sampling. Must be within [0.0, 1.0].
+   * @since 0.25
+   */
+  public static void enableOpenCensus(String endPoint, double probability) {
+    // register basic rpc views
+    RpcViews.registerAllGrpcBasicViews();
+
+    // set sampling rate
+    TraceConfig traceConfig = Tracing.getTraceConfig();
+    TraceParams activeTraceParams = traceConfig.getActiveTraceParams();
+    traceConfig.updateActiveTraceParams(
+        activeTraceParams.toBuilder().setSampler(Samplers.probabilitySampler(probability)).build());
+
+    String serviceName = firstNonNull(System.getenv("SERVICE_NAME"), DEFAULT_SERVICE_NAME);
+    // create and register Trace Agent Exporter
+    OcAgentTraceExporter.createAndRegister(
+        OcAgentTraceExporterConfiguration.builder()
+            .setEndPoint(endPoint)
+            .setServiceName(serviceName)
+            .setUseInsecure(true)
+            .setEnableConfig(false)
+            .build());
+
+    // create and register Metrics Agent Exporter
+    OcAgentMetricsExporter.createAndRegister(
+        OcAgentMetricsExporterConfiguration.builder()
+            .setEndPoint(endPoint)
+            .setServiceName(serviceName)
+            .setUseInsecure(true)
+            .build());
+  }
+
+  /**
+   * Enables OpenCensus metrics and traces with the default endpoint and sampling rate.
+   *
+   * @since 0.25
+   */
+  public static void enableOpenCensus() {
+    enableOpenCensus(DEFAULT_ENDPOINT, DEFAULT_SAMPLING_RATE);
+  }
+}
diff --git a/contrib/monitored_resource_util/README.md b/contrib/resource_util/README.md
similarity index 60%
rename from contrib/monitored_resource_util/README.md
rename to contrib/resource_util/README.md
index 9d3c754..61d854b 100644
--- a/contrib/monitored_resource_util/README.md
+++ b/contrib/resource_util/README.md
@@ -1,10 +1,11 @@
-# OpenCensus Monitored Resources Util
+# OpenCensus Resources Util
 [![Build Status][travis-image]][travis-url]
 [![Windows Build Status][appveyor-image]][appveyor-url]
 [![Maven Central][maven-image]][maven-url]
 
-The *OpenCensus Monitored Resource Util for Java* is a collection of utilities for auto detecting
-monitored resource when exporting stats, based on the environment where the application is running.
+The *OpenCensus Resource Util for Java* is a collection of utilities that defines a set of
+common resources (aws_ec2_instance, gcp_gce_instance, k8s_container, etc.) and offers auto-detection
+of some of these resources, based on the environment where the application is running.
 
 ## Quickstart
 
@@ -15,20 +16,20 @@
 <dependencies>
   <dependency>
     <groupId>io.opencensus</groupId>
-    <artifactId>opencensus-contrib-monitored-resource-util</artifactId>
-    <version>0.16.1</version>
+    <artifactId>opencensus-contrib-resource-util</artifactId>
+    <version>0.20.0</version>
   </dependency>
 </dependencies>
 ```
 
 For Gradle add to your dependencies:
-```gradle
-compile 'io.opencensus:opencensus-contrib-monitored-resource-util:0.16.1'
+```groovy
+compile 'io.opencensus:opencensus-contrib-resource-util:0.20.0'
 ```
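+
+### Create a resource
+
+The `create()` helpers can also be used to describe a resource by hand. A minimal sketch using `CloudResource` (the project, region, and zone values below are placeholders):
+
+```java
+import io.opencensus.contrib.resource.util.CloudResource;
+import io.opencensus.resource.Resource;
+
+public class Example {
+  public static void main(String[] args) {
+    // Describe a GCP environment explicitly instead of relying on auto-detection.
+    Resource cloud =
+        CloudResource.create(CloudResource.PROVIDER_GCP, "my-project", "us-east1", "us-east1-b");
+    System.out.println(cloud);
+  }
+}
+```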
 
 [travis-image]: https://travis-ci.org/census-instrumentation/opencensus-java.svg?branch=master
 [travis-url]: https://travis-ci.org/census-instrumentation/opencensus-java
 [appveyor-image]: https://ci.appveyor.com/api/projects/status/hxthmpkxar4jq4be/branch/master?svg=true
 [appveyor-url]: https://ci.appveyor.com/project/opencensusjavateam/opencensus-java/branch/master
-[maven-image]: https://maven-badges.herokuapp.com/maven-central/io.opencensus/opencensus-contrib-monitoredresource-util/badge.svg
-[maven-url]: https://maven-badges.herokuapp.com/maven-central/io.opencensus/opencensus-contrib-monitoredresource-util
+[maven-image]: https://maven-badges.herokuapp.com/maven-central/io.opencensus/opencensus-contrib-resource-util/badge.svg
+[maven-url]: https://maven-badges.herokuapp.com/maven-central/io.opencensus/opencensus-contrib-resource-util
diff --git a/contrib/monitored_resource_util/build.gradle b/contrib/resource_util/build.gradle
similarity index 68%
copy from contrib/monitored_resource_util/build.gradle
copy to contrib/resource_util/build.gradle
index 1e25c7c..325a0df 100644
--- a/contrib/monitored_resource_util/build.gradle
+++ b/contrib/resource_util/build.gradle
@@ -1,4 +1,4 @@
-description = 'OpenCensus Monitored Resource Util'
+description = 'OpenCensus Resource Util'
 
 apply plugin: 'java'
 
@@ -8,7 +8,9 @@
 }
 
 dependencies {
-    compileOnly libraries.auto_value
+    compile project(':opencensus-api'),
+            libraries.guava,
+            libraries.jsr305
 
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
     signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
diff --git a/contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/AwsIdentityDocUtils.java b/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/AwsIdentityDocUtils.java
similarity index 77%
rename from contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/AwsIdentityDocUtils.java
rename to contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/AwsIdentityDocUtils.java
index 03b0bd4..37bfa41 100644
--- a/contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/AwsIdentityDocUtils.java
+++ b/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/AwsIdentityDocUtils.java
@@ -14,7 +14,9 @@
  * limitations under the License.
  */
 
-package io.opencensus.contrib.monitoredresource.util;
+package io.opencensus.contrib.resource.util;
+
+import static com.google.common.base.MoreObjects.firstNonNull;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -24,14 +26,13 @@
 import java.net.URI;
 import java.nio.CharBuffer;
 import java.nio.charset.Charset;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
-import javax.annotation.concurrent.GuardedBy;
 
 /** Util methods for getting and parsing AWS instance identity document. */
 final class AwsIdentityDocUtils {
 
-  private static final Object monitor = new Object();
   private static final int AWS_IDENTITY_DOC_BUF_SIZE = 0x800; // 2K chars (4K bytes)
   private static final String AWS_IDENTITY_DOC_LINE_BREAK_SPLITTER = "\n";
   private static final String AWS_IDENTITY_DOC_COLON_SPLITTER = ":";
@@ -39,35 +40,22 @@
   private static final URI AWS_INSTANCE_IDENTITY_DOCUMENT_URI =
       URI.create("http://169.254.169.254/latest/dynamic/instance-identity/document");
 
-  @GuardedBy("monitor")
-  @javax.annotation.Nullable
-  private static Map<String, String> awsEnvVarMap = null;
+  private static final Map<String, String> awsEnvVarMap = initializeAwsIdentityDocument();
 
-  // Detects if the application is running on EC2 by making a connection to AWS instance
-  // identity document URI. If connection is successful, application should be on an EC2 instance.
-  private static volatile boolean isRunningOnAwsEc2 = false;
-
-  static {
-    initializeAwsIdentityDocument();
-  }
-
-  static boolean isRunningOnAwsEc2() {
-    return isRunningOnAwsEc2;
+  static boolean isRunningOnAws() {
+    return !awsEnvVarMap.isEmpty();
   }
 
   // Tries to establish an HTTP connection to AWS instance identity document url. If the application
   // is running on an EC2 instance, we should be able to get back a valid JSON document. Parses that
   // document and stores the identity properties in a local map.
   // This method should only be called once.
-  private static void initializeAwsIdentityDocument() {
+  private static Map<String, String> initializeAwsIdentityDocument() {
     InputStream stream = null;
     try {
       stream = openStream(AWS_INSTANCE_IDENTITY_DOCUMENT_URI);
       String awsIdentityDocument = slurp(new InputStreamReader(stream, Charset.forName("UTF-8")));
-      synchronized (monitor) {
-        awsEnvVarMap = parseAwsIdentityDocument(awsIdentityDocument);
-      }
-      isRunningOnAwsEc2 = true;
+      return parseAwsIdentityDocument(awsIdentityDocument);
     } catch (IOException e) {
       // Cannot connect to http://169.254.169.254/latest/dynamic/instance-identity/document.
       // Not on an AWS EC2 instance.
@@ -80,6 +68,7 @@
         }
       }
     }
+    return Collections.emptyMap();
   }
 
   /** quick http client that allows no-dependency try at getting instance data. */
@@ -123,14 +112,31 @@
     return map;
   }
 
-  @javax.annotation.Nullable
-  static String getValueFromAwsIdentityDocument(String key) {
-    synchronized (monitor) {
-      if (awsEnvVarMap == null) {
-        return null;
-      }
-      return awsEnvVarMap.get(key);
+  private static String getValueFromAwsIdentityDocument(String key) {
+    if (awsEnvVarMap == null) {
+      return "";
     }
+    return firstNonNull(awsEnvVarMap.get(key), "");
+  }
+
+  static String getAccountId() {
+    return getValueFromAwsIdentityDocument("accountId");
+  }
+
+  static String getRegion() {
+    return getValueFromAwsIdentityDocument("region");
+  }
+
+  static String getAvailabilityZone() {
+    return getValueFromAwsIdentityDocument("availabilityZone");
+  }
+
+  static String getInstanceId() {
+    return getValueFromAwsIdentityDocument("instanceId");
+  }
+
+  static String getMachineType() {
+    return getValueFromAwsIdentityDocument("instanceType");
   }
 
   private AwsIdentityDocUtils() {}
diff --git a/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/CloudResource.java b/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/CloudResource.java
new file mode 100644
index 0000000..d6221ee
--- /dev/null
+++ b/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/CloudResource.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.resource.util;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static io.opencensus.contrib.resource.util.ResourceUtils.EMPTY_RESOURCE;
+
+import io.opencensus.resource.Resource;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * Helper class for Cloud {@code Resource} environment.
+ *
+ * @since 0.20
+ */
+public final class CloudResource {
+  /**
+   * The type of this {@code Resource}.
+   *
+   * @since 0.20
+   */
+  public static final String TYPE = "cloud";
+
+  /**
+   * Key for the name of the cloud provider. Example values are aws, azure, gcp.
+   *
+   * @since 0.20
+   */
+  public static final String PROVIDER_KEY = "cloud.provider";
+
+  /**
+   * The value of the provider when running in AWS.
+   *
+   * @since 0.20
+   */
+  public static final String PROVIDER_AWS = "aws";
+
+  /**
+   * The value of the provider when running in AZURE.
+   *
+   * @since 0.20
+   */
+  public static final String PROVIDER_AZURE = "azure";
+
+  /**
+   * The value of the provider when running in GCP.
+   *
+   * @since 0.20
+   */
+  public static final String PROVIDER_GCP = "gcp";
+
+  /**
+   * Key for the cloud account id used to identify different entities.
+   *
+   * @since 0.20
+   */
+  public static final String ACCOUNT_ID_KEY = "cloud.account.id";
+
+  /**
+   * Key for the region in which entities are running.
+   *
+   * @since 0.20
+   */
+  public static final String REGION_KEY = "cloud.region";
+
+  /**
+   * Key for the zone in which entities are running.
+   *
+   * @since 0.20
+   */
+  public static final String ZONE_KEY = "cloud.zone";
+
+  /**
+   * Returns a {@link Resource} that describes a cloud environment.
+   *
+   * @param provider the name of the cloud provider.
+   * @param accountId the cloud account id used to identify different entities.
+   * @param region the region in which entities are running.
+   * @param zone the zone in which entities are running.
+   * @return a {@link Resource} that describes a cloud environment.
+   * @since 0.20
+   */
+  public static Resource create(String provider, String accountId, String region, String zone) {
+    Map<String, String> labels = new LinkedHashMap<String, String>();
+    labels.put(PROVIDER_KEY, checkNotNull(provider, "provider"));
+    labels.put(ACCOUNT_ID_KEY, checkNotNull(accountId, "accountId"));
+    labels.put(REGION_KEY, checkNotNull(region, "region"));
+    labels.put(ZONE_KEY, checkNotNull(zone, "zone"));
+    return Resource.create(TYPE, labels);
+  }
+
+  static Resource detect() {
+    if (AwsIdentityDocUtils.isRunningOnAws()) {
+      return create(
+          PROVIDER_AWS,
+          AwsIdentityDocUtils.getAccountId(),
+          AwsIdentityDocUtils.getRegion(),
+          AwsIdentityDocUtils.getAvailabilityZone());
+    }
+    if (GcpMetadataConfig.isRunningOnGcp()) {
+      return create(
+          PROVIDER_GCP, GcpMetadataConfig.getProjectId(), "", GcpMetadataConfig.getZone());
+    }
+    // TODO: Add support for PROVIDER_AZURE.
+    return EMPTY_RESOURCE;
+  }
+
+  private CloudResource() {}
+}
diff --git a/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/ContainerResource.java b/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/ContainerResource.java
new file mode 100644
index 0000000..72931d0
--- /dev/null
+++ b/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/ContainerResource.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.resource.util;
+
+import static com.google.common.base.MoreObjects.firstNonNull;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import io.opencensus.resource.Resource;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * Helper class for K8S container {@code Resource}.
+ *
+ * @since 0.20
+ */
+public class ContainerResource {
+  /**
+   * The type of this {@code Resource}.
+   *
+   * @since 0.20
+   */
+  public static final String TYPE = "container";
+
+  /**
+   * Key for the container name.
+   *
+   * @since 0.20
+   */
+  public static final String NAME_KEY = "container.name";
+
+  /**
+   * Key for the container image name.
+   *
+   * @since 0.20
+   */
+  public static final String IMAGE_NAME_KEY = "container.image.name";
+
+  /**
+   * Key for the container image tag.
+   *
+   * @since 0.20
+   */
+  public static final String IMAGE_TAG_KEY = "container.image.tag";
+
+  /**
+   * Returns a {@link Resource} that describes a container.
+   *
+   * @param name the container name.
+   * @param imageName the container image name.
+   * @param imageTag the container image tag.
+   * @return a {@link Resource} that describes a k8s container.
+   * @since 0.20
+   */
+  public static Resource create(String name, String imageName, String imageTag) {
+    Map<String, String> labels = new LinkedHashMap<String, String>();
+    labels.put(NAME_KEY, checkNotNull(name, "name"));
+    labels.put(IMAGE_NAME_KEY, checkNotNull(imageName, "imageName"));
+    labels.put(IMAGE_TAG_KEY, checkNotNull(imageTag, "imageTag"));
+    return Resource.create(TYPE, labels);
+  }
+
+  static Resource detect() {
+    // TODO: Add support to auto-detect IMAGE_NAME_KEY and IMAGE_TAG_KEY.
+    return create(firstNonNull(System.getenv("CONTAINER_NAME"), ""), "", "");
+  }
+
+  private ContainerResource() {}
+}
diff --git a/contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/GcpMetadataConfig.java b/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/GcpMetadataConfig.java
similarity index 70%
rename from contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/GcpMetadataConfig.java
rename to contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/GcpMetadataConfig.java
index c09d1c6..d0ee8f2 100644
--- a/contrib/monitored_resource_util/src/main/java/io/opencensus/contrib/monitoredresource/util/GcpMetadataConfig.java
+++ b/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/GcpMetadataConfig.java
@@ -14,7 +14,9 @@
  * limitations under the License.
  */
 
-package io.opencensus.contrib.monitoredresource.util;
+package io.opencensus.contrib.resource.util;
+
+import static com.google.common.base.MoreObjects.firstNonNull;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -23,7 +25,6 @@
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.nio.charset.Charset;
-import javax.annotation.Nullable;
 
 /**
  * Retrieves Google Cloud project-id and a limited set of instance attributes from Metadata server.
@@ -33,38 +34,50 @@
  */
 final class GcpMetadataConfig {
 
-  private static final String METADATA_URL = "http://metadata/computeMetadata/v1/";
+  private static final String METADATA_URL = "http://metadata.google.internal/computeMetadata/v1/";
 
   private GcpMetadataConfig() {}
 
-  @Nullable
+  static boolean isRunningOnGcp() {
+    return !getProjectId().isEmpty();
+  }
+
   static String getProjectId() {
     return getAttribute("project/project-id");
   }
 
-  @Nullable
   static String getZone() {
-    String zoneId = getAttribute("instance/zone");
-    if (zoneId == null) {
-      return null;
+    String zone = getAttribute("instance/zone");
+    if (zone.contains("/")) {
+      return zone.substring(zone.lastIndexOf('/') + 1);
     }
-    if (zoneId.contains("/")) {
-      return zoneId.substring(zoneId.lastIndexOf('/') + 1);
-    }
-    return zoneId;
+    return zone;
   }
 
-  @Nullable
+  static String getMachineType() {
+    String machineType = getAttribute("instance/machine-type");
+    if (machineType.contains("/")) {
+      return machineType.substring(machineType.lastIndexOf('/') + 1);
+    }
+    return machineType;
+  }
+
   static String getInstanceId() {
     return getAttribute("instance/id");
   }
 
-  @Nullable
   static String getClusterName() {
     return getAttribute("instance/attributes/cluster-name");
   }
 
-  @Nullable
+  static String getInstanceName() {
+    return getAttribute("instance/name");
+  }
+
+  static String getInstanceHostname() {
+    return getAttribute("instance/hostname");
+  }
+
   private static String getAttribute(String attributeName) {
     try {
       URL url = new URL(METADATA_URL + attributeName);
@@ -75,7 +88,7 @@
         BufferedReader reader = null;
         try {
           reader = new BufferedReader(new InputStreamReader(input, Charset.forName("UTF-8")));
-          return reader.readLine();
+          return firstNonNull(reader.readLine(), "");
         } finally {
           if (reader != null) {
             reader.close();
@@ -85,6 +98,6 @@
     } catch (IOException ignore) {
       // ignore
     }
-    return null;
+    return "";
   }
 }
diff --git a/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/HostResource.java b/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/HostResource.java
new file mode 100644
index 0000000..5605fe8
--- /dev/null
+++ b/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/HostResource.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.resource.util;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static io.opencensus.contrib.resource.util.ResourceUtils.EMPTY_RESOURCE;
+
+import io.opencensus.resource.Resource;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * Helper class for Host {@code Resource}. A host is defined as a general computing instance.
+ *
+ * @since 0.20
+ */
+public final class HostResource {
+  /**
+   * The type of this {@code Resource}.
+   *
+   * @since 0.20
+   */
+  public static final String TYPE = "host";
+
+  /**
+   * Key for the hostname of the host.
+   *
+   * <p>It contains what the `hostname` command returns on the host machine.
+   *
+   * @since 0.20
+   */
+  public static final String HOSTNAME_KEY = "host.hostname";
+
+  /**
+   * Key for the name of the host.
+   *
+   * <p>It may contain what `hostname` returns on Unix systems, the fully qualified hostname, or a name
+   * specified by the user.
+   *
+   * @since 0.20
+   */
+  public static final String NAME_KEY = "host.name";
+
+  /**
+   * Key for the unique host id (instance id in Cloud).
+   *
+   * @since 0.20
+   */
+  public static final String ID_KEY = "host.id";
+
+  /**
+   * Key for the type of the host (machine type).
+   *
+   * @since 0.20
+   */
+  public static final String TYPE_KEY = "host.type";
+
+  /**
+   * Returns a {@link Resource} that describes a host.
+   *
+   * @param hostname the hostname of the host.
+   * @param name the name of the host.
+   * @param id the unique host id (instance id in Cloud).
+   * @param type the type of the host (machine type).
+   * @return a {@link Resource} that describes a host.
+   * @since 0.20
+   */
+  public static Resource create(String hostname, String name, String id, String type) {
+    Map<String, String> labels = new LinkedHashMap<String, String>();
+    labels.put(HOSTNAME_KEY, checkNotNull(hostname, "hostname"));
+    labels.put(NAME_KEY, checkNotNull(name, "name"));
+    labels.put(ID_KEY, checkNotNull(id, "id"));
+    labels.put(TYPE_KEY, checkNotNull(type, "type"));
+    return Resource.create(TYPE, labels);
+  }
+
+  static Resource detect() {
+    if (AwsIdentityDocUtils.isRunningOnAws()) {
+      return create(
+          "", "", AwsIdentityDocUtils.getInstanceId(), AwsIdentityDocUtils.getMachineType());
+    }
+    if (GcpMetadataConfig.isRunningOnGcp()) {
+      return create(
+          GcpMetadataConfig.getInstanceHostname(),
+          GcpMetadataConfig.getInstanceName(),
+          GcpMetadataConfig.getInstanceId(),
+          GcpMetadataConfig.getMachineType());
+    }
+    return EMPTY_RESOURCE;
+  }
+
+  private HostResource() {}
+}
diff --git a/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/K8sResource.java b/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/K8sResource.java
new file mode 100644
index 0000000..6caa67f
--- /dev/null
+++ b/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/K8sResource.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.resource.util;
+
+import static com.google.common.base.MoreObjects.firstNonNull;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Splitter;
+import io.opencensus.resource.Resource;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Helper class for Kubernetes deployment service {@code Resource}.
+ *
+ * @since 0.20
+ */
+public class K8sResource {
+  /**
+   * The type of this {@code Resource}.
+   *
+   * @since 0.20
+   */
+  public static final String TYPE = "k8s";
+
+  /**
+   * Key for the name of the cluster.
+   *
+   * @since 0.20
+   */
+  public static final String CLUSTER_NAME_KEY = "k8s.cluster.name";
+
+  /**
+   * Key for the name of the namespace.
+   *
+   * @since 0.20
+   */
+  public static final String NAMESPACE_NAME_KEY = "k8s.namespace.name";
+
+  /**
+   * Key for the name of the pod.
+   *
+   * @since 0.20
+   */
+  public static final String POD_NAME_KEY = "k8s.pod.name";
+
+  /**
+   * Key for the name of the deployment.
+   *
+   * @since 0.24
+   */
+  public static final String DEPLOYMENT_NAME_KEY = "k8s.deployment.name";
+
+  private static final Splitter splitter = Splitter.on('-');
+
+  /**
+   * Returns a {@link Resource} that describes a Kubernetes deployment service.
+   *
+   * @param clusterName the k8s cluster name.
+   * @param namespace the k8s namespace.
+   * @param podName the k8s pod name.
+   * @return a {@link Resource} that describes a Kubernetes deployment service.
+   * @since 0.20
+   * @deprecated in favor of {@link #create(String, String, String, String)}.
+   */
+  @Deprecated
+  public static Resource create(String clusterName, String namespace, String podName) {
+    return create(clusterName, namespace, podName, "");
+  }
+
+  /**
+   * Returns a {@link Resource} that describes a Kubernetes deployment service.
+   *
+   * @param clusterName the k8s cluster name.
+   * @param namespace the k8s namespace.
+   * @param podName the k8s pod name.
+   * @param deploymentName the k8s deployment name.
+   * @return a {@link Resource} that describes a Kubernetes deployment service.
+   * @since 0.24
+   */
+  public static Resource create(
+      String clusterName, String namespace, String podName, String deploymentName) {
+    Map<String, String> labels = new LinkedHashMap<String, String>();
+    labels.put(CLUSTER_NAME_KEY, checkNotNull(clusterName, "clusterName"));
+    labels.put(NAMESPACE_NAME_KEY, checkNotNull(namespace, "namespace"));
+    labels.put(POD_NAME_KEY, checkNotNull(podName, "podName"));
+    labels.put(DEPLOYMENT_NAME_KEY, checkNotNull(deploymentName, "deploymentName"));
+    return Resource.create(TYPE, labels);
+  }
+
+  static Resource detect() {
+    String podName = firstNonNull(System.getenv("HOSTNAME"), "");
+    String deploymentName = getDeploymentNameFromPodName(podName);
+    return create(
+        GcpMetadataConfig.getClusterName(),
+        firstNonNull(System.getenv("NAMESPACE"), ""),
+        podName,
+        deploymentName);
+  }
+
+  @VisibleForTesting
+  static String getDeploymentNameFromPodName(String podName) {
+    StringBuilder deploymentName = new StringBuilder();
+    // Extract deployment name from the pod name. Pod name is created using
+    // format: [deployment-name]-[Random-String-For-ReplicaSet]-[Random-String-For-Pod]
+    List<String> parts = splitter.splitToList(podName);
+    if (parts.size() == 3) {
+      deploymentName.append(parts.get(0));
+    } else if (parts.size() > 3) { // Deployment name could also contain '-'
+      for (int i = 0; i < parts.size() - 2; i++) {
+        if (deploymentName.length() > 0) {
+          deploymentName.append('-');
+        }
+        deploymentName.append(parts.get(i));
+      }
+    }
+    return deploymentName.toString();
+  }
+
+  private K8sResource() {}
+}
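
A hypothetical illustration (not part of the patch) of the pod-name convention documented in `getDeploymentNameFromPodName()`; the pod names are invented, and the sketch assumes it lives in the same package because the helper is package-private.

```java
// Hypothetical sketch; only the parsing behavior comes from K8sResource above.
package io.opencensus.contrib.resource.util;

final class DeploymentNameExample {
  public static void main(String[] args) {
    // Pod names follow [deployment-name]-[ReplicaSet suffix]-[pod suffix]; only the last two
    // segments are dropped, so deployment names containing '-' survive intact.
    System.out.println(K8sResource.getDeploymentNameFromPodName("frontend-5c69d9d8f5-xt2bq"));
    // -> "frontend"
    System.out.println(K8sResource.getDeploymentNameFromPodName("checkout-service-5c69d9d8f5-xt2bq"));
    // -> "checkout-service"
    System.out.println(K8sResource.getDeploymentNameFromPodName("simple-name"));
    // -> "" (fewer than three segments, so no deployment name can be inferred)
  }
}
```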
diff --git a/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/ResourceUtils.java b/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/ResourceUtils.java
new file mode 100644
index 0000000..244bcf4
--- /dev/null
+++ b/contrib/resource_util/src/main/java/io/opencensus/contrib/resource/util/ResourceUtils.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.resource.util;
+
+import static com.google.common.base.MoreObjects.firstNonNull;
+
+import io.opencensus.resource.Resource;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Utilities for auto detecting resource based on the environment where the application is running.
+ *
+ * @since 0.20
+ */
+public final class ResourceUtils {
+  static final Resource EMPTY_RESOURCE =
+      Resource.create(null, Collections.<String, String>emptyMap());
+
+  /**
+   * Returns a {@code Resource}. The detector sequentially runs resource detection from environment
+   * variables, Kubernetes, GCE, and AWS.
+   *
+   * @return a {@code Resource}.
+   * @since 0.20
+   */
+  public static Resource detectResource() {
+    List<Resource> resourceList = new ArrayList<Resource>();
+    resourceList.add(Resource.createFromEnvironmentVariables());
+    if (System.getenv("KUBERNETES_SERVICE_HOST") != null) {
+      resourceList.add(ContainerResource.detect());
+      resourceList.add(K8sResource.detect());
+    }
+    resourceList.add(HostResource.detect());
+    resourceList.add(CloudResource.detect());
+    return firstNonNull(Resource.mergeResources(resourceList), EMPTY_RESOURCE);
+  }
+
+  private ResourceUtils() {}
+}
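
As a minimal usage sketch (hypothetical class name, output entirely environment-dependent), the new detection entry point can be exercised like this:

```java
// Hypothetical sketch of calling the detection entry point added above.
import io.opencensus.contrib.resource.util.ResourceUtils;
import io.opencensus.resource.Resource;
import java.util.Map;

final class DetectResourceExample {
  public static void main(String[] args) {
    // Runs the detectors in the order listed in detectResource(): environment variables, then
    // (on Kubernetes) container and k8s resources, then host, then cloud, merged into one Resource.
    Resource resource = ResourceUtils.detectResource();
    System.out.println("type: " + resource.getType());
    for (Map.Entry<String, String> label : resource.getLabels().entrySet()) {
      System.out.println(label.getKey() + " = " + label.getValue());
    }
  }
}
```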
diff --git a/contrib/monitored_resource_util/src/test/java/io/opencensus/contrib/monitoredresource/util/AwsIdentityDocUtilsTest.java b/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/AwsIdentityDocUtilsTest.java
similarity index 97%
rename from contrib/monitored_resource_util/src/test/java/io/opencensus/contrib/monitoredresource/util/AwsIdentityDocUtilsTest.java
rename to contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/AwsIdentityDocUtilsTest.java
index 77d9849..76e7331 100644
--- a/contrib/monitored_resource_util/src/test/java/io/opencensus/contrib/monitoredresource/util/AwsIdentityDocUtilsTest.java
+++ b/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/AwsIdentityDocUtilsTest.java
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package io.opencensus.contrib.monitoredresource.util;
+package io.opencensus.contrib.resource.util;
 
 import static com.google.common.truth.Truth.assertThat;
 
diff --git a/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/CloudResourceTest.java b/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/CloudResourceTest.java
new file mode 100644
index 0000000..e06d6aa
--- /dev/null
+++ b/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/CloudResourceTest.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.resource.util;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.resource.Resource;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link CloudResource}. */
+@RunWith(JUnit4.class)
+public class CloudResourceTest {
+  private static final String PROVIDER = "provider";
+  private static final String ACCOUNT_ID = "account_id";
+  private static final String REGION = "region";
+  private static final String ZONE = "zone";
+
+  @Test
+  public void create_CloudResourceTest() {
+    Resource resource = CloudResource.create(PROVIDER, ACCOUNT_ID, REGION, ZONE);
+    assertThat(resource.getType()).isEqualTo(CloudResource.TYPE);
+    assertThat(resource.getLabels())
+        .containsExactly(
+            CloudResource.PROVIDER_KEY,
+            PROVIDER,
+            CloudResource.ACCOUNT_ID_KEY,
+            ACCOUNT_ID,
+            CloudResource.REGION_KEY,
+            REGION,
+            CloudResource.ZONE_KEY,
+            ZONE);
+  }
+}
diff --git a/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/ContainerResourceTest.java b/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/ContainerResourceTest.java
new file mode 100644
index 0000000..a3026ab
--- /dev/null
+++ b/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/ContainerResourceTest.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.resource.util;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.resource.Resource;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link ContainerResource}. */
+@RunWith(JUnit4.class)
+public class ContainerResourceTest {
+  private static final String NAME = "container";
+  private static final String IMAGE_NAME = "image_name";
+  private static final String IMAGE_TAG = "image_tag";
+
+  @Test
+  public void create_ContainerResourceTest() {
+    Resource resource = ContainerResource.create(NAME, IMAGE_NAME, IMAGE_TAG);
+    assertThat(resource.getType()).isEqualTo(ContainerResource.TYPE);
+    assertThat(resource.getLabels())
+        .containsExactly(
+            ContainerResource.NAME_KEY,
+            NAME,
+            ContainerResource.IMAGE_NAME_KEY,
+            IMAGE_NAME,
+            ContainerResource.IMAGE_TAG_KEY,
+            IMAGE_TAG);
+  }
+}
diff --git a/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/HostResourceTest.java b/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/HostResourceTest.java
new file mode 100644
index 0000000..fc82746
--- /dev/null
+++ b/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/HostResourceTest.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.resource.util;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.resource.Resource;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link HostResource}. */
+@RunWith(JUnit4.class)
+public class HostResourceTest {
+  private static final String HOSTNAME = "hostname";
+  private static final String NAME = "name";
+  private static final String ID = "id";
+  private static final String TYPE = "type";
+
+  @Test
+  public void create_HostResourceTest() {
+    Resource resource = HostResource.create(HOSTNAME, NAME, ID, TYPE);
+    assertThat(resource.getType()).isEqualTo(HostResource.TYPE);
+    assertThat(resource.getLabels())
+        .containsExactly(
+            HostResource.HOSTNAME_KEY,
+            HOSTNAME,
+            HostResource.NAME_KEY,
+            NAME,
+            HostResource.ID_KEY,
+            ID,
+            HostResource.TYPE_KEY,
+            TYPE);
+  }
+}
diff --git a/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/K8sResourceTest.java b/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/K8sResourceTest.java
new file mode 100644
index 0000000..0ed4f26
--- /dev/null
+++ b/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/K8sResourceTest.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.resource.util;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.resource.Resource;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link K8sResource}. */
+@RunWith(JUnit4.class)
+public class K8sResourceTest {
+  private static final String K8S_CLUSTER_NAME = "cluster";
+  private static final String K8S_NAMESPACE_NAME = "namespace";
+  private static final String K8S_POD_NAME = "deployment-replica-pod";
+  private static final String K8S_DEPLOYMENT_NAME = "deployment";
+
+  @Test
+  public void create_K8sContainerResourceTest_Deprecated() {
+    Resource resource = K8sResource.create(K8S_CLUSTER_NAME, K8S_NAMESPACE_NAME, K8S_POD_NAME);
+    assertThat(resource.getType()).isEqualTo(K8sResource.TYPE);
+    assertThat(resource.getLabels())
+        .containsExactly(
+            K8sResource.CLUSTER_NAME_KEY,
+            K8S_CLUSTER_NAME,
+            K8sResource.NAMESPACE_NAME_KEY,
+            K8S_NAMESPACE_NAME,
+            K8sResource.POD_NAME_KEY,
+            K8S_POD_NAME,
+            K8sResource.DEPLOYMENT_NAME_KEY,
+            "");
+  }
+
+  @Test
+  public void create_K8sContainerResourceTest() {
+    Resource resource =
+        K8sResource.create(K8S_CLUSTER_NAME, K8S_NAMESPACE_NAME, K8S_POD_NAME, K8S_DEPLOYMENT_NAME);
+    assertThat(resource.getType()).isEqualTo(K8sResource.TYPE);
+    assertThat(resource.getLabels())
+        .containsExactly(
+            K8sResource.CLUSTER_NAME_KEY,
+            K8S_CLUSTER_NAME,
+            K8sResource.NAMESPACE_NAME_KEY,
+            K8S_NAMESPACE_NAME,
+            K8sResource.POD_NAME_KEY,
+            K8S_POD_NAME,
+            K8sResource.DEPLOYMENT_NAME_KEY,
+            K8S_DEPLOYMENT_NAME);
+  }
+
+  @Test
+  public void getDeploymentNameFromPodName() {
+    assertThat(K8sResource.getDeploymentNameFromPodName(K8S_POD_NAME))
+        .isEqualTo(K8S_DEPLOYMENT_NAME);
+    assertThat(K8sResource.getDeploymentNameFromPodName("")).isEqualTo("");
+    assertThat(K8sResource.getDeploymentNameFromPodName("simple-name")).isEqualTo("");
+    assertThat(K8sResource.getDeploymentNameFromPodName("deployment-name-replica-pod"))
+        .isEqualTo("deployment-name");
+  }
+}
diff --git a/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/ResourceUtilsTest.java b/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/ResourceUtilsTest.java
new file mode 100644
index 0000000..b939992
--- /dev/null
+++ b/contrib/resource_util/src/test/java/io/opencensus/contrib/resource/util/ResourceUtilsTest.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.resource.util;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.resource.Resource;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Tests for {@link ResourceUtils}. */
+@RunWith(JUnit4.class)
+public class ResourceUtilsTest {
+
+  @Test
+  public void testDetectResource() {
+    Resource resource = ResourceUtils.detectResource();
+    if (System.getenv("KUBERNETES_SERVICE_HOST") != null) {
+      assertThat(resource.getType()).isEqualTo(K8sResource.TYPE);
+    } else if (GcpMetadataConfig.isRunningOnGcp()) {
+      assertThat(resource.getType()).isEqualTo(HostResource.TYPE);
+      assertThat(resource.getLabels().get(CloudResource.PROVIDER_KEY))
+          .isEqualTo(CloudResource.PROVIDER_GCP);
+    } else if (AwsIdentityDocUtils.isRunningOnAws()) {
+      assertThat(resource.getType()).isEqualTo(HostResource.TYPE);
+      assertThat(resource.getLabels().get(CloudResource.PROVIDER_KEY))
+          .isEqualTo(CloudResource.PROVIDER_AWS);
+    } else {
+      assertThat(resource).isNotNull();
+      assertThat(resource.getType()).isNull();
+      assertThat(resource.getLabels()).isEmpty();
+    }
+  }
+}
diff --git a/contrib/spring/README.md b/contrib/spring/README.md
index 8c74029..7a7a59e 100644
--- a/contrib/spring/README.md
+++ b/contrib/spring/README.md
@@ -18,17 +18,17 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-api</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-contrib-spring</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-impl</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
     <scope>runtime</scope>
   </dependency>
   
@@ -44,10 +44,10 @@
 ```
 
 For Gradle add to your dependencies:
-```gradle
-compile 'io.opencensus:opencensus-api:0.16.1'
-compile 'io.opencensus:opencensus-contrib-spring:0.16.1'
-runtime 'io.opencensus:opencensus-impl:0.16.1'
+```groovy
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-contrib-spring:0.28.3'
+runtime 'io.opencensus:opencensus-impl:0.28.3'
 runtime 'org.springframework:spring-aspects:SPRING_VERSION'
 ```
 
diff --git a/contrib/spring/build.gradle b/contrib/spring/build.gradle
index 941afcc..068d10c 100644
--- a/contrib/spring/build.gradle
+++ b/contrib/spring/build.gradle
@@ -3,19 +3,27 @@
 apply plugin: 'java'
 
 [compileJava, compileTestJava].each() {
-    it.sourceCompatibility = 1.6
-    it.targetCompatibility = 1.6
+    it.sourceCompatibility = 1.8
+    it.targetCompatibility = 1.8
 }
 
 dependencies {
     compile project(':opencensus-api'),
+            project(':opencensus-contrib-http-util'),
+            project(':opencensus-contrib-http-servlet'),
             libraries.spring_aspects,
-            libraries.spring_context
+            libraries.spring_context,
+            libraries.findbugs_annotations,
+            libraries.spring_boot_starter_web2,
+            libraries.httpcomponents
 
     testCompile project(':opencensus-impl'),
             project(':opencensus-testing'),
             libraries.aspectj,
-            libraries.spring_test
+            libraries.spring_test,
+            libraries.sprint_boot_starter_tomcat,
+            libraries.spring_boot_test2,
+            libraries.javax_servlet
 
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
 }
diff --git a/contrib/spring/src/main/java/io/opencensus/contrib/spring/autoconfig/OpenCensusAutoConfiguration.java b/contrib/spring/src/main/java/io/opencensus/contrib/spring/autoconfig/OpenCensusAutoConfiguration.java
new file mode 100644
index 0000000..36e9b83
--- /dev/null
+++ b/contrib/spring/src/main/java/io/opencensus/contrib/spring/autoconfig/OpenCensusAutoConfiguration.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.spring.autoconfig;
+
+import io.opencensus.common.ExperimentalApi;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.core.Ordered;
+
+/**
+ * {@link org.springframework.boot.autoconfigure.EnableAutoConfiguration Auto-configuration} to
+ * enable tracing using OpenCensus.
+ *
+ * @since 0.23.0
+ */
+@Configuration
+@ComponentScan(basePackages = "io.opencensus")
+@ConditionalOnProperty(value = "opencensus.spring.enabled", matchIfMissing = true)
+@EnableConfigurationProperties(OpenCensusProperties.class)
+@ExperimentalApi
+public class OpenCensusAutoConfiguration {
+
+  /**
+   * TRACE_FILTER_ORDER determines the order in which {@link
+   * io.opencensus.contrib.spring.instrument.web.HttpServletFilter} is invoked. In order to capture
+   * accurate request processing latency it is desirable that the filter is invoked as early as
+   * possible. However, some security-related filters may need to execute before it, hence +5 is
+   * added.
+   */
+  public static final int TRACE_FILTER_ORDER = Ordered.HIGHEST_PRECEDENCE + 5;
+}
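
For context, a hedged sketch (not part of the patch; class name and behavior are hypothetical) of how the ordering constant can be consumed by another filter that should run just after the tracing filter:

```java
// Hypothetical companion filter; only TRACE_FILTER_ORDER comes from the patch above.
import io.opencensus.contrib.spring.autoconfig.OpenCensusAutoConfiguration;
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

@Component
@Order(OpenCensusAutoConfiguration.TRACE_FILTER_ORDER + 1)
class AfterTracingFilter implements Filter {
  @Override
  public void init(FilterConfig filterConfig) throws ServletException {}

  @Override
  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
      throws IOException, ServletException {
    // A larger order value means this filter runs after HttpServletFilter, i.e. inside the
    // server span that the tracing filter has already started.
    chain.doFilter(request, response);
  }

  @Override
  public void destroy() {}
}
```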
diff --git a/contrib/spring/src/main/java/io/opencensus/contrib/spring/autoconfig/OpenCensusProperties.java b/contrib/spring/src/main/java/io/opencensus/contrib/spring/autoconfig/OpenCensusProperties.java
new file mode 100644
index 0000000..9f8acee
--- /dev/null
+++ b/contrib/spring/src/main/java/io/opencensus/contrib/spring/autoconfig/OpenCensusProperties.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.spring.autoconfig;
+
+import static io.opencensus.contrib.spring.autoconfig.OpenCensusProperties.Trace.Propagation.TRACE_PROPAGATION_TRACE_CONTEXT;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+/**
+ * OpenCensus settings.
+ *
+ * @since 0.23.0
+ */
+@ConfigurationProperties("opencensus.spring")
+public class OpenCensusProperties {
+
+  private boolean enabled = true;
+  private Trace trace = new Trace();
+
+  public boolean isEnabled() {
+    return this.enabled;
+  }
+
+  public void setEnabled(boolean enabled) {
+    this.enabled = enabled;
+  }
+
+  public Trace getTrace() {
+    return this.trace;
+  }
+
+  public void setTrace(Trace trace) {
+    this.trace = trace;
+  }
+
+  /**
+   * Trace properties.
+   *
+   * @since 0.23
+   */
+  public static final class Trace {
+
+    public enum Propagation {
+      /**
+       * Specifies Trace Context format for span context propagation.
+       *
+       * @since 0.23
+       */
+      TRACE_PROPAGATION_TRACE_CONTEXT,
+
+      /**
+       * Specifies B3 format for span context propagation.
+       *
+       * @since 0.23
+       */
+      TRACE_PROPAGATION_B3,
+    }
+
+    private Propagation propagation = TRACE_PROPAGATION_TRACE_CONTEXT;
+    private boolean publicEndpoint = false;
+
+    public Propagation getPropagation() {
+      return propagation;
+    }
+
+    public void setPropagation(Propagation propagation) {
+      this.propagation = propagation;
+    }
+
+    public boolean isPublicEndpoint() {
+      return publicEndpoint;
+    }
+
+    public void setPublicEndpoint(boolean publicEndpoint) {
+      this.publicEndpoint = publicEndpoint;
+    }
+  }
+}
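
A minimal, hypothetical sketch of supplying these settings without an application.properties file; only the property keys and default values come from the patch, the bootstrap class is illustrative.

```java
// Hypothetical bootstrap class; the property keys match OpenCensusProperties above.
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class PropagationBootstrapExample {
  public static void main(String[] args) {
    // Override the defaults declared in OpenCensusProperties.Trace
    // (TRACE_PROPAGATION_TRACE_CONTEXT and publicEndpoint=false).
    System.setProperty("opencensus.spring.trace.propagation", "TRACE_PROPAGATION_B3");
    System.setProperty("opencensus.spring.trace.publicEndpoint", "true");
    SpringApplication.run(PropagationBootstrapExample.class, args);
  }
}
```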
diff --git a/contrib/spring/src/main/java/io/opencensus/contrib/spring/autoconfig/TraceWebAsyncClientAutoConfiguration.java b/contrib/spring/src/main/java/io/opencensus/contrib/spring/autoconfig/TraceWebAsyncClientAutoConfiguration.java
new file mode 100644
index 0000000..2ba46be
--- /dev/null
+++ b/contrib/spring/src/main/java/io/opencensus/contrib/spring/autoconfig/TraceWebAsyncClientAutoConfiguration.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.spring.autoconfig;
+
+import io.opencensus.common.ExperimentalApi;
+import io.opencensus.contrib.http.HttpExtractor;
+import io.opencensus.contrib.spring.autoconfig.OpenCensusProperties.Trace;
+import io.opencensus.contrib.spring.autoconfig.OpenCensusProperties.Trace.Propagation;
+import io.opencensus.contrib.spring.instrument.web.client.TracingAsyncClientHttpRequestInterceptor;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.propagation.TextFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import javax.annotation.PostConstruct;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.AutoConfigureAfter;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.http.HttpRequest;
+import org.springframework.http.client.ClientHttpResponse;
+
+/**
+ * {@link org.springframework.boot.autoconfigure.EnableAutoConfiguration Auto-configuration} enables
+ * span information propagation for {@link org.springframework.web.client.AsyncRestTemplate}.
+ *
+ * @since 0.23.0
+ */
+@Configuration
+@ComponentScan(basePackages = "io.opencensus")
+@ConditionalOnProperty(value = "opencensus.spring.enabled", matchIfMissing = true)
+@ConditionalOnClass(org.springframework.web.client.AsyncRestTemplate.class)
+@EnableConfigurationProperties(OpenCensusProperties.class)
+@AutoConfigureAfter(OpenCensusAutoConfiguration.class)
+@ExperimentalApi
+@SuppressWarnings("deprecation")
+public class TraceWebAsyncClientAutoConfiguration {
+  @Configuration
+  @ConditionalOnBean(org.springframework.web.client.AsyncRestTemplate.class)
+  @SuppressWarnings("initialization.fields.uninitialized")
+  static class AsyncRestTemplateCfg {
+
+    @Value("${opencensus.spring.trace.propagation:TRACE_PROPAGATION_TRACE_CONTEXT}")
+    private Trace.Propagation propagation;
+
+    @Autowired(required = false)
+    HttpExtractor<HttpRequest, ClientHttpResponse> extractor;
+
+    @Bean
+    public TracingAsyncClientHttpRequestInterceptor asyncTracingClientHttpRequestInterceptor() {
+      TextFormat propagator;
+
+      if (propagation != null && propagation == Propagation.TRACE_PROPAGATION_B3) {
+        propagator = Tracing.getPropagationComponent().getB3Format();
+      } else {
+        propagator = Tracing.getPropagationComponent().getTraceContextFormat();
+      }
+      return (TracingAsyncClientHttpRequestInterceptor)
+          TracingAsyncClientHttpRequestInterceptor.create(propagator, extractor);
+    }
+  }
+
+  @Configuration
+  protected static class TraceInterceptorConfiguration {
+
+    @Autowired(required = false)
+    @SuppressWarnings("initialization.fields.uninitialized")
+    private Collection<org.springframework.web.client.AsyncRestTemplate> restTemplates;
+
+    @Autowired
+    @SuppressWarnings("initialization.fields.uninitialized")
+    private TracingAsyncClientHttpRequestInterceptor clientInterceptor;
+
+    @PostConstruct
+    public void init() {
+      if (restTemplates != null) {
+        for (org.springframework.web.client.AsyncRestTemplate restTemplate : restTemplates) {
+          List<org.springframework.http.client.AsyncClientHttpRequestInterceptor> interceptors =
+              new ArrayList<org.springframework.http.client.AsyncClientHttpRequestInterceptor>(
+                  restTemplate.getInterceptors());
+          interceptors.add(clientInterceptor);
+          restTemplate.setInterceptors(interceptors);
+        }
+      }
+    }
+  }
+}
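
For applications that do not rely on this auto-configuration, a hypothetical manual wiring sketch that mirrors what `TraceInterceptorConfiguration.init()` does; the class and method names are illustrative, not part of the patch.

```java
// Hypothetical manual setup equivalent to the auto-configuration above.
import io.opencensus.contrib.spring.instrument.web.client.TracingAsyncClientHttpRequestInterceptor;
import java.util.ArrayList;
import java.util.List;
import org.springframework.http.client.AsyncClientHttpRequestInterceptor;

@SuppressWarnings("deprecation") // AsyncRestTemplate and its interceptor API are deprecated in Spring 5.
final class ManualAsyncTracingSetup {
  static org.springframework.web.client.AsyncRestTemplate tracedTemplate() {
    org.springframework.web.client.AsyncRestTemplate template =
        new org.springframework.web.client.AsyncRestTemplate();
    List<AsyncClientHttpRequestInterceptor> interceptors =
        new ArrayList<AsyncClientHttpRequestInterceptor>(template.getInterceptors());
    // Null arguments fall back to the W3C Trace Context propagator and the built-in extractor.
    interceptors.add(TracingAsyncClientHttpRequestInterceptor.create(null, null));
    template.setInterceptors(interceptors);
    return template;
  }
}
```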
diff --git a/contrib/spring/src/main/java/io/opencensus/contrib/spring/instrument/web/HttpServletFilter.java b/contrib/spring/src/main/java/io/opencensus/contrib/spring/instrument/web/HttpServletFilter.java
new file mode 100644
index 0000000..5dcc0ff
--- /dev/null
+++ b/contrib/spring/src/main/java/io/opencensus/contrib/spring/instrument/web/HttpServletFilter.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.spring.instrument.web;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import io.opencensus.contrib.http.servlet.OcHttpServletFilter;
+import io.opencensus.contrib.spring.autoconfig.OpenCensusAutoConfiguration;
+import io.opencensus.contrib.spring.autoconfig.OpenCensusProperties.Trace;
+import io.opencensus.contrib.spring.autoconfig.OpenCensusProperties.Trace.Propagation;
+import io.opencensus.trace.Tracing;
+import javax.servlet.Filter;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.core.annotation.Order;
+import org.springframework.stereotype.Component;
+
+@Component
+@Order(OpenCensusAutoConfiguration.TRACE_FILTER_ORDER)
+@SuppressFBWarnings("RI_REDUNDANT_INTERFACES")
+@SuppressWarnings("initialization.fields.uninitialized")
+public class HttpServletFilter extends OcHttpServletFilter implements Filter {
+
+  @Value("${opencensus.spring.trace.propagation:TRACE_PROPAGATION_TRACE_CONTEXT}")
+  private Trace.Propagation propagation;
+
+  @Value("${opencensus.spring.trace.publicEndpoint:false}")
+  private Boolean publicEndpoint;
+
+  @Override
+  public void init(FilterConfig filterConfig) throws ServletException {
+    ServletContext context = filterConfig.getServletContext();
+    if (propagation != null && propagation == Propagation.TRACE_PROPAGATION_B3) {
+      context.setAttribute(OC_TRACE_PROPAGATOR, Tracing.getPropagationComponent().getB3Format());
+    }
+    if (publicEndpoint) {
+      context.setInitParameter(OC_PUBLIC_ENDPOINT, publicEndpoint.toString());
+    }
+    super.init(filterConfig);
+  }
+}
diff --git a/contrib/spring/src/main/java/io/opencensus/contrib/spring/instrument/web/client/TracingAsyncClientHttpRequestInterceptor.java b/contrib/spring/src/main/java/io/opencensus/contrib/spring/instrument/web/client/TracingAsyncClientHttpRequestInterceptor.java
new file mode 100644
index 0000000..9f9ca62
--- /dev/null
+++ b/contrib/spring/src/main/java/io/opencensus/contrib/spring/instrument/web/client/TracingAsyncClientHttpRequestInterceptor.java
@@ -0,0 +1,198 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.spring.instrument.web.client;
+
+import io.opencensus.common.ExperimentalApi;
+import io.opencensus.common.Scope;
+import io.opencensus.contrib.http.HttpClientHandler;
+import io.opencensus.contrib.http.HttpExtractor;
+import io.opencensus.contrib.http.HttpRequestContext;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.propagation.TextFormat;
+import io.opencensus.trace.propagation.TextFormat.Setter;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import javax.annotation.Nullable;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.HttpRequest;
+import org.springframework.http.client.ClientHttpResponse;
+import org.springframework.util.concurrent.ListenableFuture;
+import org.springframework.util.concurrent.ListenableFutureCallback;
+
+/**
+ * This class intercepts {@link org.springframework.web.client.AsyncRestTemplate}.
+ *
+ * @since 0.23.0
+ */
+@SuppressWarnings("deprecation")
+public final class TracingAsyncClientHttpRequestInterceptor
+    implements org.springframework.http.client.AsyncClientHttpRequestInterceptor {
+  final Tracer tracer;
+  final HttpClientHandler<HttpRequest, ClientHttpResponse, HttpRequest> handler;
+
+  private static final Setter<HttpRequest> setter =
+      new Setter<HttpRequest>() {
+        @Override
+        public void put(HttpRequest carrier, String key, String value) {
+          HttpHeaders hdrs = carrier.getHeaders();
+          hdrs.set(key, value);
+        }
+      };
+
+  /**
+   * Create an instance of {@code TracingAsyncClientHttpRequestInterceptor}.
+   *
+   * @param extractor {@link HttpExtractor} to extract request- and response-specific attributes. If
+   *     it is null, the default extractor is used.
+   * @param propagator {@link TextFormat} to propagate the trace context to the remote peer. If it is
+   *     null, the default propagator (TraceContextFormat) is used.
+   * @return {@code TracingAsyncClientHttpRequestInterceptor}
+   * @since 0.23.0
+   */
+  public static TracingAsyncClientHttpRequestInterceptor create(
+      @Nullable TextFormat propagator,
+      @Nullable HttpExtractor<HttpRequest, ClientHttpResponse> extractor) {
+    return new TracingAsyncClientHttpRequestInterceptor(propagator, extractor);
+  }
+
+  TracingAsyncClientHttpRequestInterceptor(
+      @Nullable TextFormat propagator,
+      @Nullable HttpExtractor<HttpRequest, ClientHttpResponse> extractor) {
+
+    tracer = Tracing.getTracer();
+
+    if (propagator == null) {
+      propagator = Tracing.getPropagationComponent().getTraceContextFormat();
+    }
+
+    if (extractor == null) {
+      extractor = (HttpExtractor<HttpRequest, ClientHttpResponse>) new HttpClientExtractor();
+    }
+
+    handler =
+        new HttpClientHandler<HttpRequest, ClientHttpResponse, HttpRequest>(
+            Tracing.getTracer(), extractor, propagator, setter);
+  }
+
+  /**
+   * Intercepts HTTP requests and starts a span.
+   *
+   * @since 0.23.0
+   */
+  public ListenableFuture<ClientHttpResponse> intercept(
+      HttpRequest request,
+      byte[] body,
+      org.springframework.http.client.AsyncClientHttpRequestExecution execution)
+      throws IOException {
+    HttpRequestContext context = handler.handleStart(tracer.getCurrentSpan(), request, request);
+
+    Scope ws = tracer.withSpan(handler.getSpanFromContext(context));
+    try {
+      ListenableFuture<ClientHttpResponse> result = execution.executeAsync(request, body);
+      result.addCallback(
+          new TracingAsyncClientHttpRequestInterceptor.TraceListenableFutureCallback(
+              context, handler));
+      return result;
+    } catch (IOException e) {
+      handler.handleEnd(context, null, null, e);
+      throw e;
+    } finally {
+      if (ws != null) {
+        ws.close();
+      }
+    }
+  }
+
+  static final class TraceListenableFutureCallback
+      implements ListenableFutureCallback<ClientHttpResponse> {
+    HttpRequestContext context;
+    final HttpClientHandler<HttpRequest, ClientHttpResponse, HttpRequest> handler;
+
+    TraceListenableFutureCallback(
+        HttpRequestContext context,
+        HttpClientHandler<HttpRequest, ClientHttpResponse, HttpRequest> handler) {
+      this.context = context;
+      this.handler = handler;
+    }
+
+    public void onFailure(Throwable ex) {
+      handler.handleEnd(context, null, null, ex);
+    }
+
+    public void onSuccess(@Nullable ClientHttpResponse result) {
+      handler.handleEnd(context, null, result, (Throwable) null);
+    }
+  }
+
+  /** This class extracts attributes from {@link HttpRequest} and {@link ClientHttpResponse}. */
+  @ExperimentalApi
+  static final class HttpClientExtractor extends HttpExtractor<HttpRequest, ClientHttpResponse> {
+    @Override
+    public String getHost(HttpRequest request) {
+      return request.getURI().getHost();
+    }
+
+    @Override
+    @Nullable
+    public String getMethod(HttpRequest request) {
+      HttpMethod method = request.getMethod();
+      if (method != null) {
+        return method.toString();
+      }
+      return null;
+    }
+
+    @Override
+    public String getPath(HttpRequest request) {
+      return request.getURI().getPath();
+    }
+
+    @Override
+    @Nullable
+    public String getUserAgent(HttpRequest request) {
+      return request.getHeaders().getFirst("User-Agent");
+    }
+
+    @Override
+    public int getStatusCode(@Nullable ClientHttpResponse response) {
+      if (response != null) {
+        try {
+          return response.getStatusCode().value();
+        } catch (Exception e) {
+          return 0;
+        }
+      }
+      return 0;
+    }
+
+    @Override
+    public String getRoute(HttpRequest request) {
+      return "";
+    }
+
+    @Override
+    public String getUrl(HttpRequest request) {
+      try {
+        return request.getURI().toURL().toString();
+      } catch (MalformedURLException e) {
+        return "";
+      }
+    }
+  }
+}
diff --git a/contrib/spring/src/main/resources/META-INF/additional-spring-configuration-metadata.json b/contrib/spring/src/main/resources/META-INF/additional-spring-configuration-metadata.json
new file mode 100644
index 0000000..803fedd
--- /dev/null
+++ b/contrib/spring/src/main/resources/META-INF/additional-spring-configuration-metadata.json
@@ -0,0 +1,8 @@
+{"properties": [
+  {
+    "name": "opencensus.spring.enabled",
+    "type": "java.lang.Boolean",
+    "description": "Enable Spring Integration Opencensus Instrumentation.",
+    "defaultValue": true
+  }
+]}
\ No newline at end of file
diff --git a/contrib/spring/src/main/resources/META-INF/spring.factories b/contrib/spring/src/main/resources/META-INF/spring.factories
new file mode 100644
index 0000000..e01f69b
--- /dev/null
+++ b/contrib/spring/src/main/resources/META-INF/spring.factories
@@ -0,0 +1,4 @@
+# Auto Configuration
+org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
+io.opencensus.contrib.spring.autoconfig.OpenCensusAutoConfiguration,\
+io.opencensus.contrib.spring.autoconfig.TraceWebAsyncClientAutoConfiguration
diff --git a/contrib/spring/src/test/java/io/opencensus/contrib/spring/instrument/web/AbstractMvcIntegrationTest.java b/contrib/spring/src/test/java/io/opencensus/contrib/spring/instrument/web/AbstractMvcIntegrationTest.java
new file mode 100644
index 0000000..223dc32
--- /dev/null
+++ b/contrib/spring/src/test/java/io/opencensus/contrib/spring/instrument/web/AbstractMvcIntegrationTest.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.spring.instrument.web;
+
+import org.junit.Before;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.test.context.web.WebAppConfiguration;
+import org.springframework.test.web.servlet.MockMvc;
+import org.springframework.test.web.servlet.setup.DefaultMockMvcBuilder;
+import org.springframework.test.web.servlet.setup.MockMvcBuilders;
+import org.springframework.web.context.WebApplicationContext;
+import org.springframework.web.servlet.view.InternalResourceViewResolver;
+
+@WebAppConfiguration
+public abstract class AbstractMvcIntegrationTest {
+
+  @Autowired protected WebApplicationContext webApplicationContext;
+
+  protected MockMvc mockMvc;
+
+  @Before
+  public void setup() {
+    InternalResourceViewResolver viewResolver = new InternalResourceViewResolver();
+    viewResolver.setPrefix("/WEB-INF/jsp/view/");
+    viewResolver.setSuffix(".jsp");
+    DefaultMockMvcBuilder mockMvcBuilder =
+        MockMvcBuilders.webAppContextSetup(this.webApplicationContext);
+    configureMockMvcBuilder(mockMvcBuilder);
+    this.mockMvc = mockMvcBuilder.build();
+  }
+
+  /**
+   * Override in a subclass to modify the mockMvcBuilder configuration (e.g. to add a filter).
+   *
+   * <p>The superclass implementation should still be called.
+   */
+  protected void configureMockMvcBuilder(DefaultMockMvcBuilder mockMvcBuilder) {}
+}
diff --git a/contrib/spring/src/test/java/io/opencensus/contrib/spring/instrument/web/HttpServletFilterIntegrationTests.java b/contrib/spring/src/test/java/io/opencensus/contrib/spring/instrument/web/HttpServletFilterIntegrationTests.java
new file mode 100644
index 0000000..70bc9f6
--- /dev/null
+++ b/contrib/spring/src/test/java/io/opencensus/contrib/spring/instrument/web/HttpServletFilterIntegrationTests.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.spring.instrument.web;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.testing.export.TestHandler;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.config.TraceParams;
+import io.opencensus.trace.export.SpanData;
+import io.opencensus.trace.export.SpanExporter;
+import io.opencensus.trace.samplers.Samplers;
+import java.util.List;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.http.MediaType;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit4.SpringRunner;
+import org.springframework.test.web.servlet.MvcResult;
+import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
+import org.springframework.test.web.servlet.setup.DefaultMockMvcBuilder;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+@RunWith(SpringRunner.class)
+@SpringBootTest(
+    classes = HttpServletFilterIntegrationTests.Config.class,
+    properties = "opencensus.spring.enabled=true")
+@ContextConfiguration(
+    locations = {"file:src/test/resources/beans/HttpServletFilterIntegrationTest-context.xml"})
+public class HttpServletFilterIntegrationTests extends AbstractMvcIntegrationTest {
+
+  private TestHandler handler;
+
+  @Autowired HttpServletFilter httpServletFilter;
+
+  @Before
+  @Override
+  public void setup() {
+    super.setup();
+    handler = new TestHandler();
+
+    SpanExporter exporter = Tracing.getExportComponent().getSpanExporter();
+    exporter.registerHandler("testing", handler);
+
+    TraceParams params =
+        Tracing.getTraceConfig()
+            .getActiveTraceParams()
+            .toBuilder()
+            .setSampler(Samplers.alwaysSample())
+            .build();
+    Tracing.getTraceConfig().updateActiveTraceParams(params);
+  }
+
+  @After
+  public void teardown() {
+    SpanExporter exporter = Tracing.getExportComponent().getSpanExporter();
+    exporter.unregisterHandler("testing");
+  }
+
+  @Override
+  protected void configureMockMvcBuilder(DefaultMockMvcBuilder mockMvcBuilder) {
+    mockMvcBuilder.addFilters(this.httpServletFilter);
+  }
+
+  @Test(timeout = 10000)
+  public void shouldCreateServerTrace() throws Exception {
+    sendRequest();
+
+    List<SpanData> data = handler.waitForExport(1);
+    assertThat(data).isNotNull();
+    assertThat(data.size()).isEqualTo(1);
+    assertThat(data.get(0).getName()).isEqualTo("/foo");
+  }
+
+  private MvcResult sendRequest() throws Exception {
+    MvcResult result =
+        this.mockMvc
+            .perform(MockMvcRequestBuilders.get("/foo").accept(MediaType.TEXT_PLAIN))
+            .andReturn();
+    return result;
+  }
+
+  @Configuration
+  protected static class Config {
+
+    @RestController
+    public static class TestController {
+
+      @RequestMapping("/foo")
+      public String ping() {
+        return "fooResult";
+      }
+    }
+  }
+}
diff --git a/contrib/spring/src/test/java/io/opencensus/contrib/spring/instrument/web/TraceWebAsyncClientAutoConfigurationTest.java b/contrib/spring/src/test/java/io/opencensus/contrib/spring/instrument/web/TraceWebAsyncClientAutoConfigurationTest.java
new file mode 100644
index 0000000..11f624c
--- /dev/null
+++ b/contrib/spring/src/test/java/io/opencensus/contrib/spring/instrument/web/TraceWebAsyncClientAutoConfigurationTest.java
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.contrib.spring.instrument.web;
+
+import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.trace.Span.Kind.CLIENT;
+import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT;
+
+import io.opencensus.common.Scope;
+import io.opencensus.contrib.http.util.HttpTraceAttributeConstants;
+import io.opencensus.testing.export.TestHandler;
+import io.opencensus.trace.AttributeValue;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.config.TraceParams;
+import io.opencensus.trace.export.SpanData;
+import io.opencensus.trace.export.SpanExporter;
+import io.opencensus.trace.samplers.Samplers;
+import java.util.List;
+import java.util.concurrent.CancellationException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.core.annotation.Order;
+import org.springframework.core.env.Environment;
+import org.springframework.http.ResponseEntity;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+import org.springframework.util.concurrent.ListenableFuture;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+@RunWith(SpringJUnit4ClassRunner.class)
+@SpringBootTest(
+    classes = {TraceWebAsyncClientAutoConfigurationTest.TestConfiguration.class},
+    properties = "opencensus.spring.enabled=true",
+    webEnvironment = RANDOM_PORT)
+@SuppressWarnings("deprecation")
+public class TraceWebAsyncClientAutoConfigurationTest {
+  @Autowired org.springframework.web.client.AsyncRestTemplate asyncRestTemplate;
+
+  @Autowired Environment environment;
+
+  Tracer tracer;
+
+  private TestHandler handler;
+
+  @Before
+  public void setup() {
+    handler = new TestHandler();
+
+    SpanExporter exporter = Tracing.getExportComponent().getSpanExporter();
+    exporter.registerHandler("testing", handler);
+
+    TraceParams params =
+        Tracing.getTraceConfig()
+            .getActiveTraceParams()
+            .toBuilder()
+            .setSampler(Samplers.alwaysSample())
+            .build();
+    Tracing.getTraceConfig().updateActiveTraceParams(params);
+  }
+
+  @Test(timeout = 10000)
+  @Order(1)
+  public void should_close_span_upon_success_callback()
+      throws ExecutionException, InterruptedException {
+    tracer = Tracing.getTracer();
+    Span initialSpan = tracer.spanBuilder("initial").startSpan();
+
+    try (Scope ws = tracer.withSpan(initialSpan)) {
+      ListenableFuture<ResponseEntity<String>> future =
+          asyncRestTemplate.getForEntity("http://localhost:" + port() + "/async", String.class);
+      String result = future.get().getBody();
+
+      assertThat(result).isEqualTo("async");
+    } finally {
+      initialSpan.end();
+    }
+
+    // 3 spans are initial, client, server.
+    List<SpanData> spans = handler.waitForExport(3);
+    SpanData clientSpan = null;
+    for (SpanData span : spans) {
+      if (span.getKind() == CLIENT) {
+        clientSpan = span;
+        assertThat(clientSpan.getName()).isEqualTo("/async");
+        assertThat(clientSpan.getStatus().isOk()).isTrue();
+        assertThat(
+                clientSpan
+                    .getAttributes()
+                    .getAttributeMap()
+                    .get(HttpTraceAttributeConstants.HTTP_METHOD))
+            .isEqualTo(AttributeValue.stringAttributeValue("GET"));
+        assertThat(
+                clientSpan
+                    .getAttributes()
+                    .getAttributeMap()
+                    .get(HttpTraceAttributeConstants.HTTP_HOST))
+            .isEqualTo(AttributeValue.stringAttributeValue("localhost"));
+        assertThat(
+                clientSpan
+                    .getAttributes()
+                    .getAttributeMap()
+                    .get(HttpTraceAttributeConstants.HTTP_PATH))
+            .isEqualTo(AttributeValue.stringAttributeValue("/async"));
+        assertThat(clientSpan.getKind()).isEqualTo(CLIENT);
+        break;
+      }
+    }
+    assertThat(clientSpan).isNotNull();
+  }
+
+  @Test(timeout = 10000)
+  @Order(2)
+  public void should_close_span_upon_failure_callback() {
+    boolean exceptionOccurred = false;
+    final ListenableFuture<ResponseEntity<String>> future;
+    try {
+      future = asyncRestTemplate.getForEntity("http://localhost:" + port() + "/fail", String.class);
+      new Thread(
+              new Runnable() {
+                @Override
+                public void run() {
+                  try {
+                    Thread.sleep(100);
+                  } catch (Exception e) {
+                    System.out.println("exception " + e);
+                  }
+                  future.cancel(true);
+                }
+              })
+          .start();
+      future.get(500, TimeUnit.MILLISECONDS);
+    } catch (CancellationException e) {
+      assertThat(e).isInstanceOf(CancellationException.class);
+      exceptionOccurred = true;
+    } catch (Exception e) {
+      Assert.fail("unexpected exception:" + e);
+    }
+    assertThat(exceptionOccurred).isTrue();
+
+    List<SpanData> spans = handler.waitForExport(1);
+    System.out.println("Spans " + spans.toString());
+    SpanData span = spans.get(0);
+    assertThat(span.getName()).isEqualTo("/fail");
+    assertThat(span.getStatus().isOk()).isFalse();
+    assertThat(span.getAttributes().getAttributeMap().get(HttpTraceAttributeConstants.HTTP_METHOD))
+        .isEqualTo(AttributeValue.stringAttributeValue("GET"));
+    assertThat(span.getAttributes().getAttributeMap().get(HttpTraceAttributeConstants.HTTP_HOST))
+        .isEqualTo(AttributeValue.stringAttributeValue("localhost"));
+    assertThat(span.getAttributes().getAttributeMap().get(HttpTraceAttributeConstants.HTTP_PATH))
+        .isEqualTo(AttributeValue.stringAttributeValue("/fail"));
+    assertThat(
+            span.getAttributes()
+                .getAttributeMap()
+                .get(HttpTraceAttributeConstants.HTTP_STATUS_CODE))
+        .isEqualTo(AttributeValue.longAttributeValue(0));
+    assertThat(span.getKind()).isEqualTo(CLIENT);
+  }
+
+  int port() {
+    Integer port = environment.getProperty("local.server.port", Integer.class);
+    if (port != null) {
+      return port;
+    }
+    return 0;
+  }
+
+  @EnableAutoConfiguration
+  @Configuration
+  public static class TestConfiguration {
+
+    @Bean
+    org.springframework.web.client.AsyncRestTemplate restTemplate() {
+      return new org.springframework.web.client.AsyncRestTemplate();
+    }
+  }
+
+  @RestController
+  public static class MyController {
+
+    @RequestMapping("/async")
+    String foo() {
+      try {
+        Thread.sleep(100);
+      } catch (Exception e) {
+        System.out.println(e);
+      }
+      return "async";
+    }
+
+    @RequestMapping("/fail")
+    String fail() throws Exception {
+      Thread.sleep(1000);
+      throw new RuntimeException("fail");
+    }
+  }
+}
diff --git a/contrib/spring/src/test/resources/META-INF/spring.factories b/contrib/spring/src/test/resources/META-INF/spring.factories
new file mode 100644
index 0000000..d0ef10e
--- /dev/null
+++ b/contrib/spring/src/test/resources/META-INF/spring.factories
@@ -0,0 +1,4 @@
+# Auto Configuration
+org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
+io.opencensus.contrib.spring.autoconfig.OpenCensusAutoConfiguration,\
+io.opencensus.contrib.spring.autoconfig.TraceWebAsyncClientAutoConfiguration
\ No newline at end of file
diff --git a/contrib/spring/src/test/resources/beans/HttpServletFilterIntegrationTest-context.xml b/contrib/spring/src/test/resources/beans/HttpServletFilterIntegrationTest-context.xml
new file mode 100644
index 0000000..6950063
--- /dev/null
+++ b/contrib/spring/src/test/resources/beans/HttpServletFilterIntegrationTest-context.xml
@@ -0,0 +1,9 @@
+<beans xmlns="http://www.springframework.org/schema/beans"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="
+		http://www.springframework.org/schema/beans
+		http://www.springframework.org/schema/beans/spring-beans-3.2.xsd">
+
+  <bean class="io.opencensus.contrib.spring.instrument.web.HttpServletFilter"/>
+
+</beans>
\ No newline at end of file
diff --git a/contrib/spring_sleuth_v1x/README.md b/contrib/spring_sleuth_v1x/README.md
index 3345783..a54eb1f 100644
--- a/contrib/spring_sleuth_v1x/README.md
+++ b/contrib/spring_sleuth_v1x/README.md
@@ -22,7 +22,7 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-contrib-spring-sleuth</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
     <exclusions>
       <exclusion>
 	    <groupId>org.springframework.cloud</groupId>
@@ -38,8 +38,8 @@
 ```
 
 For Gradle add to your dependencies:
-```gradle
-compile 'io.opencensus:opencensus-contrib-spring-sleuth:0.16.1'
+```groovy
+compile 'io.opencensus:opencensus-contrib-spring-sleuth:0.28.3'
 ```
 
 [travis-image]: https://travis-ci.org/census-instrumentation/opencensus-java.svg?branch=master
diff --git a/contrib/spring_sleuth_v1x/src/main/java/io/opencensus/contrib/spring/sleuth/v1x/OpenCensusSleuthSpanContextHolder.java b/contrib/spring_sleuth_v1x/src/main/java/io/opencensus/contrib/spring/sleuth/v1x/OpenCensusSleuthSpanContextHolder.java
index db6a355..37bf236 100644
--- a/contrib/spring_sleuth_v1x/src/main/java/io/opencensus/contrib/spring/sleuth/v1x/OpenCensusSleuthSpanContextHolder.java
+++ b/contrib/spring_sleuth_v1x/src/main/java/io/opencensus/contrib/spring/sleuth/v1x/OpenCensusSleuthSpanContextHolder.java
@@ -18,7 +18,8 @@
 
 import io.grpc.Context;
 import io.opencensus.common.ExperimentalApi;
-import io.opencensus.trace.unsafe.ContextUtils;
+import io.opencensus.trace.ContextHandle;
+import io.opencensus.trace.unsafe.ContextHandleUtils;
 import org.apache.commons.logging.Log;
 import org.springframework.cloud.sleuth.Span;
 import org.springframework.core.NamedThreadLocal;
@@ -136,7 +137,7 @@
     final boolean autoClose;
     @javax.annotation.Nullable final SpanContext parent;
     final OpenCensusSleuthSpan ocSpan;
-    final Context ocCurrentContext;
+    final ContextHandle ocCurrentContext;
 
     private SpanContext(Span span, boolean autoClose) {
       this.span = span;
@@ -144,7 +145,7 @@
       this.parent = CURRENT_SPAN.get();
       this.ocSpan = new OpenCensusSleuthSpan(span);
       this.ocCurrentContext =
-          Context.current().withValue(ContextUtils.CONTEXT_SPAN_KEY, this.ocSpan);
+          ContextHandleUtils.withValue(ContextHandleUtils.currentContext(), this.ocSpan);
     }
   }
 
diff --git a/contrib/spring_starter/README.md b/contrib/spring_starter/README.md
new file mode 100644
index 0000000..495de56
--- /dev/null
+++ b/contrib/spring_starter/README.md
@@ -0,0 +1,84 @@
+# OpenCensus Spring Starter
+[![Build Status][travis-image]][travis-url]
+[![Windows Build Status][appveyor-image]][appveyor-url]
+[![Maven Central][maven-image]][maven-url]
+
+The *OpenCensus Spring Starter for Java* is a starter package that includes the
+packages required to enable tracing with OpenCensus when working with [Spring Web][spring-web-url].
+
+This version is compatible with [Spring Boot 2.0][spring-boot-2.0-url].
+
+## Servlet and AsyncRestTemplate Tracing
+
+Enable tracing on RestController (server side) and AsyncRestTemplate (client side) by simply including
+opencensus-contrib-spring-starter in your dependencies and initializing an exporter. It automatically
+traces your HTTP requests and collects the stats associated with them.
+
+You still need to register an exporter and the views you want to record.
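+
+A minimal registration sketch, assuming the logging trace exporter and the Prometheus stats exporter
+(the class name `TracingSetup` is only illustrative; other OpenCensus exporters are registered the
+same way):
+
+```java
+import io.opencensus.exporter.stats.prometheus.PrometheusStatsCollector;
+import io.opencensus.exporter.trace.logging.LoggingTraceExporter;
+
+public class TracingSetup {
+  public static void main(String[] args) {
+    // Register a trace exporter so that sampled spans are exported somewhere;
+    // the logging exporter is used here purely as an example.
+    LoggingTraceExporter.register();
+
+    // Register a stats exporter so that the recorded views can be scraped.
+    PrometheusStatsCollector.createAndRegister();
+
+    // The views you care about still need to be registered, e.g. via
+    // Stats.getViewManager().registerView(yourView).
+  }
+}
+```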
+
+### Dependencies
+
+#### Maven
+
+```xml
+<dependencies>
+    <dependency>
+        <groupId>io.opencensus</groupId>
+        <artifactId>opencensus-contrib-spring-starter</artifactId>
+        <version>0.23.0</version>
+    </dependency>
+</dependencies>
+
+```
+
+#### Gradle
+```groovy
+dependencyManagement {
+    imports {
+        mavenBom "io.opencensus:opencensus-contrib-spring-starter:0.23.0"
+    }
+}
+dependencies {
+    compile 'io.opencensus:opencensus-contrib-spring:0.23.0'
+}
+```
+
+### Tracing Properties
+
+Optionally, configure one or more tracing properties in the `application.properties` file.
+
+
+#### PublicEndpoint
+
+If a servlet serves public endpoints (an untrusted boundary), set this property to `true`.
+When set to `true`, the incoming trace context is added as a parent link instead of as a parent.
+By default it is `false`, in which case the incoming trace context is used as the parent.
+
+```
+opencensus.spring.trace.publicEndpoint = true
+```
+
+#### Propagation
+
+By default this property is set to TRACE_PROPAGATION_TRACE_CONTEXT, which uses the
+[W3C TraceContext](https://github.com/census-instrumentation/opencensus-java/blob/master/api/src/main/java/io/opencensus/trace/propagation/TextFormat.java)
+propagator to propagate trace context. To use the [B3 format](https://github.com/census-instrumentation/opencensus-java/blob/master/impl_core/src/main/java/io/opencensus/implcore/trace/propagation/B3Format.java)
+instead, set the property to TRACE_PROPAGATION_B3:
+
+```
+opencensus.spring.trace.propagation = TRACE_PROPAGATION_B3
+```
+
+#### Java Versions
+
+Java 6 or above is required to use this artifact.
+
+[travis-image]: https://travis-ci.org/census-instrumentation/opencensus-java.svg?branch=master
+[travis-url]: https://travis-ci.org/census-instrumentation/opencensus-java
+[appveyor-image]: https://ci.appveyor.com/api/projects/status/hxthmpkxar4jq4be/branch/master?svg=true
+[appveyor-url]: https://ci.appveyor.com/project/opencensusjavateam/opencensus-java/branch/master
+[maven-image]: https://maven-badges.herokuapp.com/maven-central/io.opencensus/opencensus-contrib-spring-starter/badge.svg
+[maven-url]: https://maven-badges.herokuapp.com/maven-central/io.opencensus/opencensus-contrib-spring-starter
+[spring-boot-2.0-url]: https://github.com/spring-projects/spring-boot/tree/2.0.x
+[spring-web-url]: https://github.com/spring-projects/spring-framework/tree/master/spring-web
diff --git a/contrib/spring_starter/build.gradle b/contrib/spring_starter/build.gradle
new file mode 100644
index 0000000..96691a7
--- /dev/null
+++ b/contrib/spring_starter/build.gradle
@@ -0,0 +1,18 @@
+plugins {
+    id 'java'
+}
+
+description = 'OpenCensus Spring Cloud Starter'
+
+[compileJava, compileTestJava].each() {
+    it.sourceCompatibility = 1.6
+    it.targetCompatibility = 1.6
+}
+
+dependencies {
+    compile project(':opencensus-api'),
+            project(':opencensus-contrib-spring'),
+            project(':opencensus-impl')
+
+    signature "org.codehaus.mojo.signature:java16:+@signature"
+}
diff --git a/contrib/spring_starter/src/main/resources/META-INF/spring.provides b/contrib/spring_starter/src/main/resources/META-INF/spring.provides
new file mode 100644
index 0000000..ffe5e4d
--- /dev/null
+++ b/contrib/spring_starter/src/main/resources/META-INF/spring.provides
@@ -0,0 +1 @@
+provides: opencensus-contrib-spring
\ No newline at end of file
diff --git a/contrib/zpages/README.md b/contrib/zpages/README.md
index 2a535ce..c7895ee 100644
--- a/contrib/zpages/README.md
+++ b/contrib/zpages/README.md
@@ -16,27 +16,27 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-api</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-contrib-zpages</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-impl</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
     <scope>runtime</scope>
   </dependency>
 </dependencies>
 ```
 
 For Gradle add to your dependencies:
-```gradle
-compile 'io.opencensus:opencensus-api:0.16.1'
-compile 'io.opencensus:opencensus-contrib-zpages:0.16.1'
-runtime 'io.opencensus:opencensus-impl:0.16.1'
+```groovy
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-contrib-zpages:0.28.3'
+runtime 'io.opencensus:opencensus-impl:0.28.3'
 ```
 
 ### Register the Z-Pages
diff --git a/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/StatszZPageHandler.java b/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/StatszZPageHandler.java
index 00c72d6..28f9502 100644
--- a/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/StatszZPageHandler.java
+++ b/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/StatszZPageHandler.java
@@ -66,7 +66,13 @@
 */
 
 /** HTML page formatter for all exported {@link View}s. */
-@SuppressWarnings("deprecation")
+@SuppressWarnings({
+  "deprecation",
+
+  // This library is not supposed to be Android or Java 7 compatible.
+  "AndroidJdkLibsChecker",
+  "Java7ApiChecker"
+})
 final class StatszZPageHandler extends ZPageHandler {
 
   private static final Object monitor = new Object();
diff --git a/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/TracezZPageHandler.java b/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/TracezZPageHandler.java
index f6a3699..9c5ef18 100644
--- a/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/TracezZPageHandler.java
+++ b/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/TracezZPageHandler.java
@@ -222,7 +222,7 @@
               new ArrayList<>(
                   runningSpanStore.getRunningSpans(RunningSpanStore.Filter.create(spanName, 0)));
           // Sort active spans incremental.
-          Collections.sort(spans, new SpanDataComparator(true));
+          Collections.sort(spans, new SpanDataComparator(/* incremental= */ true));
         } else {
           String subtypeStr = queryMap.get(HEADER_SAMPLES_SUB_TYPE);
           if (subtypeStr != null) {
@@ -252,7 +252,7 @@
                               latencyBucketBoundaries.getLatencyUpperNs(),
                               0)));
               // Sort sampled spans decremental.
-              Collections.sort(spans, new SpanDataComparator(false));
+              Collections.sort(spans, new SpanDataComparator(/* incremental= */ false));
             }
           }
         }
@@ -296,14 +296,14 @@
                               .encode(span.getContext().getSpanId().getBytes())))
                   .build());
 
-      emitSingleSpan(out, formatter, span);
+      emitSingleSpan(formatter, span);
     }
     out.write("</pre>\n");
   }
 
   // Emits the internal html for a single {@link SpanData}.
   @SuppressWarnings("deprecation")
-  private static void emitSingleSpan(PrintWriter out, Formatter formatter, SpanData span) {
+  private static void emitSingleSpan(Formatter formatter, SpanData span) {
     Calendar calendar = Calendar.getInstance();
     calendar.setTimeInMillis(TimeUnit.SECONDS.toMillis(span.getStartTimestamp().getSeconds()));
     long microsField = TimeUnit.NANOSECONDS.toMicros(span.getStartTimestamp().getNanos());
diff --git a/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/ZPageHandlers.java b/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/ZPageHandlers.java
index 710e9a2..73daf64 100644
--- a/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/ZPageHandlers.java
+++ b/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/ZPageHandlers.java
@@ -79,6 +79,7 @@
       StatszZPageHandler.create(Stats.getViewManager());
 
   private static final Object monitor = new Object();
+  private static volatile boolean isRunningSpanStoreInitialized = false;
 
   @GuardedBy("monitor")
   @Nullable
@@ -94,10 +95,15 @@
    * <p>If no sampled spans based on latency and error codes are available for a given name, make
    * sure that the span name is registered to the {@code SampledSpanStore}.
    *
+   * <p>When this method is called for the first time, {@link
+   * io.opencensus.trace.export.RunningSpanStore} will be enabled automatically. Subsequent calls
+   * won't update {@link io.opencensus.trace.export.RunningSpanStore} again.
+   *
    * @return a {@code ZPageHandler} for tracing debug.
    * @since 0.6
    */
   public static ZPageHandler getTracezZPageHandler() {
+    enableRunningSpanStore();
     return tracezZPageHandler;
   }
 
@@ -196,5 +202,19 @@
     }
   }
 
+  // Sets the maximum number of elements as Integer.MAX_VALUE to enable RunningSpanStore.
+  // This method will only execute once even if called multiple times.
+  private static void enableRunningSpanStore() {
+    if (!isRunningSpanStoreInitialized) {
+      synchronized (monitor) {
+        if (isRunningSpanStoreInitialized) {
+          return; // Already initialized, small race
+        }
+        Tracing.getExportComponent().getRunningSpanStore().setMaxNumberOfSpans(Integer.MAX_VALUE);
+        isRunningSpanStoreInitialized = true;
+      }
+    }
+  }
+
   private ZPageHandlers() {}
 }
diff --git a/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/ZPageHttpHandler.java b/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/ZPageHttpHandler.java
index 975bdfc..6881205 100644
--- a/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/ZPageHttpHandler.java
+++ b/contrib/zpages/src/main/java/io/opencensus/contrib/zpages/ZPageHttpHandler.java
@@ -22,11 +22,12 @@
 import com.sun.net.httpserver.HttpHandler;
 import io.opencensus.common.Scope;
 import io.opencensus.trace.AttributeValue;
+import io.opencensus.trace.EndSpanOptions;
+import io.opencensus.trace.Span;
 import io.opencensus.trace.Tracer;
 import io.opencensus.trace.Tracing;
 import java.io.IOException;
 import java.net.URI;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -34,8 +35,11 @@
 
 /** An {@link HttpHandler} that can be used to render HTML pages using any {@code ZPageHandler}. */
 final class ZPageHttpHandler implements HttpHandler {
+
   private static final Tracer tracer = Tracing.getTracer();
   private static final String HTTP_SERVER = "HttpServer";
+  private static final EndSpanOptions END_SPAN_OPTIONS =
+      EndSpanOptions.builder().setSampleToLocalSpanStore(true).build();
   private final ZPageHandler zpageHandler;
   private final String httpServerSpanName;
 
@@ -43,28 +47,24 @@
   ZPageHttpHandler(ZPageHandler zpageHandler) {
     this.zpageHandler = zpageHandler;
     this.httpServerSpanName = HTTP_SERVER + zpageHandler.getUrlPath();
-    Tracing.getExportComponent()
-        .getSampledSpanStore()
-        .registerSpanNamesForCollection(Arrays.asList(httpServerSpanName));
   }
 
   @Override
   public final void handle(HttpExchange httpExchange) throws IOException {
-    try (Scope ss =
+    Span span =
         tracer
             .spanBuilderWithExplicitParent(httpServerSpanName, null)
             .setRecordEvents(true)
-            .startScopedSpan()) {
-      tracer
-          .getCurrentSpan()
-          .putAttribute(
-              "/http/method ",
-              AttributeValue.stringAttributeValue(httpExchange.getRequestMethod()));
+            .startSpan();
+    try (Scope ss = tracer.withSpan(span)) {
+      span.putAttribute(
+          "/http/method ", AttributeValue.stringAttributeValue(httpExchange.getRequestMethod()));
       httpExchange.sendResponseHeaders(200, 0);
       zpageHandler.emitHtml(
           uriQueryToMap(httpExchange.getRequestURI()), httpExchange.getResponseBody());
     } finally {
       httpExchange.close();
+      span.end(END_SPAN_OPTIONS);
     }
   }
 
@@ -83,6 +83,6 @@
         result.put(splits.get(0), "");
       }
     }
-    return result;
+    return Collections.unmodifiableMap(result);
   }
 }
diff --git a/contrib/zpages/src/test/java/io/opencensus/contrib/zpages/RpczZPageHandlerTest.java b/contrib/zpages/src/test/java/io/opencensus/contrib/zpages/RpczZPageHandlerTest.java
index 2a75fe8..6031f62 100644
--- a/contrib/zpages/src/test/java/io/opencensus/contrib/zpages/RpczZPageHandlerTest.java
+++ b/contrib/zpages/src/test/java/io/opencensus/contrib/zpages/RpczZPageHandlerTest.java
@@ -38,14 +38,13 @@
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
-import org.mockito.Mock;
 import org.mockito.Mockito;
 
 /** Unit tests for {@link RpczZPageHandler}. */
 @RunWith(JUnit4.class)
 public class RpczZPageHandlerTest {
 
-  @Mock private final ViewManager mockViewManager = Mockito.mock(ViewManager.class);
+  private final ViewManager mockViewManager = Mockito.mock(ViewManager.class);
 
   private static final TagValue METHOD_1 = TagValue.create("method1");
   private static final TagValue METHOD_2 = TagValue.create("method2");
diff --git a/contrib/zpages/src/test/java/io/opencensus/contrib/zpages/StatszZPageHandlerTest.java b/contrib/zpages/src/test/java/io/opencensus/contrib/zpages/StatszZPageHandlerTest.java
index 81e64a6..50e5f42 100644
--- a/contrib/zpages/src/test/java/io/opencensus/contrib/zpages/StatszZPageHandlerTest.java
+++ b/contrib/zpages/src/test/java/io/opencensus/contrib/zpages/StatszZPageHandlerTest.java
@@ -54,14 +54,13 @@
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
-import org.mockito.Mock;
 import org.mockito.Mockito;
 
 /** Unit tests for {@link StatszZPageHandler}. */
 @RunWith(JUnit4.class)
 public class StatszZPageHandlerTest {
 
-  @Mock private final ViewManager mockViewManager = Mockito.mock(ViewManager.class);
+  private final ViewManager mockViewManager = Mockito.mock(ViewManager.class);
 
   private static final View MY_VIEW =
       View.create(
@@ -82,7 +81,7 @@
           0.2,
           16.3,
           234.56,
-          Arrays.asList(0L, 1L, 1L, 2L, 1L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L));
+          Arrays.asList(1L, 1L, 2L, 1L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L));
   private static final AggregationData.DistributionData DISTRIBUTION_DATA_2 =
       AggregationData.DistributionData.create(
           7.9,
diff --git a/examples/BUILD.bazel b/examples/BUILD.bazel
deleted file mode 100644
index a93cefd..0000000
--- a/examples/BUILD.bazel
+++ /dev/null
@@ -1,155 +0,0 @@
-load("//:opencensus_workspace.bzl", "opencensus_java_libraries")
-load("@grpc_java//:java_grpc_library.bzl", "java_grpc_library")
-
-opencensus_java_libraries()
-
-proto_library(
-    name = "helloworld_proto",
-    srcs = ["src/main/proto/helloworld.proto"],
-)
-
-java_proto_library(
-    name = "helloworld_java_proto",
-    deps = [":helloworld_proto"],
-)
-
-java_grpc_library(
-    name = "helloworld_java_grpc",
-    srcs = [":helloworld_proto"],
-    deps = [":helloworld_java_proto"],
-)
-
-java_library(
-    name = "opencensus_examples",
-    srcs = glob(
-        ["src/main/java/**/*.java"],
-    ),
-    deps = [
-        ":helloworld_java_grpc",
-        ":helloworld_java_proto",
-        "@com_google_guava_guava//jar",
-        "@com_google_code_findbugs_jsr305//jar",
-        "@io_opencensus_opencensus_api//jar",
-        "@io_opencensus_opencensus_contrib_grpc_metrics//jar",
-        "@io_opencensus_opencensus_contrib_zpages//jar",
-        "@io_opencensus_opencensus_exporter_stats_prometheus//jar",
-        "@io_opencensus_opencensus_exporter_stats_stackdriver//jar",
-        "@io_opencensus_opencensus_exporter_trace_logging//jar",
-        "@io_opencensus_opencensus_exporter_trace_stackdriver//jar",
-        "@io_grpc_grpc_core//jar",
-        "@io_grpc_grpc_netty//jar",
-        "@io_grpc_grpc_protobuf//jar",
-        "@io_grpc_grpc_stub//jar",
-        "@io_prometheus_simpleclient//jar",
-        "@io_prometheus_simpleclient_httpserver//jar",
-    ],
-    runtime_deps = [
-        "@com_google_api_api_common//jar",
-        "@com_google_api_gax//jar",
-        "@com_google_api_gax_grpc//jar",
-        "@com_google_api_grpc_proto_google_cloud_trace_v1//jar",
-        "@com_google_api_grpc_proto_google_cloud_trace_v2//jar",
-        "@com_google_api_grpc_proto_google_iam_v1//jar",
-        "@com_google_api_grpc_proto_google_cloud_monitoring_v3//jar",
-        "@com_google_api_grpc_proto_google_common_protos//jar",
-        "@com_google_auth_google_auth_library_credentials//jar",
-        "@com_google_auth_google_auth_library_oauth2_http//jar",
-        "@com_google_cloud_google_cloud_core//jar",
-        "@com_google_cloud_google_cloud_core_grpc//jar",
-        "@com_google_cloud_google_cloud_monitoring//jar",
-        "@com_google_cloud_google_cloud_trace//jar",
-        "@com_google_http_client_google_http_client//jar",
-        "@com_google_http_client_google_http_client_jackson2//jar",
-        "@com_google_instrumentation_instrumentation_api//jar",
-        "@com_google_protobuf_protobuf_java//jar",
-        "@com_google_protobuf_protobuf_java_util//jar",
-        "@commons_codec_commons_codec//jar",
-        "@commons_logging_commons_logging//jar",
-
-        "@com_lmax_disruptor//jar",
-        "@io_grpc_grpc_context//jar",
-        "@io_grpc_grpc_auth//jar",
-        "@io_grpc_grpc_protobuf_lite//jar",
-        "@io_netty_netty_buffer//jar",
-        "@io_netty_netty_common//jar",
-        "@io_netty_netty_codec//jar",
-        "@io_netty_netty_codec_socks//jar",
-        "@io_netty_netty_codec_http//jar",
-        "@io_netty_netty_codec_http2//jar",
-        "@io_netty_netty_handler//jar",
-        "@io_netty_netty_handler_proxy//jar",
-        "@io_netty_netty_resolver//jar",
-        "@io_netty_netty_tcnative_boringssl_static//jar",
-        "@io_netty_netty_transport//jar",
-        "@io_opencensus_opencensus_impl//jar",
-        "@io_opencensus_opencensus_impl_core//jar",
-        "@joda_time_joda_time//jar",
-        "@org_apache_httpcomponents_httpclient//jar",
-        "@org_apache_httpcomponents_httpcore//jar",
-        "@org_threeten_threetenbp//jar",
-    ],
-)
-
-java_binary(
-    name = "TagContextExample",
-    main_class = "io.opencensus.examples.tags.TagContextExample",
-    runtime_deps = [
-        ":opencensus_examples",
-    ],
-)
-
-java_binary(
-    name = "MultiSpansTracing",
-    main_class = "io.opencensus.examples.trace.MultiSpansTracing",
-    runtime_deps = [
-        ":opencensus_examples",
-    ],
-)
-
-java_binary(
-    name = "MultiSpansScopedTracing",
-    main_class = "io.opencensus.examples.trace.MultiSpansScopedTracing",
-    runtime_deps = [
-        ":opencensus_examples",
-    ],
-)
-
-java_binary(
-    name = "MultiSpansContextTracing",
-    main_class = "io.opencensus.examples.trace.MultiSpansContextTracing",
-    runtime_deps = [
-        ":opencensus_examples",
-    ],
-)
-
-java_binary(
-    name = "ZPagesTester",
-    main_class = "io.opencensus.examples.zpages.ZPagesTester",
-    runtime_deps = [
-        ":opencensus_examples",
-    ],
-)
-
-java_binary(
-    name = "QuickStart",
-    main_class = "io.opencensus.examples.helloworld.QuickStart",
-    runtime_deps = [
-        ":opencensus_examples",
-    ],
-)
-
-java_binary(
-    name = "HelloWorldClient",
-    main_class = "io.opencensus.examples.grpc.helloworld.HelloWorldClient",
-    runtime_deps = [
-        ":opencensus_examples",
-    ],
-)
-
-java_binary(
-    name = "HelloWorldServer",
-    main_class = "io.opencensus.examples.grpc.helloworld.HelloWorldServer",
-    runtime_deps = [
-        ":opencensus_examples",
-    ],
-)
diff --git a/examples/README.md b/examples/README.md
index 921691b..1c0b76d 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -3,52 +3,44 @@
 ## To build the examples use
 
 ### Gradle
-```
-$ ./gradlew installDist
+```bash
+./gradlew installDist
 ```
 
 ### Maven
-```
-$ mvn package appassembler:assemble
+```bash
+mvn package appassembler:assemble
 ```
 
-### Bazel
+To build the Spring Servlet example
+```bash
+cd spring/servlet
+mvn package appassembler:assemble
 ```
-$ bazel build :all
-```
+
 
 ## To run "TagContextExample" use
 
 ### Gradle
-```
-$ ./build/install/opencensus-examples/bin/TagContextExample
+```bash
+./build/install/opencensus-examples/bin/TagContextExample
 ```
 
 ### Maven
-```
-$ ./target/appassembler/bin/TagContextExample
-```
-
-### Bazel
-```
-$ ./bazel-bin/TagContextExample
+```bash
+./target/appassembler/bin/TagContextExample
 ```
 
 ## To run "ZPagesTester"
 
 ### Gradle
-```
-$ ./build/install/opencensus-examples/bin/ZPagesTester
+```bash
+./build/install/opencensus-examples/bin/ZPagesTester
 ```
 
 ### Maven
-```
-$ ./target/appassembler/bin/ZPagesTester
-```
-
-### Bazel
-```
-$ ./bazel-bin/ZPagesTester
+```bash
+./target/appassembler/bin/ZPagesTester
 ```
 
 Available pages:
@@ -65,18 +57,13 @@
 ## To run "QuickStart" example use
 
 ### Gradle
-```
-$ ./build/install/opencensus-examples/bin/QuickStart
+```bash
+./build/install/opencensus-examples/bin/QuickStart
 ```
 
 ### Maven
-```
-$ ./target/appassembler/bin/QuickStart
-```
-
-### Bazel
-```
-$ ./bazel-bin/QuickStart
+```bash
+./target/appassembler/bin/QuickStart
 ```
 
 ## To run "gRPC Hello World" example use
@@ -95,19 +82,119 @@
 However, if you want to specify any of these arguments, please make sure they are in order.
 
 ### Gradle
+```bash
+./build/install/opencensus-examples/bin/HelloWorldServer serverPort cloudProjectId zPagePort prometheusPort
+./build/install/opencensus-examples/bin/HelloWorldClient user host serverPort cloudProjectId zPagePort
 ```
-$ ./build/install/opencensus-examples/bin/HelloWorldServer serverPort cloudProjectId zPagePort prometheusPort
-$ ./build/install/opencensus-examples/bin/HelloWorldClient user host serverPort cloudProjectId zPagePort
+
+### Maven
+```bash
+./target/appassembler/bin/HelloWorldServer serverPort cloudProjectId zPagePort prometheusPort
+./target/appassembler/bin/HelloWorldClient user host serverPort cloudProjectId zPagePort
+```
+
+## To run "Repl" example
+
+See the full tutorial on the [OpenCensus website](https://opencensus.io/quickstart/java/metrics/).
+
+First run:
+
+### Gradle
+```bash
+./build/install/opencensus-examples/bin/Repl
+```
+
+### Maven
+```bash
+./target/appassembler/bin/Repl
+```
+
+Then start the Prometheus process:
+```bash
+cd src/main/java/io/opencensus/examples/quickstart/
+prometheus --config.file=prometheus.yaml
+```
+
+Stats will be shown on Prometheus UI on http://localhost:9090.
+
+## To run "StackdriverQuickstart" use
+
+See the full tutorial on the [OpenCensus website](https://opencensus.io/guides/exporters/supported-exporters/java/stackdriver/).
+
+### Gradle
+```bash
+./build/install/opencensus-examples/bin/StackdriverQuickstart
 ```
 
 ### Maven
 ```
-$ ./target/appassembler/bin/HelloWorldServer serverPort cloudProjectId zPagePort prometheusPort
-$ ./target/appassembler/bin/HelloWorldClient user host serverPort cloudProjectId zPagePort
+./target/appassembler/bin/StackdriverQuickstart
 ```
 
-### Bazel
+## To run HTTP Server and Client
+
+`HttpJettyServer` is a web service that uses the Jetty server on top of an HTTP servlet.
+`HttpJettyClient` is a web client that uses the Jetty client to send requests to `HttpJettyServer`.
+Both `HttpJettyServer` and `HttpJettyClient` are instrumented with OpenCensus.
+
+Traces from both the client and the server can be viewed in their respective console logs.
+Stats are available from the Prometheus servers running at:
+- http://localhost:9091/metrics - for client stats
+- http://localhost:9090/metrics - for server stats
+  
+
+### Gradle
+```bash
+./build/install/opencensus-examples/bin/HttpJettyServer
+./build/install/opencensus-examples/bin/HttpJettyClient
 ```
-$ ./bazel-bin/HelloWorldServer serverPort cloudProjectId zPagePort prometheusPort
-$ ./bazel-bin/HelloWorldClient user host serverPort cloudProjectId zPagePort
+
+### Maven
+```bash
+./target/appassembler/bin/HttpJettyServer
+./target/appassembler/bin/HttpJettyClient
 ```
+
+## To run OcAgentExportersQuickStart
+
+### Gradle
+```bash
+./build/install/opencensus-examples/bin/OcAgentExportersQuickStart agentEndpoint # default is localhost:56678
+```
+
+### Maven
+```bash
+./target/appassembler/bin/OcAgentExportersQuickStart agentEndpoint # default is localhost:56678
+```
+
+You also need to install and start the OpenCensus Agent in order to receive the traces and metrics.
+For more information on setting up the agent, see the [tutorial](https://opencensus.io/agent/).
+
+## To run Spring HTTP Server and Client
+
+`SpringServletApplication` is a web service application built with the Spring framework. The application
+is instrumented with OpenCensus simply by including the opencensus-contrib-spring-starter package.
+The instrumentation enables tracing on incoming and outgoing HTTP requests. On receiving a GET
+request, the server issues multiple GET requests to itself on a different endpoint using
+AsyncRestTemplate.
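+
+A simplified sketch of such an application (class names are hypothetical; the actual example code
+lives in this repository under examples/spring/servlet):
+
+```java
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.context.annotation.Bean;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.client.AsyncRestTemplate;
+
+@SpringBootApplication
+@RestController
+public class DemoServletApplication {
+
+  // The starter instruments this AsyncRestTemplate bean, so calls made
+  // through it are recorded as client spans.
+  @Bean
+  public AsyncRestTemplate asyncRestTemplate() {
+    return new AsyncRestTemplate();
+  }
+
+  @Autowired private AsyncRestTemplate template;
+
+  // Incoming requests to "/" become server spans; the handler then makes a
+  // traced client call to another endpoint on the same server. Blocking on
+  // the future keeps the sketch short.
+  @GetMapping("/")
+  public String root() throws Exception {
+    return template
+        .getForEntity("http://localhost:8080/work", String.class)
+        .get()
+        .getBody();
+  }
+
+  @GetMapping("/work")
+  public String work() {
+    return "work done";
+  }
+
+  public static void main(String[] args) {
+    SpringApplication.run(DemoServletApplication.class, args);
+  }
+}
+```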
+
+Send an HTTP GET request using curl to see the traces on the console.
+```
+curl http://localhost:8080
+```
+
+Stats are available from the Prometheus server running at:
+- http://localhost:9090/metrics - for server and client stats
+  
+### Gradle
+```bash
+cd spring/servlet
+./gradlew bootRun
+```
+
+### Maven
+```bash
+cd spring/servlet
+./target/appassembler/bin/SpringServletApplication
+```
+
diff --git a/examples/WORKSPACE b/examples/WORKSPACE
deleted file mode 100644
index a065f96..0000000
--- a/examples/WORKSPACE
+++ /dev/null
@@ -1,53 +0,0 @@
-workspace(name = "opencensus_examples")
-
-git_repository(
-    name = "grpc_java",
-    remote = "https://github.com/grpc/grpc-java.git",
-    tag = "v1.10.1",
-)
-
-load("//:opencensus_workspace.bzl", "opencensus_maven_jars")
-load("@grpc_java//:repositories.bzl", "grpc_java_repositories")
-
-opencensus_maven_jars()
-grpc_java_repositories(
-    # Omit to avoid conflicts.
-
-    omit_com_google_auth_google_auth_library_credentials=True,
-    omit_com_google_api_grpc_google_common_protos=True,
-    omit_com_google_code_findbugs_jsr305=True,
-    omit_com_google_code_gson=True,
-    omit_com_google_errorprone_error_prone_annotations=True,
-    omit_com_google_guava=True,
-    omit_com_google_protobuf=True,
-    omit_com_google_protobuf_nano_protobuf_javanano=True,
-    omit_com_google_truth_truth=True,
-    omit_com_squareup_okhttp=True,
-    omit_com_squareup_okio=True,
-
-    # These netty dependencies have already been included in opencensus_workspace.bzl
-    omit_io_netty_buffer=True,
-    omit_io_netty_common=True,
-    omit_io_netty_handler_proxy=True,
-    omit_io_netty_codec_http2=True,
-    omit_io_netty_transport=True,
-    omit_io_netty_codec=True,
-    omit_io_netty_codec_socks=True,
-    omit_io_netty_codec_http=True,
-    omit_io_netty_handler=True,
-    omit_io_netty_resolver=True,
-
-    omit_io_opencensus_api=True,
-    omit_io_opencensus_grpc_metrics=True,
-    omit_junit_junit=True
-)
-
-# proto_library, cc_proto_library, and java_proto_library rules implicitly
-# depend on @com_google_protobuf for protoc and proto runtimes.
-# This statement defines the @com_google_protobuf repo.
-http_archive(
-    name = "com_google_protobuf",
-    sha256 = "1f8b9b202e9a4e467ff0b0f25facb1642727cdf5e69092038f15b37c75b99e45",
-    strip_prefix = "protobuf-3.5.1",
-    urls = ["https://github.com/google/protobuf/archive/v3.5.1.zip"],
-)
diff --git a/examples/build.gradle b/examples/build.gradle
index 22889e1..ac437cf 100644
--- a/examples/build.gradle
+++ b/examples/build.gradle
@@ -9,13 +9,21 @@
         }
     }
     dependencies {
-        classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.3'
+        classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.8'
+        classpath 'com.github.ben-manes:gradle-versions-plugin:0.20.0'
+        classpath "gradle.plugin.com.github.sherter.google-java-format:google-java-format-gradle-plugin:0.8"
     }
 }
 
+apply plugin: "checkstyle"
+apply plugin: 'com.google.protobuf'
+apply plugin: 'com.github.sherter.google-java-format'
 apply plugin: 'idea'
 apply plugin: 'java'
-apply plugin: 'com.google.protobuf'
+
+// Display the version report using: ./gradlew dependencyUpdates
+// Also see https://github.com/ben-manes/gradle-versions-plugin.
+apply plugin: 'com.github.ben-manes.versions'
 
 repositories {
     mavenCentral()
@@ -23,11 +31,13 @@
 }
 
 group = "io.opencensus"
-version = "0.17.0-SNAPSHOT" // CURRENT_OPENCENSUS_VERSION
+version = "0.32.0-SNAPSHOT" // CURRENT_OPENCENSUS_VERSION
 
-def opencensusVersion = "0.16.1" // LATEST_OPENCENSUS_RELEASE_VERSION
-def grpcVersion = "1.13.1" // CURRENT_GRPC_VERSION
-def prometheusVersion = "0.3.0"
+def opencensusVersion = "0.30.0" // LATEST_OPENCENSUS_RELEASE_VERSION
+def grpcVersion = "1.32.2" // CURRENT_GRPC_VERSION
+def prometheusVersion = "0.6.0"
+def jettyVersion = "9.4.17.v20190418"
+def tcnativeVersion = "2.0.20.Final"
 
 tasks.withType(JavaCompile) {
     sourceCompatibility = '1.8'
@@ -35,26 +45,49 @@
 }
 
 dependencies {
-    compile "com.google.api.grpc:proto-google-common-protos:1.11.0",
+    compile "com.google.api.grpc:proto-google-common-protos:1.12.0",
             "io.opencensus:opencensus-api:${opencensusVersion}",
             "io.opencensus:opencensus-contrib-zpages:${opencensusVersion}",
             "io.opencensus:opencensus-contrib-grpc-metrics:${opencensusVersion}",
+            "io.opencensus:opencensus-contrib-http-util:${opencensusVersion}",
+            "io.opencensus:opencensus-contrib-http-servlet:${opencensusVersion}",
+            "io.opencensus:opencensus-contrib-http-jetty-client:${opencensusVersion}",
+            "io.opencensus:opencensus-exporter-metrics-ocagent:${opencensusVersion}",
             "io.opencensus:opencensus-exporter-stats-prometheus:${opencensusVersion}",
             "io.opencensus:opencensus-exporter-stats-stackdriver:${opencensusVersion}",
+            "io.opencensus:opencensus-exporter-trace-jaeger:${opencensusVersion}",
             "io.opencensus:opencensus-exporter-trace-stackdriver:${opencensusVersion}",
             "io.opencensus:opencensus-exporter-trace-logging:${opencensusVersion}",
+            "io.opencensus:opencensus-exporter-trace-ocagent:${opencensusVersion}",
             "io.grpc:grpc-protobuf:${grpcVersion}",
             "io.grpc:grpc-stub:${grpcVersion}",
-            "io.grpc:grpc-netty:${grpcVersion}",
-            "io.prometheus:simpleclient_httpserver:${prometheusVersion}"
+            "io.grpc:grpc-netty-shaded:${grpcVersion}",
+            "io.grpc:grpc-census:${grpcVersion}",
+            "io.prometheus:simpleclient_httpserver:${prometheusVersion}",
+            "javax.servlet:javax.servlet-api:3.1.0",
+            "org.eclipse.jetty:jetty-server:${jettyVersion}",
+            "org.eclipse.jetty:jetty-client:${jettyVersion}",
+            "org.eclipse.jetty:jetty-servlet:${jettyVersion}",
+            "org.slf4j:slf4j-log4j12:1.7.25"
 
     runtime "io.opencensus:opencensus-impl:${opencensusVersion}",
-            "io.netty:netty-tcnative-boringssl-static:2.0.8.Final"
+            "io.netty:netty-tcnative-boringssl-static:${tcnativeVersion}"
+}
+
+googleJavaFormat {
+    toolVersion '1.7'
+    source = 'src/main'
+    include '**/*.java'
+}
+
+verifyGoogleJavaFormat {
+    source = 'src/main'
+    include '**/*.java'
 }
 
 protobuf {
     protoc {
-        artifact = 'com.google.protobuf:protoc:3.5.1-1'
+        artifact = 'com.google.protobuf:protoc:3.6.1'
     }
     plugins {
         grpc {
@@ -80,6 +113,17 @@
     }
 }
 
+checkstyle {
+    configFile = file("$rootDir/../buildscripts/checkstyle.xml")
+    toolVersion = "8.12"
+    ignoreFailures = false
+    configProperties["rootDir"] = "$rootDir/.."
+}
+
+// Disable checkstyle if no java8.
+checkstyleMain.source = 'src/main'
+checkstyleTest.source = 'src/main'
+
 // Provide convenience executables for trying out the examples.
 apply plugin: 'application'
 
@@ -141,6 +185,70 @@
     classpath = jar.outputs.files + project.configurations.runtime
 }
 
+task repl(type: CreateStartScripts) {
+    mainClassName = 'io.opencensus.examples.quickstart.Repl'
+    applicationName = 'Repl'
+    outputDir = new File(project.buildDir, 'tmp')
+    classpath = jar.outputs.files + project.configurations.runtime
+}
+
+task stackdriverStatsQuickstart(type: CreateStartScripts) {
+    mainClassName = 'io.opencensus.examples.stats.StackdriverQuickstart'
+    applicationName = 'StackdriverQuickstart'
+    outputDir = new File(project.buildDir, 'tmp')
+    classpath = jar.outputs.files + project.configurations.runtime
+}
+
+task derivedDoubleGaugeQuickstart(type: CreateStartScripts) {
+    mainClassName = 'io.opencensus.examples.gauges.DerivedDoubleGaugeQuickstart'
+    applicationName = 'DerivedDoubleGaugeQuickstart'
+    outputDir = new File(project.buildDir, 'tmp')
+    classpath = jar.outputs.files + project.configurations.runtime
+}
+
+task derivedLongGaugeQuickstart(type: CreateStartScripts) {
+    mainClassName = 'io.opencensus.examples.gauges.DerivedLongGaugeQuickstart'
+    applicationName = 'DerivedLongGaugeQuickstart'
+    outputDir = new File(project.buildDir, 'tmp')
+    classpath = jar.outputs.files + project.configurations.runtime
+}
+
+task doubleGaugeQuickstart(type: CreateStartScripts) {
+    mainClassName = 'io.opencensus.examples.gauges.DoubleGaugeQuickstart'
+    applicationName = 'DoubleGaugeQuickstart'
+    outputDir = new File(project.buildDir, 'tmp')
+    classpath = jar.outputs.files + project.configurations.runtime
+}
+
+task longGaugeQuickstart(type: CreateStartScripts) {
+    mainClassName = 'io.opencensus.examples.gauges.LongGaugeQuickstart'
+    applicationName = 'LongGaugeQuickstart'
+    outputDir = new File(project.buildDir, 'tmp')
+    classpath = jar.outputs.files + project.configurations.runtime
+}
+
+task httpJettyServer(type: CreateStartScripts) {
+    mainClassName = 'io.opencensus.examples.http.jetty.server.HelloWorldServer'
+    applicationName = 'HttpJettyServer'
+    outputDir = new File(project.buildDir, 'tmp')
+    classpath = jar.outputs.files + project.configurations.runtime
+}
+
+task httpJettyClient(type: CreateStartScripts) {
+    mainClassName = 'io.opencensus.examples.http.jetty.client.HelloWorldClient'
+    applicationName = 'HttpJettyClient'
+    outputDir = new File(project.buildDir, 'tmp')
+    classpath = jar.outputs.files + project.configurations.runtime
+}
+
+task ocAgentExportersQuickStart(type: CreateStartScripts) {
+    mainClassName = 'io.opencensus.examples.ocagent.OcAgentExportersQuickStart'
+    applicationName = 'OcAgentExportersQuickStart'
+    outputDir = new File(project.buildDir, 'tmp')
+    classpath = jar.outputs.files + project.configurations.runtime
+}
+
+
 applicationDistribution.into('bin') {
     from(multiSpansTracing)
     from(multiSpansScopedTracing)
@@ -150,5 +258,14 @@
     from(quickStart)
     from(helloWorldServer)
     from(helloWorldClient)
+    from(repl)
+    from(stackdriverStatsQuickstart)
+    from(derivedDoubleGaugeQuickstart)
+    from(derivedLongGaugeQuickstart)
+    from(doubleGaugeQuickstart)
+    from(longGaugeQuickstart)
+    from(httpJettyServer)
+    from(httpJettyClient)
+    from(ocAgentExportersQuickStart)
     fileMode = 0755
 }
diff --git a/examples/opencensus_workspace.bzl b/examples/opencensus_workspace.bzl
deleted file mode 100644
index ce382cd..0000000
--- a/examples/opencensus_workspace.bzl
+++ /dev/null
@@ -1,1680 +0,0 @@
-# The following dependencies were calculated from:
-#
-# generate_workspace --artifact=com.google.guava:guava-jdk5:23.0 --artifact=com.google.guava:guava:23.0 --artifact=io.grpc:grpc-all:1.9.0 --artifact=io.opencensus:opencensus-api:0.16.1 --artifact=io.opencensus:opencensus-contrib-grpc-metrics:0.16.1 --artifact=io.opencensus:opencensus-contrib-zpages:0.16.1 --artifact=io.opencensus:opencensus-exporter-stats-prometheus:0.16.1 --artifact=io.opencensus:opencensus-exporter-stats-stackdriver:0.16.1 --artifact=io.opencensus:opencensus-exporter-trace-logging:0.16.1 --artifact=io.opencensus:opencensus-exporter-trace-stackdriver:0.16.1 --artifact=io.opencensus:opencensus-impl:0.16.1 --artifact=io.prometheus:simpleclient_httpserver:0.3.0 --repositories=http://repo.maven.apache.org/maven2
-
-
-def opencensus_maven_jars():
-  # io.opencensus:opencensus-api:jar:0.10.0 wanted version 3.0.1
-  # io.grpc:grpc-core:jar:1.9.0 wanted version 3.0.0
-  # com.google.guava:guava:bundle:23.0
-  # com.google.instrumentation:instrumentation-api:jar:0.4.3 wanted version 3.0.0
-  # io.opencensus:opencensus-contrib-grpc-metrics:jar:0.10.0 wanted version 3.0.1
-  native.maven_jar(
-      name = "com_google_code_findbugs_jsr305",
-      artifact = "com.google.code.findbugs:jsr305:2.0.2",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "516c03b21d50a644d538de0f0369c620989cd8f0",
-  )
-
-
-  # io.grpc:grpc-protobuf:jar:1.9.0
-  native.maven_jar(
-      name = "io_grpc_grpc_protobuf_lite",
-      artifact = "io.grpc:grpc-protobuf-lite:1.9.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "9dc9c6531ae0b304581adff0e9b7cff21a4073ac",
-  )
-
-
-  native.maven_jar(
-      name = "io_opencensus_opencensus_exporter_stats_prometheus",
-      artifact = "io.opencensus:opencensus-exporter-stats-prometheus:0.16.1",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "c1e9fc26da3060dde5a5948fd065c1b28cd10f39",
-  )
-
-
-  # com.google.api:gax-grpc:jar:1.30.0 got requested version
-  # com.google.api:gax:jar:1.30.0
-  native.maven_jar(
-      name = "com_google_auth_google_auth_library_oauth2_http",
-      artifact = "com.google.auth:google-auth-library-oauth2-http:0.10.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "c079a62086121973a23d90f54e2b8c13050fa39d",
-  )
-
-
-  # io.netty:netty-handler-proxy:jar:4.1.17.Final got requested version
-  # io.netty:netty-codec:jar:4.1.17.Final
-  # io.netty:netty-handler:jar:4.1.17.Final got requested version
-  native.maven_jar(
-      name = "io_netty_netty_transport",
-      artifact = "io.netty:netty-transport:4.1.17.Final",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "9585776b0a8153182412b5d5366061ff486914c1",
-  )
-
-
-  # io.grpc:grpc-netty:jar:1.9.0
-  native.maven_jar(
-      name = "io_netty_netty_handler_proxy",
-      artifact = "io.netty:netty-handler-proxy:4.1.17.Final",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "9330ee60c4e48ca60aac89b7bc5ec2567e84f28e",
-  )
-
-
-  # io.grpc:grpc-all:jar:1.9.0
-  native.maven_jar(
-      name = "io_grpc_grpc_protobuf_nano",
-      artifact = "io.grpc:grpc-protobuf-nano:1.9.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "561b03d3fd5178117a51f9f7ef9d9e5442ed2348",
-  )
-
-
-  # io.opencensus:opencensus-exporter-trace-stackdriver:jar:0.16.1
-  native.maven_jar(
-      name = "com_google_cloud_google_cloud_trace",
-      artifact = "com.google.cloud:google-cloud-trace:0.58.0-beta",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "ea715c51340a32106ffdf32375a5dad9dbdf160e",
-  )
-
-
-  # org.apache.httpcomponents:httpclient:jar:4.5.3
-  native.maven_jar(
-      name = "commons_codec_commons_codec",
-      artifact = "commons-codec:commons-codec:1.9",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "9ce04e34240f674bc72680f8b843b1457383161a",
-  )
-
-
-  # io.opencensus:opencensus-impl:jar:0.16.1
-  native.maven_jar(
-      name = "io_opencensus_opencensus_impl_core",
-      artifact = "io.opencensus:opencensus-impl-core:0.16.1",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "a87fc041f66b8c923e2a1de6b7c1582b7990fde8",
-  )
-
-
-  # io.prometheus:simpleclient_httpserver:bundle:0.4.0
-  native.maven_jar(
-      name = "io_prometheus_simpleclient_common",
-      artifact = "io.prometheus:simpleclient_common:0.3.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "c9656d515d3a7647407f2c221d56be13177b82a0",
-  )
-
-
-  # com.google.api:gax-grpc:jar:1.30.0 got requested version
-  # com.google.api:gax:jar:1.30.0
-  native.maven_jar(
-      name = "org_threeten_threetenbp",
-      artifact = "org.threeten:threetenbp:1.3.3",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "3ea31c96676ff12ab56be0b1af6fff61d1a4f1f2",
-  )
-
-
-  # io.grpc:grpc-core:jar:1.9.0 wanted version 2.1.2
-  # io.opencensus:opencensus-contrib-grpc-metrics:jar:0.10.0 wanted version 2.1.2
-  # com.google.guava:guava:bundle:23.0
-  # io.opencensus:opencensus-api:jar:0.10.0 wanted version 2.1.2
-  native.maven_jar(
-      name = "com_google_errorprone_error_prone_annotations",
-      artifact = "com.google.errorprone:error_prone_annotations:2.0.18",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "5f65affce1684999e2f4024983835efc3504012e",
-  )
-
-
-  # io.netty:netty-transport:jar:4.1.17.Final
-  native.maven_jar(
-      name = "io_netty_netty_resolver",
-      artifact = "io.netty:netty-resolver:4.1.17.Final",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "8f386c80821e200f542da282ae1d3cde5cad8368",
-  )
-
-
-  # com.squareup.okhttp:okhttp:jar:2.5.0
-  # io.grpc:grpc-okhttp:jar:1.9.0 wanted version 1.13.0
-  native.maven_jar(
-      name = "com_squareup_okio_okio",
-      artifact = "com.squareup.okio:okio:1.6.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "98476622f10715998eacf9240d6b479f12c66143",
-  )
-
-
-  # com.google.cloud:google-cloud-core-grpc:jar:1.40.0 wanted version 3.6.0
-  # io.grpc:grpc-protobuf:jar:1.9.0
-  # com.google.cloud:google-cloud-core:jar:1.40.0 wanted version 3.6.0
-  native.maven_jar(
-      name = "com_google_protobuf_protobuf_java_util",
-      artifact = "com.google.protobuf:protobuf-java-util:3.5.1",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "6e40a6a3f52455bd633aa2a0dba1a416e62b4575",
-  )
-
-
-  # io.grpc:grpc-auth:jar:1.9.0
-  # io.opencensus:opencensus-exporter-stats-stackdriver:jar:0.16.1 wanted version 0.10.0
-  # com.google.api:gax-grpc:jar:1.30.0 wanted version 0.10.0
-  # io.opencensus:opencensus-exporter-trace-stackdriver:jar:0.16.1 wanted version 0.10.0
-  # com.google.auth:google-auth-library-oauth2-http:jar:0.9.0 got requested version
-  # com.google.cloud:google-cloud-core-grpc:jar:1.40.0 wanted version 0.10.0
-  native.maven_jar(
-      name = "com_google_auth_google_auth_library_credentials",
-      artifact = "com.google.auth:google-auth-library-credentials:0.9.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "8e2b181feff6005c9cbc6f5c1c1e2d3ec9138d46",
-  )
-
-
-  # com.google.api.grpc:proto-google-cloud-trace-v2:jar:0.23.0 got requested version
-  # com.google.api:gax:jar:1.30.0 got requested version
-  # com.google.api.grpc:proto-google-cloud-trace-v1:jar:0.23.0 got requested version
-  # com.google.api.grpc:proto-google-iam-v1:jar:0.12.0 wanted version 1.5.0
-  # com.google.api.grpc:proto-google-cloud-monitoring-v3:jar:1.22.0 got requested version
-  # com.google.cloud:google-cloud-core:jar:1.40.0
-  # com.google.api:gax-grpc:jar:1.30.0 got requested version
-  native.maven_jar(
-      name = "com_google_api_api_common",
-      artifact = "com.google.api:api-common:1.7.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "ea59fb8b2450999345035dec8a6f472543391766",
-  )
-
-
-  # io.opencensus:opencensus-contrib-zpages:jar:0.16.1 got requested version
-  native.maven_jar(
-      name = "io_opencensus_opencensus_contrib_grpc_metrics",
-      artifact = "io.opencensus:opencensus-contrib-grpc-metrics:0.16.1",
-      sha1 = "f56b444e2766eaf597ee11c7501f0d6b9992395c",
-  )
-
-
-  # org.mockito:mockito-core:jar:1.9.5
-  native.maven_jar(
-      name = "org_objenesis_objenesis",
-      artifact = "org.objenesis:objenesis:1.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "9b473564e792c2bdf1449da1f0b1b5bff9805704",
-  )
-
-
-  # io.netty:netty-buffer:jar:4.1.17.Final
-  # io.netty:netty-resolver:jar:4.1.17.Final got requested version
-  native.maven_jar(
-      name = "io_netty_netty_common",
-      artifact = "io.netty:netty-common:4.1.17.Final",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "581c8ee239e4dc0976c2405d155f475538325098",
-  )
-
-
-  # com.google.cloud:google-cloud-trace:jar:0.58.0-beta
-  native.maven_jar(
-      name = "com_google_api_grpc_proto_google_cloud_trace_v2",
-      artifact = "com.google.api.grpc:proto-google-cloud-trace-v2:0.23.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "4aa1bc7212d34791a02962092deafc43a7f4245e",
-  )
-
-
-  # com.google.cloud:google-cloud-trace:jar:0.58.0-beta got requested version
-  # com.google.cloud:google-cloud-core-grpc:jar:1.40.0
-  # com.google.cloud:google-cloud-monitoring:jar:1.40.0 got requested version
-  native.maven_jar(
-      name = "io_grpc_grpc_netty_shaded",
-      artifact = "io.grpc:grpc-netty-shaded:1.13.1",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "ccdc4f2c2791d93164c574fbfb90d614aa0849ae",
-  )
-
-
-  # com.google.cloud:google-cloud-trace:jar:0.58.0-beta
-  native.maven_jar(
-      name = "com_google_api_grpc_proto_google_cloud_trace_v1",
-      artifact = "com.google.api.grpc:proto-google-cloud-trace-v1:0.23.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "848bb2c3b9d683dccc2a26d077015cdc71b7e343",
-  )
-
-
-  # io.grpc:grpc-all:jar:1.9.0
-  native.maven_jar(
-      name = "io_grpc_grpc_okhttp",
-      artifact = "io.grpc:grpc-okhttp:1.9.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "4e7fbb9d3cd65848f42494de165b1c5839f69a8a",
-  )
-
-
-  # junit:junit:jar:4.12
-  native.maven_jar(
-      name = "org_hamcrest_hamcrest_core",
-      artifact = "org.hamcrest:hamcrest-core:1.3",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "42a25dc3219429f0e5d060061f71acb49bf010a0",
-  )
-
-
-  # io.netty:netty-codec-http2:jar:4.1.17.Final
-  native.maven_jar(
-      name = "io_netty_netty_handler",
-      artifact = "io.netty:netty-handler:4.1.17.Final",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "18c40ffb61a1d1979eca024087070762fdc4664a",
-  )
-
-
-  # com.google.cloud:google-cloud-monitoring:jar:1.40.0
-  native.maven_jar(
-      name = "com_google_api_grpc_proto_google_cloud_monitoring_v3",
-      artifact = "com.google.api.grpc:proto-google-cloud-monitoring-v3:1.22.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "5b8746703e9d8f2937d4925a70b030cfc5bf00f6",
-  )
-
-
-  # com.google.auth:google-auth-library-oauth2-http:jar:0.9.0 wanted version 1.19.0
-  # com.google.cloud:google-cloud-core:jar:1.40.0
-  native.maven_jar(
-      name = "com_google_http_client_google_http_client",
-      artifact = "com.google.http-client:google-http-client:1.24.1",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "396eac8d3fb1332675f82b208f48a469d64f3b4a",
-  )
-
-
-  native.maven_jar(
-      name = "io_prometheus_simpleclient_httpserver",
-      artifact = "io.prometheus:simpleclient_httpserver:0.3.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "a2c1aeecac28f5bfa9a92a67b071d246ac00bbec",
-  )
-
-
-  # io.grpc:grpc-core:jar:1.9.0
-  native.maven_jar(
-      name = "com_google_instrumentation_instrumentation_api",
-      artifact = "com.google.instrumentation:instrumentation-api:0.4.3",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "41614af3429573dc02645d541638929d877945a2",
-  )
-
-
-  # com.google.auth:google-auth-library-oauth2-http:jar:0.9.0
-  native.maven_jar(
-      name = "com_google_http_client_google_http_client_jackson2",
-      artifact = "com.google.http-client:google-http-client-jackson2:1.19.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "81dbf9795d387d5e80e55346582d5f2fb81a42eb",
-  )
-
-
-  native.maven_jar(
-      name = "io_opencensus_opencensus_exporter_trace_logging",
-      artifact = "io.opencensus:opencensus-exporter-trace-logging:0.16.1",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "a3ca83ff7075c58e564aa029c35ccd8224616879",
-  )
-
-
-  # com.google.api:gax-grpc:jar:1.30.0 wanted version 1.13.1
-  # io.grpc:grpc-all:jar:1.9.0
-  # com.google.cloud:google-cloud-core-grpc:jar:1.40.0 wanted version 1.13.1
-  # com.google.cloud:google-cloud-monitoring:jar:1.40.0 wanted version 1.13.1
-  # com.google.cloud:google-cloud-trace:jar:0.58.0-beta wanted version 1.13.1
-  native.maven_jar(
-      name = "io_grpc_grpc_auth",
-      artifact = "io.grpc:grpc-auth:1.9.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "d2eadc6d28ebee8ec0cef74f882255e4069972ad",
-  )
-
-
-  # com.google.cloud:google-cloud-core:jar:1.40.0
-  # com.google.api:gax-grpc:jar:1.30.0 got requested version
-  native.maven_jar(
-      name = "com_google_api_gax",
-      artifact = "com.google.api:gax:1.30.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "58fa2feb11b092be0a6ebe705a28736f12374230",
-  )
-
-
-  native.maven_jar(
-      name = "io_opencensus_opencensus_exporter_trace_stackdriver",
-      artifact = "io.opencensus:opencensus-exporter-trace-stackdriver:0.16.1",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "6ea1a99a5cc580f472fbddf34152b3dcd6929e88",
-  )
-
-
-  # com.google.guava:guava:bundle:23.0
-  native.maven_jar(
-      name = "com_google_j2objc_j2objc_annotations",
-      artifact = "com.google.j2objc:j2objc-annotations:1.1",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "ed28ded51a8b1c6b112568def5f4b455e6809019",
-  )
-
-
-  # io.grpc:grpc-auth:jar:1.9.0
-  # io.grpc:grpc-protobuf:jar:1.9.0 got requested version
-  # io.grpc:grpc-okhttp:jar:1.9.0 got requested version
-  # io.grpc:grpc-stub:jar:1.9.0 got requested version
-  # io.grpc:grpc-protobuf-lite:jar:1.9.0 got requested version
-  # io.grpc:grpc-all:jar:1.9.0 got requested version
-  # io.grpc:grpc-protobuf-nano:jar:1.9.0 got requested version
-  # io.grpc:grpc-testing:jar:1.9.0 got requested version
-  # io.grpc:grpc-netty:jar:1.9.0 got requested version
-  # io.grpc:grpc-netty-shaded:jar:1.13.1 wanted version 1.13.1
-  native.maven_jar(
-      name = "io_grpc_grpc_core",
-      artifact = "io.grpc:grpc-core:1.9.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "cf76ab13d35e8bd5d0ffad6d82bb1ef1770f050c",
-  )
-
-
-  # io.opencensus:opencensus-exporter-stats-stackdriver:jar:0.16.1
-  # io.opencensus:opencensus-exporter-trace-stackdriver:jar:0.16.1 got requested version
-  native.maven_jar(
-      name = "io_opencensus_opencensus_contrib_monitored_resource_util",
-      artifact = "io.opencensus:opencensus-contrib-monitored-resource-util:0.16.1",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "9edb4161978ac89f99a69544bfdc71b018a2509d",
-  )
-
-
-  # com.google.cloud:google-cloud-core:jar:1.40.0
-  native.maven_jar(
-      name = "joda_time_joda_time",
-      artifact = "joda-time:joda-time:2.9.2",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "36d6e77a419cb455e6fd5909f6f96b168e21e9d0",
-  )
-
-
-  # io.grpc:grpc-testing:jar:1.9.0
-  native.maven_jar(
-      name = "org_mockito_mockito_core",
-      artifact = "org.mockito:mockito-core:1.9.5",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "c3264abeea62c4d2f367e21484fbb40c7e256393",
-  )
-
-
-  # org.apache.httpcomponents:httpclient:jar:4.5.3
-  native.maven_jar(
-      name = "org_apache_httpcomponents_httpcore",
-      artifact = "org.apache.httpcomponents:httpcore:4.4.6",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "e3fd8ced1f52c7574af952e2e6da0df8df08eb82",
-  )
-
-
-  # io.opencensus:opencensus-impl:jar:0.16.1
-  native.maven_jar(
-      name = "com_lmax_disruptor",
-      artifact = "com.lmax:disruptor:3.4.1",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "72fabfe8a183f53bf61e0303921b7a89d2e8daed",
-  )
-
-
-  # com.google.cloud:google-cloud-core-grpc:jar:1.40.0 wanted version 3.6.0
-  # com.google.api.grpc:proto-google-cloud-trace-v1:jar:0.23.0 wanted version 3.6.0
-  # com.google.api.grpc:proto-google-cloud-monitoring-v3:jar:1.22.0 wanted version 3.6.0
-  # io.grpc:grpc-protobuf:jar:1.9.0
-  # com.google.api.grpc:proto-google-iam-v1:jar:0.12.0 got requested version
-  # com.google.protobuf:protobuf-java-util:bundle:3.5.1 got requested version
-  # com.google.api.grpc:proto-google-cloud-trace-v2:jar:0.23.0 wanted version 3.6.0
-  native.maven_jar(
-      name = "com_google_protobuf_protobuf_java",
-      artifact = "com.google.protobuf:protobuf-java:3.5.1",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "8c3492f7662fa1cbf8ca76a0f5eb1146f7725acd",
-  )
-
-
-  # io.grpc:grpc-okhttp:jar:1.9.0
-  native.maven_jar(
-      name = "com_squareup_okhttp_okhttp",
-      artifact = "com.squareup.okhttp:okhttp:2.5.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "4de2b4ed3445c37ec1720a7d214712e845a24636",
-  )
-
-
-  # io.grpc:grpc-testing:jar:1.9.0 got requested version
-  # com.google.api:gax-grpc:jar:1.30.0 wanted version 1.13.1
-  # io.grpc:grpc-all:jar:1.9.0
-  # com.google.cloud:google-cloud-core-grpc:jar:1.40.0 wanted version 1.13.1
-  # com.google.cloud:google-cloud-monitoring:jar:1.40.0 wanted version 1.13.1
-  # com.google.cloud:google-cloud-trace:jar:0.58.0-beta wanted version 1.13.1
-  native.maven_jar(
-      name = "io_grpc_grpc_stub",
-      artifact = "io.grpc:grpc-stub:1.9.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "20e310f888860a27dfa509a69eebb236417ee93f",
-  )
-
-
-  native.maven_jar(
-      name = "io_opencensus_opencensus_impl",
-      artifact = "io.opencensus:opencensus-impl:0.16.1",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "f9b06bf8422ba3700346173524087d005725432e",
-  )
-
-
-  # com.google.api:gax-grpc:jar:1.30.0 wanted version 1.13.1
-  # io.grpc:grpc-all:jar:1.9.0
-  # com.google.cloud:google-cloud-core-grpc:jar:1.40.0 wanted version 1.13.1
-  native.maven_jar(
-      name = "io_grpc_grpc_protobuf",
-      artifact = "io.grpc:grpc-protobuf:1.9.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "94ca247577e4cf1a38d5ac9d536ac1d426a1ccc5",
-  )
-
-
-  # io.netty:netty-handler-proxy:jar:4.1.17.Final
-  native.maven_jar(
-      name = "io_netty_netty_codec_socks",
-      artifact = "io.netty:netty-codec-socks:4.1.17.Final",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "a159bf1f3d5019e0d561c92fbbec8400967471fa",
-  )
-
-
-  # io.netty:netty-codec-http:jar:4.1.17.Final
-  # io.netty:netty-codec-socks:jar:4.1.17.Final got requested version
-  # io.netty:netty-handler:jar:4.1.17.Final got requested version
-  native.maven_jar(
-      name = "io_netty_netty_codec",
-      artifact = "io.netty:netty-codec:4.1.17.Final",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "1d00f56dc9e55203a4bde5aae3d0828fdeb818e7",
-  )
-
-
-  # io.netty:netty-transport:jar:4.1.17.Final
-  # io.netty:netty-handler:jar:4.1.17.Final got requested version
-  native.maven_jar(
-      name = "io_netty_netty_buffer",
-      artifact = "io.netty:netty-buffer:4.1.17.Final",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "fdd68fb3defd7059a7392b9395ee941ef9bacc25",
-  )
-
-
-  # com.google.cloud:google-cloud-trace:jar:0.58.0-beta got requested version
-  # com.google.cloud:google-cloud-monitoring:jar:1.40.0
-  native.maven_jar(
-      name = "com_google_cloud_google_cloud_core_grpc",
-      artifact = "com.google.cloud:google-cloud-core-grpc:1.40.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "f1f7a81915728eb53b9d3832f3ccec53ea181664",
-  )
-
-
-  # io.grpc:grpc-all:jar:1.9.0
-  native.maven_jar(
-      name = "io_grpc_grpc_netty",
-      artifact = "io.grpc:grpc-netty:1.9.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "8157384d87497dc18604a5ba3760763fe643f16e",
-  )
-
-
-  # io.grpc:grpc-all:jar:1.9.0
-  native.maven_jar(
-      name = "io_grpc_grpc_testing",
-      artifact = "io.grpc:grpc-testing:1.9.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "3d20675f0e64825f565a7d21456e7dbdd5886c6b",
-  )
-
-
-  # io.opencensus:opencensus-impl:jar:0.16.1 got requested version
-  # io.opencensus:opencensus-exporter-trace-stackdriver:jar:0.16.1 got requested version
-  # io.opencensus:opencensus-exporter-trace-logging:jar:0.16.1 got requested version
-  # io.opencensus:opencensus-contrib-grpc-metrics:jar:0.10.0 wanted version 0.10.0
-  # io.opencensus:opencensus-exporter-stats-prometheus:jar:0.16.1 got requested version
-  # io.opencensus:opencensus-contrib-zpages:jar:0.16.1 got requested version
-  # io.opencensus:opencensus-exporter-stats-stackdriver:jar:0.16.1 got requested version
-  # io.opencensus:opencensus-impl-core:jar:0.16.1 got requested version
-  native.maven_jar(
-      name = "io_opencensus_opencensus_api",
-      artifact = "io.opencensus:opencensus-api:0.16.1",
-      sha1 = "ec5d81a80d9c010c50368ad9045d512828d0d62d",
-  )
-
-
-  # io.grpc:grpc-testing:jar:1.9.0
-  native.maven_jar(
-      name = "junit_junit",
-      artifact = "junit:junit:4.12",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "2973d150c0dc1fefe998f834810d68f278ea58ec",
-  )
-
-
-  # io.prometheus:simpleclient_httpserver:bundle:0.4.0 wanted version 0.3.0
-  # io.prometheus:simpleclient_common:bundle:0.4.0 wanted version 0.3.0
-  # io.opencensus:opencensus-exporter-stats-prometheus:jar:0.16.1
-  native.maven_jar(
-      name = "io_prometheus_simpleclient",
-      artifact = "io.prometheus:simpleclient:0.4.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "99c293bbf9461587b2179273b6fdc349582a1021",
-  )
-
-
-  # com.google.guava:guava:bundle:23.0
-  native.maven_jar(
-      name = "org_codehaus_mojo_animal_sniffer_annotations",
-      artifact = "org.codehaus.mojo:animal-sniffer-annotations:1.14",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "775b7e22fb10026eed3f86e8dc556dfafe35f2d5",
-  )
-
-
-  native.maven_jar(
-      name = "io_opencensus_opencensus_exporter_stats_stackdriver",
-      artifact = "io.opencensus:opencensus-exporter-stats-stackdriver:0.16.1",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "e4e7152e53c7683e92a1ddae15a2e13eeaa7714e",
-  )
-
-
-  # io.netty:netty-handler-proxy:jar:4.1.17.Final got requested version
-  # io.netty:netty-codec-http2:jar:4.1.17.Final
-  native.maven_jar(
-      name = "io_netty_netty_codec_http",
-      artifact = "io.netty:netty-codec-http:4.1.17.Final",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "251d7edcb897122b9b23f24ff793cd0739056b9e",
-  )
-
-
-  # org.apache.httpcomponents:httpclient:jar:4.5.3
-  native.maven_jar(
-      name = "commons_logging_commons_logging",
-      artifact = "commons-logging:commons-logging:1.2",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "4bfc12adfe4842bf07b657f0369c4cb522955686",
-  )
-
-
-  # io.grpc:grpc-netty:jar:1.9.0
-  native.maven_jar(
-      name = "io_netty_netty_codec_http2",
-      artifact = "io.netty:netty-codec-http2:4.1.17.Final",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "f9844005869c6d9049f4b677228a89fee4c6eab3",
-  )
-
-
-  # com.google.protobuf:protobuf-java-util:bundle:3.5.1
-  native.maven_jar(
-      name = "com_google_code_gson_gson",
-      artifact = "com.google.code.gson:gson:2.7",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "751f548c85fa49f330cecbb1875893f971b33c4e",
-  )
-
-
-  # io.grpc:grpc-protobuf-nano:jar:1.9.0
-  native.maven_jar(
-      name = "com_google_protobuf_nano_protobuf_javanano",
-      artifact = "com.google.protobuf.nano:protobuf-javanano:3.0.0-alpha-5",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "357e60f95cebb87c72151e49ba1f570d899734f8",
-  )
-
-
-  # com.google.http-client:google-http-client:jar:1.24.1
-  native.maven_jar(
-      name = "org_apache_httpcomponents_httpclient",
-      artifact = "org.apache.httpcomponents:httpclient:4.5.3",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "d1577ae15f01ef5438c5afc62162457c00a34713",
-  )
-
-
-  # com.google.cloud:google-cloud-core:jar:1.40.0
-  native.maven_jar(
-      name = "com_google_api_grpc_proto_google_iam_v1",
-      artifact = "com.google.api.grpc:proto-google-iam-v1:0.12.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "ea312c0250a5d0a7cdd1b20bc2c3259938b79855",
-  )
-
-
-  # io.opencensus:opencensus-api:jar:0.10.0 wanted version 1.8.0
-  # io.grpc:grpc-all:jar:1.9.0 got requested version
-  # com.google.cloud:google-cloud-core-grpc:jar:1.40.0 wanted version 1.13.1
-  # io.grpc:grpc-core:jar:1.9.0
-  native.maven_jar(
-      name = "io_grpc_grpc_context",
-      artifact = "io.grpc:grpc-context:1.9.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "28b0836f48c9705abf73829bbc536dba29a1329a",
-  )
-
-
-  # com.google.cloud:google-cloud-core-grpc:jar:1.40.0
-  native.maven_jar(
-      name = "com_google_api_gax_grpc",
-      artifact = "com.google.api:gax-grpc:1.30.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "ada82a4a0c020807e1c1a674b18658374264e401",
-  )
-
-
-  # com.google.api.grpc:proto-google-cloud-monitoring-v3:jar:1.22.0 wanted version 1.12.0
-  # com.google.api.grpc:proto-google-iam-v1:jar:0.12.0 wanted version 1.11.0
-  # com.google.api:gax-grpc:jar:1.30.0 wanted version 1.12.0
-  # com.google.api.grpc:proto-google-cloud-trace-v1:jar:0.23.0 wanted version 1.12.0
-  # io.grpc:grpc-protobuf:jar:1.9.0
-  # com.google.api.grpc:proto-google-cloud-trace-v2:jar:0.23.0 wanted version 1.12.0
-  # com.google.cloud:google-cloud-core:jar:1.40.0 wanted version 1.12.0
-  native.maven_jar(
-      name = "com_google_api_grpc_proto_google_common_protos",
-      artifact = "com.google.api.grpc:proto-google-common-protos:1.0.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "86f070507e28b930e50d218ee5b6788ef0dd05e6",
-  )
-
-
-  native.maven_jar(
-      name = "io_opencensus_opencensus_contrib_zpages",
-      artifact = "io.opencensus:opencensus-contrib-zpages:0.16.1",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "5fe09e41a9435281eb4547bc57ae34b9fd6bbf21",
-  )
-
-
-  # io.opencensus:opencensus-exporter-trace-stackdriver:jar:0.16.1 wanted version 20.0
-  # io.opencensus:opencensus-exporter-stats-prometheus:jar:0.16.1 wanted version 20.0
-  # io.opencensus:opencensus-exporter-stats-stackdriver:jar:0.16.1 wanted version 20.0
-  # io.grpc:grpc-protobuf-lite:jar:1.9.0 wanted version 19.0
-  # com.google.instrumentation:instrumentation-api:jar:0.4.3 wanted version 19.0
-  # io.grpc:grpc-protobuf:jar:1.9.0 wanted version 19.0
-  # io.opencensus:opencensus-contrib-zpages:jar:0.16.1 wanted version 20.0
-  # io.opencensus:opencensus-impl-core:jar:0.16.1 wanted version 20.0
-  # io.opencensus:opencensus-exporter-trace-logging:jar:0.16.1 wanted version 20.0
-  # io.grpc:grpc-protobuf-nano:jar:1.9.0 wanted version 19.0
-  # io.grpc:grpc-core:jar:1.9.0 wanted version 19.0
-  # com.google.protobuf:protobuf-java-util:bundle:3.5.1 wanted version 19.0
-  # io.opencensus:opencensus-api:jar:0.10.0 wanted version 19.0
-  native.maven_jar(
-      name = "com_google_guava_guava",
-      artifact = "com.google.guava:guava:23.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "c947004bb13d18182be60077ade044099e4f26f1",
-  )
-
-
-  native.maven_jar(
-      name = "io_grpc_grpc_all",
-      artifact = "io.grpc:grpc-all:1.9.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "442dfac27fd072e15b7134ab02c2b38136036090",
-  )
-
-
-  # com.google.cloud:google-cloud-core-grpc:jar:1.40.0 got requested version
-  # com.google.cloud:google-cloud-trace:jar:0.58.0-beta got requested version
-  # com.google.cloud:google-cloud-monitoring:jar:1.40.0
-  native.maven_jar(
-      name = "com_google_cloud_google_cloud_core",
-      artifact = "com.google.cloud:google-cloud-core:1.40.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "4985701f989030e262cf8f4e38cc954115f5b082",
-  )
-
-
-  # io.opencensus:opencensus-exporter-stats-stackdriver:jar:0.16.1
-  native.maven_jar(
-      name = "com_google_cloud_google_cloud_monitoring",
-      artifact = "com.google.cloud:google-cloud-monitoring:1.40.0",
-      repository = "http://repo.maven.apache.org/maven2/",
-      sha1 = "f03d20d67a5f3b0cd0685225a6ea5339d208aa53",
-  )
-
-
-
-
-def opencensus_java_libraries():
-  native.java_library(
-      name = "com_google_code_findbugs_jsr305",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_code_findbugs_jsr305//jar"],
-  )
-
-
-  native.java_library(
-      name = "io_grpc_grpc_protobuf_lite",
-      visibility = ["//visibility:public"],
-      exports = ["@io_grpc_grpc_protobuf_lite//jar"],
-      runtime_deps = [
-          ":com_google_guava_guava",
-          ":io_grpc_grpc_core",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_opencensus_opencensus_exporter_stats_prometheus",
-      visibility = ["//visibility:public"],
-      exports = ["@io_opencensus_opencensus_exporter_stats_prometheus//jar"],
-      runtime_deps = [
-          ":com_google_guava_guava",
-          ":io_opencensus_opencensus_api",
-          ":io_prometheus_simpleclient",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_auth_google_auth_library_oauth2_http",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_auth_google_auth_library_oauth2_http//jar"],
-      runtime_deps = [
-          ":com_google_auth_google_auth_library_credentials",
-          ":com_google_http_client_google_http_client",
-          ":com_google_http_client_google_http_client_jackson2",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_netty_netty_transport",
-      visibility = ["//visibility:public"],
-      exports = ["@io_netty_netty_transport//jar"],
-      runtime_deps = [
-          ":io_netty_netty_buffer",
-          ":io_netty_netty_common",
-          ":io_netty_netty_resolver",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_netty_netty_handler_proxy",
-      visibility = ["//visibility:public"],
-      exports = ["@io_netty_netty_handler_proxy//jar"],
-      runtime_deps = [
-          ":io_netty_netty_codec",
-          ":io_netty_netty_codec_http",
-          ":io_netty_netty_codec_socks",
-          ":io_netty_netty_transport",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_grpc_grpc_protobuf_nano",
-      visibility = ["//visibility:public"],
-      exports = ["@io_grpc_grpc_protobuf_nano//jar"],
-      runtime_deps = [
-          ":com_google_guava_guava",
-          ":com_google_protobuf_nano_protobuf_javanano",
-          ":io_grpc_grpc_core",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_cloud_google_cloud_trace",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_cloud_google_cloud_trace//jar"],
-      runtime_deps = [
-          ":com_google_api_api_common",
-          ":com_google_api_grpc_proto_google_cloud_trace_v1",
-          ":com_google_api_grpc_proto_google_cloud_trace_v2",
-          ":com_google_api_grpc_proto_google_common_protos",
-          ":com_google_cloud_google_cloud_core",
-          ":com_google_cloud_google_cloud_core_grpc",
-          ":com_google_protobuf_protobuf_java",
-          ":io_grpc_grpc_auth",
-          ":io_grpc_grpc_netty_shaded",
-          ":io_grpc_grpc_stub",
-      ],
-  )
-
-
-  native.java_library(
-      name = "commons_codec_commons_codec",
-      visibility = ["//visibility:public"],
-      exports = ["@commons_codec_commons_codec//jar"],
-  )
-
-
-  native.java_library(
-      name = "io_opencensus_opencensus_impl_core",
-      visibility = ["//visibility:public"],
-      exports = ["@io_opencensus_opencensus_impl_core//jar"],
-      runtime_deps = [
-          ":com_google_guava_guava",
-          ":io_opencensus_opencensus_api",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_prometheus_simpleclient_common",
-      visibility = ["//visibility:public"],
-      exports = ["@io_prometheus_simpleclient_common//jar"],
-      runtime_deps = [
-          ":io_prometheus_simpleclient",
-      ],
-  )
-
-
-  native.java_library(
-      name = "org_threeten_threetenbp",
-      visibility = ["//visibility:public"],
-      exports = ["@org_threeten_threetenbp//jar"],
-  )
-
-
-  native.java_library(
-      name = "com_google_errorprone_error_prone_annotations",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_errorprone_error_prone_annotations//jar"],
-  )
-
-
-  native.java_library(
-      name = "io_netty_netty_resolver",
-      visibility = ["//visibility:public"],
-      exports = ["@io_netty_netty_resolver//jar"],
-      runtime_deps = [
-          ":io_netty_netty_common",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_squareup_okio_okio",
-      visibility = ["//visibility:public"],
-      exports = ["@com_squareup_okio_okio//jar"],
-  )
-
-
-  native.java_library(
-      name = "com_google_protobuf_protobuf_java_util",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_protobuf_protobuf_java_util//jar"],
-      runtime_deps = [
-          ":com_google_code_gson_gson",
-          ":com_google_guava_guava",
-          ":com_google_protobuf_protobuf_java",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_auth_google_auth_library_credentials",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_auth_google_auth_library_credentials//jar"],
-  )
-
-
-  native.java_library(
-      name = "com_google_api_api_common",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_api_api_common//jar"],
-  )
-
-
-  native.java_library(
-      name = "io_opencensus_opencensus_contrib_grpc_metrics",
-      visibility = ["//visibility:public"],
-      exports = ["@io_opencensus_opencensus_contrib_grpc_metrics//jar"],
-      runtime_deps = [
-          ":com_google_code_findbugs_jsr305",
-          ":com_google_errorprone_error_prone_annotations",
-          ":io_opencensus_opencensus_api",
-      ],
-  )
-
-
-  native.java_library(
-      name = "org_objenesis_objenesis",
-      visibility = ["//visibility:public"],
-      exports = ["@org_objenesis_objenesis//jar"],
-  )
-
-
-  native.java_library(
-      name = "io_netty_netty_common",
-      visibility = ["//visibility:public"],
-      exports = ["@io_netty_netty_common//jar"],
-  )
-
-
-  native.java_library(
-      name = "com_google_api_grpc_proto_google_cloud_trace_v2",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_api_grpc_proto_google_cloud_trace_v2//jar"],
-      runtime_deps = [
-          ":com_google_api_api_common",
-          ":com_google_api_grpc_proto_google_common_protos",
-          ":com_google_protobuf_protobuf_java",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_grpc_grpc_netty_shaded",
-      visibility = ["//visibility:public"],
-      exports = ["@io_grpc_grpc_netty_shaded//jar"],
-      runtime_deps = [
-          ":io_grpc_grpc_core",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_api_grpc_proto_google_cloud_trace_v1",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_api_grpc_proto_google_cloud_trace_v1//jar"],
-      runtime_deps = [
-          ":com_google_api_api_common",
-          ":com_google_api_grpc_proto_google_common_protos",
-          ":com_google_protobuf_protobuf_java",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_grpc_grpc_okhttp",
-      visibility = ["//visibility:public"],
-      exports = ["@io_grpc_grpc_okhttp//jar"],
-      runtime_deps = [
-          ":com_squareup_okhttp_okhttp",
-          ":com_squareup_okio_okio",
-          ":io_grpc_grpc_core",
-      ],
-  )
-
-
-  native.java_library(
-      name = "org_hamcrest_hamcrest_core",
-      visibility = ["//visibility:public"],
-      exports = ["@org_hamcrest_hamcrest_core//jar"],
-  )
-
-
-  native.java_library(
-      name = "io_netty_netty_handler",
-      visibility = ["//visibility:public"],
-      exports = ["@io_netty_netty_handler//jar"],
-      runtime_deps = [
-          ":io_netty_netty_buffer",
-          ":io_netty_netty_codec",
-          ":io_netty_netty_transport",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_api_grpc_proto_google_cloud_monitoring_v3",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_api_grpc_proto_google_cloud_monitoring_v3//jar"],
-      runtime_deps = [
-          ":com_google_api_api_common",
-          ":com_google_api_grpc_proto_google_common_protos",
-          ":com_google_protobuf_protobuf_java",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_http_client_google_http_client",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_http_client_google_http_client//jar"],
-      runtime_deps = [
-          ":commons_codec_commons_codec",
-          ":commons_logging_commons_logging",
-          ":org_apache_httpcomponents_httpclient",
-          ":org_apache_httpcomponents_httpcore",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_prometheus_simpleclient_httpserver",
-      visibility = ["//visibility:public"],
-      exports = ["@io_prometheus_simpleclient_httpserver//jar"],
-      runtime_deps = [
-          ":io_prometheus_simpleclient",
-          ":io_prometheus_simpleclient_common",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_instrumentation_instrumentation_api",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_instrumentation_instrumentation_api//jar"],
-      runtime_deps = [
-          ":com_google_code_findbugs_jsr305",
-          ":com_google_guava_guava",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_http_client_google_http_client_jackson2",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_http_client_google_http_client_jackson2//jar"],
-  )
-
-
-  native.java_library(
-      name = "io_opencensus_opencensus_exporter_trace_logging",
-      visibility = ["//visibility:public"],
-      exports = ["@io_opencensus_opencensus_exporter_trace_logging//jar"],
-      runtime_deps = [
-          ":com_google_guava_guava",
-          ":io_opencensus_opencensus_api",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_grpc_grpc_auth",
-      visibility = ["//visibility:public"],
-      exports = ["@io_grpc_grpc_auth//jar"],
-      runtime_deps = [
-          ":com_google_auth_google_auth_library_credentials",
-          ":com_google_code_findbugs_jsr305",
-          ":com_google_errorprone_error_prone_annotations",
-          ":com_google_guava_guava",
-          ":com_google_instrumentation_instrumentation_api",
-          ":io_grpc_grpc_context",
-          ":io_grpc_grpc_core",
-          ":io_opencensus_opencensus_api",
-          ":io_opencensus_opencensus_contrib_grpc_metrics",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_api_gax",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_api_gax//jar"],
-      runtime_deps = [
-          ":com_google_api_api_common",
-          ":com_google_auth_google_auth_library_credentials",
-          ":com_google_auth_google_auth_library_oauth2_http",
-          ":com_google_http_client_google_http_client",
-          ":com_google_http_client_google_http_client_jackson2",
-          ":org_threeten_threetenbp",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_opencensus_opencensus_exporter_trace_stackdriver",
-      visibility = ["//visibility:public"],
-      exports = ["@io_opencensus_opencensus_exporter_trace_stackdriver//jar"],
-      runtime_deps = [
-          ":com_google_api_api_common",
-          ":com_google_api_grpc_proto_google_cloud_trace_v1",
-          ":com_google_api_grpc_proto_google_cloud_trace_v2",
-          ":com_google_api_grpc_proto_google_common_protos",
-          ":com_google_auth_google_auth_library_credentials",
-          ":com_google_cloud_google_cloud_core",
-          ":com_google_cloud_google_cloud_core_grpc",
-          ":com_google_cloud_google_cloud_trace",
-          ":com_google_guava_guava",
-          ":com_google_protobuf_protobuf_java",
-          ":io_grpc_grpc_auth",
-          ":io_grpc_grpc_netty_shaded",
-          ":io_grpc_grpc_stub",
-          ":io_opencensus_opencensus_api",
-          ":io_opencensus_opencensus_contrib_monitored_resource_util",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_j2objc_j2objc_annotations",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_j2objc_j2objc_annotations//jar"],
-  )
-
-
-  native.java_library(
-      name = "io_grpc_grpc_core",
-      visibility = ["//visibility:public"],
-      exports = ["@io_grpc_grpc_core//jar"],
-      runtime_deps = [
-          ":com_google_code_findbugs_jsr305",
-          ":com_google_errorprone_error_prone_annotations",
-          ":com_google_guava_guava",
-          ":com_google_instrumentation_instrumentation_api",
-          ":io_grpc_grpc_context",
-          ":io_opencensus_opencensus_api",
-          ":io_opencensus_opencensus_contrib_grpc_metrics",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_opencensus_opencensus_contrib_monitored_resource_util",
-      visibility = ["//visibility:public"],
-      exports = ["@io_opencensus_opencensus_contrib_monitored_resource_util//jar"],
-  )
-
-
-  native.java_library(
-      name = "joda_time_joda_time",
-      visibility = ["//visibility:public"],
-      exports = ["@joda_time_joda_time//jar"],
-  )
-
-
-  native.java_library(
-      name = "org_mockito_mockito_core",
-      visibility = ["//visibility:public"],
-      exports = ["@org_mockito_mockito_core//jar"],
-      runtime_deps = [
-          ":org_objenesis_objenesis",
-      ],
-  )
-
-
-  native.java_library(
-      name = "org_apache_httpcomponents_httpcore",
-      visibility = ["//visibility:public"],
-      exports = ["@org_apache_httpcomponents_httpcore//jar"],
-  )
-
-
-  native.java_library(
-      name = "com_lmax_disruptor",
-      visibility = ["//visibility:public"],
-      exports = ["@com_lmax_disruptor//jar"],
-  )
-
-
-  native.java_library(
-      name = "com_google_protobuf_protobuf_java",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_protobuf_protobuf_java//jar"],
-  )
-
-
-  native.java_library(
-      name = "com_squareup_okhttp_okhttp",
-      visibility = ["//visibility:public"],
-      exports = ["@com_squareup_okhttp_okhttp//jar"],
-      runtime_deps = [
-          ":com_squareup_okio_okio",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_grpc_grpc_stub",
-      visibility = ["//visibility:public"],
-      exports = ["@io_grpc_grpc_stub//jar"],
-      runtime_deps = [
-          ":io_grpc_grpc_core",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_opencensus_opencensus_impl",
-      visibility = ["//visibility:public"],
-      exports = ["@io_opencensus_opencensus_impl//jar"],
-      runtime_deps = [
-          ":com_google_guava_guava",
-          ":com_lmax_disruptor",
-          ":io_opencensus_opencensus_api",
-          ":io_opencensus_opencensus_impl_core",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_grpc_grpc_protobuf",
-      visibility = ["//visibility:public"],
-      exports = ["@io_grpc_grpc_protobuf//jar"],
-      runtime_deps = [
-          ":com_google_api_grpc_proto_google_common_protos",
-          ":com_google_code_gson_gson",
-          ":com_google_guava_guava",
-          ":com_google_protobuf_protobuf_java",
-          ":com_google_protobuf_protobuf_java_util",
-          ":io_grpc_grpc_core",
-          ":io_grpc_grpc_protobuf_lite",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_netty_netty_codec_socks",
-      visibility = ["//visibility:public"],
-      exports = ["@io_netty_netty_codec_socks//jar"],
-      runtime_deps = [
-          ":io_netty_netty_codec",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_netty_netty_codec",
-      visibility = ["//visibility:public"],
-      exports = ["@io_netty_netty_codec//jar"],
-      runtime_deps = [
-          ":io_netty_netty_buffer",
-          ":io_netty_netty_common",
-          ":io_netty_netty_resolver",
-          ":io_netty_netty_transport",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_netty_netty_buffer",
-      visibility = ["//visibility:public"],
-      exports = ["@io_netty_netty_buffer//jar"],
-      runtime_deps = [
-          ":io_netty_netty_common",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_cloud_google_cloud_core_grpc",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_cloud_google_cloud_core_grpc//jar"],
-      runtime_deps = [
-          ":com_google_api_api_common",
-          ":com_google_api_gax",
-          ":com_google_api_gax_grpc",
-          ":com_google_api_grpc_proto_google_common_protos",
-          ":com_google_auth_google_auth_library_credentials",
-          ":com_google_auth_google_auth_library_oauth2_http",
-          ":com_google_cloud_google_cloud_core",
-          ":com_google_protobuf_protobuf_java",
-          ":com_google_protobuf_protobuf_java_util",
-          ":io_grpc_grpc_auth",
-          ":io_grpc_grpc_context",
-          ":io_grpc_grpc_core",
-          ":io_grpc_grpc_netty_shaded",
-          ":io_grpc_grpc_protobuf",
-          ":io_grpc_grpc_stub",
-          ":org_threeten_threetenbp",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_grpc_grpc_netty",
-      visibility = ["//visibility:public"],
-      exports = ["@io_grpc_grpc_netty//jar"],
-      runtime_deps = [
-          ":io_grpc_grpc_core",
-          ":io_netty_netty_buffer",
-          ":io_netty_netty_codec",
-          ":io_netty_netty_codec_http",
-          ":io_netty_netty_codec_http2",
-          ":io_netty_netty_codec_socks",
-          ":io_netty_netty_common",
-          ":io_netty_netty_handler",
-          ":io_netty_netty_handler_proxy",
-          ":io_netty_netty_resolver",
-          ":io_netty_netty_transport",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_grpc_grpc_testing",
-      visibility = ["//visibility:public"],
-      exports = ["@io_grpc_grpc_testing//jar"],
-      runtime_deps = [
-          ":io_grpc_grpc_core",
-          ":io_grpc_grpc_stub",
-          ":junit_junit",
-          ":org_hamcrest_hamcrest_core",
-          ":org_mockito_mockito_core",
-          ":org_objenesis_objenesis",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_opencensus_opencensus_api",
-      visibility = ["//visibility:public"],
-      exports = ["@io_opencensus_opencensus_api//jar"],
-      runtime_deps = [
-          ":com_google_code_findbugs_jsr305",
-          ":com_google_errorprone_error_prone_annotations",
-          ":com_google_guava_guava",
-          ":io_grpc_grpc_context",
-      ],
-  )
-
-
-  native.java_library(
-      name = "junit_junit",
-      visibility = ["//visibility:public"],
-      exports = ["@junit_junit//jar"],
-      runtime_deps = [
-          ":org_hamcrest_hamcrest_core",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_prometheus_simpleclient",
-      visibility = ["//visibility:public"],
-      exports = ["@io_prometheus_simpleclient//jar"],
-  )
-
-
-  native.java_library(
-      name = "org_codehaus_mojo_animal_sniffer_annotations",
-      visibility = ["//visibility:public"],
-      exports = ["@org_codehaus_mojo_animal_sniffer_annotations//jar"],
-  )
-
-
-  native.java_library(
-      name = "io_opencensus_opencensus_exporter_stats_stackdriver",
-      visibility = ["//visibility:public"],
-      exports = ["@io_opencensus_opencensus_exporter_stats_stackdriver//jar"],
-      runtime_deps = [
-          ":com_google_api_api_common",
-          ":com_google_api_gax",
-          ":com_google_api_gax_grpc",
-          ":com_google_api_grpc_proto_google_cloud_monitoring_v3",
-          ":com_google_api_grpc_proto_google_common_protos",
-          ":com_google_api_grpc_proto_google_iam_v1",
-          ":com_google_auth_google_auth_library_credentials",
-          ":com_google_auth_google_auth_library_oauth2_http",
-          ":com_google_cloud_google_cloud_core",
-          ":com_google_cloud_google_cloud_core_grpc",
-          ":com_google_cloud_google_cloud_monitoring",
-          ":com_google_guava_guava",
-          ":com_google_http_client_google_http_client",
-          ":com_google_http_client_google_http_client_jackson2",
-          ":com_google_protobuf_protobuf_java",
-          ":com_google_protobuf_protobuf_java_util",
-          ":commons_codec_commons_codec",
-          ":commons_logging_commons_logging",
-          ":io_grpc_grpc_auth",
-          ":io_grpc_grpc_context",
-          ":io_grpc_grpc_core",
-          ":io_grpc_grpc_netty_shaded",
-          ":io_grpc_grpc_protobuf",
-          ":io_grpc_grpc_stub",
-          ":io_opencensus_opencensus_api",
-          ":io_opencensus_opencensus_contrib_monitored_resource_util",
-          ":joda_time_joda_time",
-          ":org_apache_httpcomponents_httpclient",
-          ":org_apache_httpcomponents_httpcore",
-          ":org_threeten_threetenbp",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_netty_netty_codec_http",
-      visibility = ["//visibility:public"],
-      exports = ["@io_netty_netty_codec_http//jar"],
-      runtime_deps = [
-          ":io_netty_netty_buffer",
-          ":io_netty_netty_codec",
-          ":io_netty_netty_common",
-          ":io_netty_netty_resolver",
-          ":io_netty_netty_transport",
-      ],
-  )
-
-
-  native.java_library(
-      name = "commons_logging_commons_logging",
-      visibility = ["//visibility:public"],
-      exports = ["@commons_logging_commons_logging//jar"],
-  )
-
-
-  native.java_library(
-      name = "io_netty_netty_codec_http2",
-      visibility = ["//visibility:public"],
-      exports = ["@io_netty_netty_codec_http2//jar"],
-      runtime_deps = [
-          ":io_netty_netty_buffer",
-          ":io_netty_netty_codec",
-          ":io_netty_netty_codec_http",
-          ":io_netty_netty_common",
-          ":io_netty_netty_handler",
-          ":io_netty_netty_resolver",
-          ":io_netty_netty_transport",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_code_gson_gson",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_code_gson_gson//jar"],
-  )
-
-
-  native.java_library(
-      name = "com_google_protobuf_nano_protobuf_javanano",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_protobuf_nano_protobuf_javanano//jar"],
-  )
-
-
-  native.java_library(
-      name = "org_apache_httpcomponents_httpclient",
-      visibility = ["//visibility:public"],
-      exports = ["@org_apache_httpcomponents_httpclient//jar"],
-      runtime_deps = [
-          ":commons_codec_commons_codec",
-          ":commons_logging_commons_logging",
-          ":org_apache_httpcomponents_httpcore",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_api_grpc_proto_google_iam_v1",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_api_grpc_proto_google_iam_v1//jar"],
-      runtime_deps = [
-          ":com_google_api_api_common",
-          ":com_google_api_grpc_proto_google_common_protos",
-          ":com_google_protobuf_protobuf_java",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_grpc_grpc_context",
-      visibility = ["//visibility:public"],
-      exports = ["@io_grpc_grpc_context//jar"],
-  )
-
-
-  native.java_library(
-      name = "com_google_api_gax_grpc",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_api_gax_grpc//jar"],
-      runtime_deps = [
-          ":com_google_api_api_common",
-          ":com_google_api_gax",
-          ":com_google_api_grpc_proto_google_common_protos",
-          ":com_google_auth_google_auth_library_credentials",
-          ":com_google_auth_google_auth_library_oauth2_http",
-          ":io_grpc_grpc_auth",
-          ":io_grpc_grpc_protobuf",
-          ":io_grpc_grpc_stub",
-          ":org_threeten_threetenbp",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_api_grpc_proto_google_common_protos",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_api_grpc_proto_google_common_protos//jar"],
-  )
-
-
-  native.java_library(
-      name = "io_opencensus_opencensus_contrib_zpages",
-      visibility = ["//visibility:public"],
-      exports = ["@io_opencensus_opencensus_contrib_zpages//jar"],
-      runtime_deps = [
-          ":com_google_guava_guava",
-          ":io_opencensus_opencensus_api",
-          ":io_opencensus_opencensus_contrib_grpc_metrics",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_guava_guava",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_guava_guava//jar"],
-      runtime_deps = [
-          ":com_google_code_findbugs_jsr305",
-          ":com_google_errorprone_error_prone_annotations",
-          ":com_google_j2objc_j2objc_annotations",
-          ":org_codehaus_mojo_animal_sniffer_annotations",
-      ],
-  )
-
-
-  native.java_library(
-      name = "io_grpc_grpc_all",
-      visibility = ["//visibility:public"],
-      exports = ["@io_grpc_grpc_all//jar"],
-      runtime_deps = [
-          ":com_google_api_grpc_proto_google_common_protos",
-          ":com_google_auth_google_auth_library_credentials",
-          ":com_google_code_findbugs_jsr305",
-          ":com_google_code_gson_gson",
-          ":com_google_errorprone_error_prone_annotations",
-          ":com_google_guava_guava",
-          ":com_google_instrumentation_instrumentation_api",
-          ":com_google_protobuf_nano_protobuf_javanano",
-          ":com_google_protobuf_protobuf_java",
-          ":com_google_protobuf_protobuf_java_util",
-          ":com_squareup_okhttp_okhttp",
-          ":com_squareup_okio_okio",
-          ":io_grpc_grpc_auth",
-          ":io_grpc_grpc_context",
-          ":io_grpc_grpc_core",
-          ":io_grpc_grpc_netty",
-          ":io_grpc_grpc_okhttp",
-          ":io_grpc_grpc_protobuf",
-          ":io_grpc_grpc_protobuf_lite",
-          ":io_grpc_grpc_protobuf_nano",
-          ":io_grpc_grpc_stub",
-          ":io_grpc_grpc_testing",
-          ":io_netty_netty_buffer",
-          ":io_netty_netty_codec",
-          ":io_netty_netty_codec_http",
-          ":io_netty_netty_codec_http2",
-          ":io_netty_netty_codec_socks",
-          ":io_netty_netty_common",
-          ":io_netty_netty_handler",
-          ":io_netty_netty_handler_proxy",
-          ":io_netty_netty_resolver",
-          ":io_netty_netty_transport",
-          ":io_opencensus_opencensus_api",
-          ":io_opencensus_opencensus_contrib_grpc_metrics",
-          ":junit_junit",
-          ":org_hamcrest_hamcrest_core",
-          ":org_mockito_mockito_core",
-          ":org_objenesis_objenesis",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_cloud_google_cloud_core",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_cloud_google_cloud_core//jar"],
-      runtime_deps = [
-          ":com_google_api_api_common",
-          ":com_google_api_gax",
-          ":com_google_api_grpc_proto_google_common_protos",
-          ":com_google_api_grpc_proto_google_iam_v1",
-          ":com_google_auth_google_auth_library_credentials",
-          ":com_google_auth_google_auth_library_oauth2_http",
-          ":com_google_http_client_google_http_client",
-          ":com_google_http_client_google_http_client_jackson2",
-          ":com_google_protobuf_protobuf_java",
-          ":com_google_protobuf_protobuf_java_util",
-          ":commons_codec_commons_codec",
-          ":commons_logging_commons_logging",
-          ":joda_time_joda_time",
-          ":org_apache_httpcomponents_httpclient",
-          ":org_apache_httpcomponents_httpcore",
-          ":org_threeten_threetenbp",
-      ],
-  )
-
-
-  native.java_library(
-      name = "com_google_cloud_google_cloud_monitoring",
-      visibility = ["//visibility:public"],
-      exports = ["@com_google_cloud_google_cloud_monitoring//jar"],
-      runtime_deps = [
-          ":com_google_api_api_common",
-          ":com_google_api_gax",
-          ":com_google_api_gax_grpc",
-          ":com_google_api_grpc_proto_google_cloud_monitoring_v3",
-          ":com_google_api_grpc_proto_google_common_protos",
-          ":com_google_api_grpc_proto_google_iam_v1",
-          ":com_google_auth_google_auth_library_credentials",
-          ":com_google_auth_google_auth_library_oauth2_http",
-          ":com_google_cloud_google_cloud_core",
-          ":com_google_cloud_google_cloud_core_grpc",
-          ":com_google_http_client_google_http_client",
-          ":com_google_http_client_google_http_client_jackson2",
-          ":com_google_protobuf_protobuf_java",
-          ":com_google_protobuf_protobuf_java_util",
-          ":commons_codec_commons_codec",
-          ":commons_logging_commons_logging",
-          ":io_grpc_grpc_auth",
-          ":io_grpc_grpc_context",
-          ":io_grpc_grpc_core",
-          ":io_grpc_grpc_netty_shaded",
-          ":io_grpc_grpc_protobuf",
-          ":io_grpc_grpc_stub",
-          ":joda_time_joda_time",
-          ":org_apache_httpcomponents_httpclient",
-          ":org_apache_httpcomponents_httpcore",
-          ":org_threeten_threetenbp",
-      ],
-  )
-
-
diff --git a/examples/pom.xml b/examples/pom.xml
index 5f08312..e12e8e3 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -1,17 +1,19 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
   xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <groupId>io.opencensus</groupId>
   <artifactId>opencensus-examples</artifactId>
   <packaging>jar</packaging>
-  <version>0.17.0-SNAPSHOT</version><!-- CURRENT_OPENCENSUS_VERSION -->
+  <version>0.32.0-SNAPSHOT</version><!-- CURRENT_OPENCENSUS_VERSION -->
   <name>opencensus-examples</name>
   <url>http://maven.apache.org</url>
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <!-- change to the version you want to use. -->
-    <opencensus.version>0.16.1</opencensus.version><!-- LATEST_OPENCENSUS_RELEASE_VERSION -->
-    <grpc.version>1.13.1</grpc.version><!-- CURRENT_GRPC_VERSION -->
+    <jetty.version>9.4.41.v20210516</jetty.version>
+    <opencensus.version>0.28.3</opencensus.version> <!-- LATEST_OPENCENSUS_RELEASE_VERSION -->
+    <grpc.version>1.32.2</grpc.version> <!-- CURRENT_GRPC_VERSION -->
   </properties>
   <dependencies>
     <dependency>
@@ -26,13 +28,39 @@
     </dependency>
     <dependency>
       <groupId>io.opencensus</groupId>
+      <artifactId>opencensus-contrib-http-util</artifactId>
+      <version>${opencensus.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.opencensus</groupId>
+      <artifactId>opencensus-contrib-http-jetty-client</artifactId>
+      <version>${opencensus.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.opencensus</groupId>
+      <artifactId>opencensus-contrib-http-servlet</artifactId>
+      <version>${opencensus.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.opencensus</groupId>
       <artifactId>opencensus-contrib-zpages</artifactId>
       <version>${opencensus.version}</version>
     </dependency>
     <dependency>
       <groupId>io.opencensus</groupId>
+      <artifactId>opencensus-exporter-metrics-ocagent</artifactId>
+      <version>${opencensus.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.opencensus</groupId>
       <artifactId>opencensus-exporter-stats-stackdriver</artifactId>
       <version>${opencensus.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>io.grpc</groupId>
+          <artifactId>grpc-api</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>io.opencensus</groupId>
@@ -41,8 +69,19 @@
     </dependency>
     <dependency>
       <groupId>io.opencensus</groupId>
+      <artifactId>opencensus-exporter-trace-jaeger</artifactId>
+      <version>${opencensus.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.opencensus</groupId>
       <artifactId>opencensus-exporter-trace-stackdriver</artifactId>
       <version>${opencensus.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>io.grpc</groupId>
+          <artifactId>grpc-api</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>io.opencensus</groupId>
@@ -50,12 +89,27 @@
       <version>${opencensus.version}</version>
     </dependency>
     <dependency>
+      <groupId>io.opencensus</groupId>
+      <artifactId>opencensus-exporter-trace-ocagent</artifactId>
+      <version>${opencensus.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.grpc</groupId>
+      <artifactId>grpc-core</artifactId>
+      <version>${grpc.version}</version>
+    </dependency>
+    <dependency>
       <groupId>io.grpc</groupId>
       <artifactId>grpc-netty</artifactId>
       <version>${grpc.version}</version>
     </dependency>
     <dependency>
       <groupId>io.grpc</groupId>
+      <artifactId>grpc-netty-shaded</artifactId>
+      <version>${grpc.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.grpc</groupId>
       <artifactId>grpc-protobuf</artifactId>
       <version>${grpc.version}</version>
     </dependency>
@@ -65,9 +119,14 @@
       <version>${grpc.version}</version>
     </dependency>
     <dependency>
+      <groupId>io.grpc</groupId>
+      <artifactId>grpc-census</artifactId>
+      <version>${grpc.version}</version>
+    </dependency>
+    <dependency>
       <groupId>io.prometheus</groupId>
       <artifactId>simpleclient_httpserver</artifactId>
-      <version>0.3.0</version>
+      <version>0.6.0</version>
     </dependency>
     <dependency>
       <groupId>io.opencensus</groupId>
@@ -81,6 +140,31 @@
       <version>2.0.8.Final</version>
       <scope>runtime</scope>
     </dependency>
+    <dependency>
+      <groupId>javax.servlet</groupId>
+      <artifactId>javax.servlet-api</artifactId>
+      <version>3.1.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-client</artifactId>
+      <version>${jetty.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-server</artifactId>
+      <version>${jetty.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-servlet</artifactId>
+      <version>${jetty.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+      <version>1.7.25</version>
+    </dependency>
   </dependencies>
   <build>
     <extensions>
@@ -142,17 +226,56 @@
               <id>HelloWorldServer</id>
               <mainClass>io.opencensus.examples.grpc.helloworld.HelloWorldServer</mainClass>
             </program>
+            <program>
+              <id>Repl</id>
+              <mainClass>io.opencensus.examples.quickstart.Repl</mainClass>
+            </program>
+            <program>
+              <id>StackdriverQuickstart</id>
+              <mainClass>io.opencensus.examples.stats.StackdriverQuickstart</mainClass>
+            </program>
+            <program>
+              <id>DerivedDoubleGaugeQuickstart</id>
+              <mainClass>io.opencensus.examples.gauges.DerivedDoubleGaugeQuickstart</mainClass>
+            </program>
+            <program>
+              <id>DerivedLongGaugeQuickstart</id>
+              <mainClass>io.opencensus.examples.gauges.DerivedLongGaugeQuickstart</mainClass>
+            </program>
+            <program>
+              <id>DoubleGaugeQuickstart</id>
+              <mainClass>io.opencensus.examples.gauges.DoubleGaugeQuickstart</mainClass>
+            </program>
+            <program>
+              <id>LongGaugeQuickstart</id>
+              <mainClass>io.opencensus.examples.gauges.LongGaugeQuickstart</mainClass>
+            </program>
+            <program>
+              <id>HttpJettyClient</id>
+              <mainClass>io.opencensus.examples.http.jetty.client.HelloWorldClient</mainClass>
+            </program>
+            <program>
+              <id>HttpJettyServer</id>
+              <mainClass>io.opencensus.examples.http.jetty.server.HelloWorldServer</mainClass>
+            </program>
+            <program>
+              <id>OcAgentExportersQuickStart</id>
+              <mainClass>io.opencensus.examples.ocagent.OcAgentExportersQuickStart</mainClass>
+            </program>
           </programs>
         </configuration>
       </plugin>
       <plugin>
         <groupId>org.xolstice.maven.plugins</groupId>
         <artifactId>protobuf-maven-plugin</artifactId>
-        <version>0.5.0</version>
+        <version>0.6.1</version>
         <configuration>
-          <protocArtifact>com.google.protobuf:protoc:3.5.1-1:exe:${os.detected.classifier}</protocArtifact>
+          <protocArtifact>com.google.protobuf:protoc:3.6.1:exe:${os.detected.classifier}
+          </protocArtifact>
           <pluginId>grpc-java</pluginId>
-          <pluginArtifact>io.grpc:protoc-gen-grpc-java:${grpc.version}:exe:${os.detected.classifier}</pluginArtifact>
+          <pluginArtifact>
+            io.grpc:protoc-gen-grpc-java:${grpc.version}:exe:${os.detected.classifier}
+          </pluginArtifact>
         </configuration>
         <executions>
           <execution>
@@ -166,4 +289,3 @@
     </plugins>
   </build>
 </project>
-
diff --git a/examples/spring/servlet/.gitignore b/examples/spring/servlet/.gitignore
new file mode 100644
index 0000000..51f2f98
--- /dev/null
+++ b/examples/spring/servlet/.gitignore
@@ -0,0 +1,3 @@
+.mvn/**
+mvn**
+
diff --git a/examples/spring/servlet/application.properties b/examples/spring/servlet/application.properties
new file mode 100644
index 0000000..d934d4d
--- /dev/null
+++ b/examples/spring/servlet/application.properties
@@ -0,0 +1,2 @@
+opencensus.spring.enabled = true
+opencensus.spring.trace.publicEndpoint = false
diff --git a/examples/spring/servlet/build.gradle b/examples/spring/servlet/build.gradle
new file mode 100644
index 0000000..c993f27
--- /dev/null
+++ b/examples/spring/servlet/build.gradle
@@ -0,0 +1,107 @@
+description = 'OpenCensus Examples Spring Servlet'
+
+buildscript {
+    repositories {
+        mavenCentral()
+        mavenLocal()
+        maven {
+            url "https://plugins.gradle.org/m2/"
+        }
+    }
+    dependencies {
+        classpath 'org.springframework.boot:spring-boot-gradle-plugin:2.0.5.RELEASE'
+        classpath 'com.github.ben-manes:gradle-versions-plugin:0.20.0'
+        classpath "gradle.plugin.com.github.sherter.google-java-format:google-java-format-gradle-plugin:0.8"
+    }
+}
+
+apply plugin: "checkstyle"
+apply plugin: 'com.github.sherter.google-java-format'
+apply plugin: 'idea'
+apply plugin: 'java'
+
+// Display the version report using: ./gradlew dependencyUpdates
+// Also see https://github.com/ben-manes/gradle-versions-plugin.
+apply plugin: 'com.github.ben-manes.versions'
+
+repositories {
+    mavenCentral()
+    mavenLocal()
+}
+
+group = "io.opencensus"
+version = "0.32.0-SNAPSHOT" // CURRENT_OPENCENSUS_VERSION
+
+def opencensusVersion = "0.26.0" // LATEST_OPENCENSUS_RELEASE_VERSION
+def prometheusVersion = "0.6.0"
+def httpasyncclientVersion = "4.1.4"
+
+
+tasks.withType(JavaCompile) {
+    sourceCompatibility = '1.8'
+    targetCompatibility = '1.8'
+}
+
+googleJavaFormat {
+    toolVersion '1.7'
+    source = 'src/main'
+    include '**/*.java'
+}
+
+verifyGoogleJavaFormat {
+    source = 'src/main'
+    include '**/*.java'
+}
+
+// Inform IDEs like IntelliJ IDEA, Eclipse or NetBeans about the generated code.
+sourceSets {
+    main {
+        java {
+            srcDir 'src'
+        }
+    }
+}
+
+checkstyle {
+    configFile = file("$rootDir/../../../buildscripts/checkstyle.xml")
+    toolVersion = "8.12"
+    ignoreFailures = false
+    configProperties["rootDir"] = "$rootDir/../../.."
+}
+
+// Disable checkstyle if no java8.
+checkstyleMain.source = 'src/main'
+checkstyleTest.source = 'src/main'
+buildscript {
+    dependencies {
+        classpath 'org.springframework.boot:spring-boot-gradle-plugin:2.0.5.RELEASE'
+    }
+}
+
+apply plugin: 'java'
+apply plugin: 'org.springframework.boot'
+apply plugin: 'io.spring.dependency-management'
+
+bootJar {
+    mainClassName = 'com.baeldung.Application'
+    baseName = 'opencensus-examples-spring-servlet'
+    version = "0.32.0-SNAPSHOT" // CURRENT_OPENCENSUS_VERSION
+}
+
+sourceCompatibility = 1.8
+targetCompatibility = 1.8
+
+dependencyManagement {
+    imports {
+        mavenBom "io.opencensus:opencensus-contrib-spring-starter:${opencensusVersion}"
+    }
+}
+
+dependencies {
+    compile("io.opencensus:opencensus-contrib-spring-starter:${opencensusVersion}")
+
+    compile("io.opencensus:opencensus-exporter-stats-prometheus:${opencensusVersion}",
+            "io.opencensus:opencensus-exporter-trace-logging:${opencensusVersion}",
+            "io.prometheus:simpleclient_httpserver:${prometheusVersion}",
+            "org.apache.httpcomponents:httpasyncclient:${httpasyncclientVersion}")
+}
diff --git a/examples/spring/servlet/gradle/wrapper/gradle-wrapper.jar b/examples/spring/servlet/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000..758de96
--- /dev/null
+++ b/examples/spring/servlet/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/examples/spring/servlet/gradle/wrapper/gradle-wrapper.properties b/examples/spring/servlet/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..a95009c
--- /dev/null
+++ b/examples/spring/servlet/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,5 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-4.9-bin.zip
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/examples/spring/servlet/gradlew b/examples/spring/servlet/gradlew
new file mode 100755
index 0000000..cccdd3d
--- /dev/null
+++ b/examples/spring/servlet/gradlew
@@ -0,0 +1,172 @@
+#!/usr/bin/env sh
+
+##############################################################################
+##
+##  Gradle start up script for UN*X
+##
+##############################################################################
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+        PRG="$link"
+    else
+        PRG=`dirname "$PRG"`"/$link"
+    fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn () {
+    echo "$*"
+}
+
+die () {
+    echo
+    echo "$*"
+    echo
+    exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+  CYGWIN* )
+    cygwin=true
+    ;;
+  Darwin* )
+    darwin=true
+    ;;
+  MINGW* )
+    msys=true
+    ;;
+  NONSTOP* )
+    nonstop=true
+    ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+        JAVACMD="$JAVA_HOME/bin/java"
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD="java"
+    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+    MAX_FD_LIMIT=`ulimit -H -n`
+    if [ $? -eq 0 ] ; then
+        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+            MAX_FD="$MAX_FD_LIMIT"
+        fi
+        ulimit -n $MAX_FD
+        if [ $? -ne 0 ] ; then
+            warn "Could not set maximum file descriptor limit: $MAX_FD"
+        fi
+    else
+        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+    fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+    JAVACMD=`cygpath --unix "$JAVACMD"`
+
+    # We build the pattern for arguments to be converted via cygpath
+    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+    SEP=""
+    for dir in $ROOTDIRSRAW ; do
+        ROOTDIRS="$ROOTDIRS$SEP$dir"
+        SEP="|"
+    done
+    OURCYGPATTERN="(^($ROOTDIRS))"
+    # Add a user-defined pattern to the cygpath arguments
+    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+    fi
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    i=0
+    for arg in "$@" ; do
+        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option
+
+        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
+            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+        else
+            eval `echo args$i`="\"$arg\""
+        fi
+        i=$((i+1))
+    done
+    case $i in
+        (0) set -- ;;
+        (1) set -- "$args0" ;;
+        (2) set -- "$args0" "$args1" ;;
+        (3) set -- "$args0" "$args1" "$args2" ;;
+        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+    esac
+fi
+
+# Escape application args
+save () {
+    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+    echo " "
+}
+APP_ARGS=$(save "$@")
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
+if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
+  cd "$(dirname "$0")"
+fi
+
+exec "$JAVACMD" "$@"
diff --git a/examples/spring/servlet/gradlew.bat b/examples/spring/servlet/gradlew.bat
new file mode 100644
index 0000000..e95643d
--- /dev/null
+++ b/examples/spring/servlet/gradlew.bat
@@ -0,0 +1,84 @@
+@if "%DEBUG%" == "" @echo off

+@rem ##########################################################################

+@rem

+@rem  Gradle startup script for Windows

+@rem

+@rem ##########################################################################

+

+@rem Set local scope for the variables with windows NT shell

+if "%OS%"=="Windows_NT" setlocal

+

+set DIRNAME=%~dp0

+if "%DIRNAME%" == "" set DIRNAME=.

+set APP_BASE_NAME=%~n0

+set APP_HOME=%DIRNAME%

+

+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.

+set DEFAULT_JVM_OPTS=

+

+@rem Find java.exe

+if defined JAVA_HOME goto findJavaFromJavaHome

+

+set JAVA_EXE=java.exe

+%JAVA_EXE% -version >NUL 2>&1

+if "%ERRORLEVEL%" == "0" goto init

+

+echo.

+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

+echo.

+echo Please set the JAVA_HOME variable in your environment to match the

+echo location of your Java installation.

+

+goto fail

+

+:findJavaFromJavaHome

+set JAVA_HOME=%JAVA_HOME:"=%

+set JAVA_EXE=%JAVA_HOME%/bin/java.exe

+

+if exist "%JAVA_EXE%" goto init

+

+echo.

+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%

+echo.

+echo Please set the JAVA_HOME variable in your environment to match the

+echo location of your Java installation.

+

+goto fail

+

+:init

+@rem Get command-line arguments, handling Windows variants

+

+if not "%OS%" == "Windows_NT" goto win9xME_args

+

+:win9xME_args

+@rem Slurp the command line arguments.

+set CMD_LINE_ARGS=

+set _SKIP=2

+

+:win9xME_args_slurp

+if "x%~1" == "x" goto execute

+

+set CMD_LINE_ARGS=%*

+

+:execute

+@rem Setup the command line

+

+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

+

+@rem Execute Gradle

+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

+

+:end

+@rem End local scope for the variables with windows NT shell

+if "%ERRORLEVEL%"=="0" goto mainEnd

+

+:fail

+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of

+rem the _cmd.exe /c_ return code!

+if  not "" == "%GRADLE_EXIT_CONSOLE%" exit 1

+exit /b 1

+

+:mainEnd

+if "%OS%"=="Windows_NT" endlocal

+

+:omega

diff --git a/examples/spring/servlet/pom.xml b/examples/spring/servlet/pom.xml
new file mode 100644
index 0000000..0c3e102
--- /dev/null
+++ b/examples/spring/servlet/pom.xml
@@ -0,0 +1,93 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <groupId>io.opencensus</groupId>
+  <artifactId>opencensus-examples-spring-servlet</artifactId>
+  <version>0.27.0-SNAPSHOT</version>
+  <packaging>jar</packaging>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <!-- change to the version you want to use. -->
+    <apachehttp.version>4.1.4</apachehttp.version>
+    <opencensus.version>0.26.0</opencensus.version><!-- LATEST_OPENCENSUS_RELEASE_VERSION -->
+    <prometheus.version>0.6.0</prometheus.version>
+    <springboot.version>2.5.12</springboot.version>
+    <java.version>1.8</java.version>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-web</artifactId>
+      <version>${springboot.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.opencensus</groupId>
+      <artifactId>opencensus-contrib-spring-starter</artifactId>
+      <version>${opencensus.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.opencensus</groupId>
+      <artifactId>opencensus-exporter-stats-prometheus</artifactId>
+      <version>${opencensus.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.opencensus</groupId>
+      <artifactId>opencensus-exporter-trace-logging</artifactId>
+      <version>${opencensus.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.prometheus</groupId>
+      <artifactId>simpleclient_httpserver</artifactId>
+      <version>${prometheus.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpasyncclient</artifactId>
+      <version>${apachehttp.version}</version>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <extensions>
+      <extension>
+        <groupId>kr.motd.maven</groupId>
+        <artifactId>os-maven-plugin</artifactId>
+        <version>1.5.0.Final</version>
+      </extension>
+    </extensions>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-compiler-plugin</artifactId>
+          <version>3.7.0</version>
+          <configuration>
+            <source>1.8</source>
+            <target>1.8</target>
+          </configuration>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+    <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.10</version>
+        <configuration>
+          <programs>
+            <program>
+              <id>SpringServletApplication</id>
+              <mainClass>io.opencensus.examples.spring.servlet.Application</mainClass>
+            </program>
+          </programs>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>
diff --git a/examples/spring/servlet/settings.gradle b/examples/spring/servlet/settings.gradle
new file mode 100644
index 0000000..4de5678
--- /dev/null
+++ b/examples/spring/servlet/settings.gradle
@@ -0,0 +1 @@
+rootProject.name = 'opencensus-examples-spring-servlet'
diff --git a/examples/spring/servlet/src/main/java/io/opencensus/examples/spring/servlet/Application.java b/examples/spring/servlet/src/main/java/io/opencensus/examples/spring/servlet/Application.java
new file mode 100644
index 0000000..fba3ba3
--- /dev/null
+++ b/examples/spring/servlet/src/main/java/io/opencensus/examples/spring/servlet/Application.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.examples.spring.servlet;
+
+import io.opencensus.contrib.http.util.HttpViews;
+import io.opencensus.exporter.stats.prometheus.PrometheusStatsCollector;
+import io.opencensus.exporter.trace.logging.LoggingTraceExporter;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.config.TraceConfig;
+import io.opencensus.trace.samplers.Samplers;
+import io.prometheus.client.exporter.HTTPServer;
+import java.io.IOException;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.context.ApplicationContext;
+
+@SpringBootApplication
+public class Application {
+
+  private static void initStatsExporter() throws IOException {
+    HttpViews.registerAllServerViews();
+    HttpViews.registerAllClientViews();
+
+    // Register Prometheus exporters and export metrics to a Prometheus HTTPServer.
+    // Refer to https://prometheus.io/ to run Prometheus Server.
+    PrometheusStatsCollector.createAndRegister();
+    HTTPServer prometheusServer = new HTTPServer(9090, true);
+  }
+
+  private static void initTracingAndLoggingExporter() {
+    TraceConfig traceConfig = Tracing.getTraceConfig();
+    traceConfig.updateActiveTraceParams(
+        traceConfig.getActiveTraceParams().toBuilder().setSampler(Samplers.alwaysSample()).build());
+
+    LoggingTraceExporter.register();
+  }
+
+  /** Main launcher for the SpringServletApplication. */
+  public static void main(String[] args) throws IOException {
+    ApplicationContext ctx = SpringApplication.run(Application.class, args);
+
+    initTracingAndLoggingExporter();
+    initStatsExporter();
+  }
+}
diff --git a/examples/spring/servlet/src/main/java/io/opencensus/examples/spring/servlet/ApplicationAutoConfiguration.java b/examples/spring/servlet/src/main/java/io/opencensus/examples/spring/servlet/ApplicationAutoConfiguration.java
new file mode 100644
index 0000000..deaa67c
--- /dev/null
+++ b/examples/spring/servlet/src/main/java/io/opencensus/examples/spring/servlet/ApplicationAutoConfiguration.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.examples.spring.servlet;
+
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.http.client.AsyncClientHttpRequestFactory;
+import org.springframework.http.client.HttpComponentsAsyncClientHttpRequestFactory;
+import org.springframework.web.client.AsyncRestTemplate;
+
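+/** Provides the AsyncRestTemplate and its request factory as Spring beans for this example. */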
+@Configuration
+public class ApplicationAutoConfiguration {
+
+  /* Instance of AsyncRestTemplate. */
+  @Bean
+  public AsyncRestTemplate getAsyncRestTemplate(AsyncClientHttpRequestFactory factory) {
+    return new AsyncRestTemplate(factory);
+  }
+
+  /**
+   * Factory for AsyncClientHttpRequest.
+   *
+   * @return AsyncClientHttpRequestFactory
+   */
+  @Bean
+  public AsyncClientHttpRequestFactory getAsyncClientHttpRequestFactory() {
+    int timeout = 5000;
+    HttpComponentsAsyncClientHttpRequestFactory asyncClientHttpRequestFactory =
+        new HttpComponentsAsyncClientHttpRequestFactory();
+    asyncClientHttpRequestFactory.setConnectTimeout(timeout);
+    return asyncClientHttpRequestFactory;
+  }
+}
diff --git a/examples/spring/servlet/src/main/java/io/opencensus/examples/spring/servlet/HelloController.java b/examples/spring/servlet/src/main/java/io/opencensus/examples/spring/servlet/HelloController.java
new file mode 100644
index 0000000..b57985f
--- /dev/null
+++ b/examples/spring/servlet/src/main/java/io/opencensus/examples/spring/servlet/HelloController.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.examples.spring.servlet;
+
+import java.util.concurrent.ExecutionException;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.core.env.Environment;
+import org.springframework.http.ResponseEntity;
+import org.springframework.http.client.ClientHttpRequestFactory;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.util.concurrent.ListenableFuture;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.client.AsyncRestTemplate;
+import org.springframework.web.client.RestTemplate;
+
+/* Controller for Web server. */
+@RestController
+public class HelloController {
+  private static final Logger logger = Logger.getLogger(HelloController.class.getName());
+
+  /**
+   * Serves index page.
+   *
+   * @return String
+   */
+  @RequestMapping("/")
+  public String index() {
+    String str = "Hello from servlet instrumented with opencensus-spring";
+    String resp = restTemplate.getForObject(baseUrl() + "loopback", String.class);
+
+    String asyncUrl = baseUrl() + "asyncloopback";
+    ListenableFuture<ResponseEntity<String>> future1 =
+        asyncRestTemplate.getForEntity(asyncUrl, String.class);
+    ListenableFuture<ResponseEntity<String>> future2 =
+        asyncRestTemplate.getForEntity(asyncUrl, String.class);
+    ListenableFuture<ResponseEntity<String>> future3 =
+        asyncRestTemplate.getForEntity(asyncUrl, String.class);
+
+    String resp1 = null;
+    String resp2 = null;
+    String resp3 = null;
+    try {
+      resp1 = future1.get().toString();
+      resp2 = future2.get().toString();
+      resp3 = future3.get().toString();
+    } catch (InterruptedException | ExecutionException e) {
+      logger.log(Level.WARNING, "request failed", e);
+    }
+    return str + resp + "\n" + resp1 + "\n" + resp2 + "\n" + resp3;
+  }
+
+  /* Serves loopback endpoint. */
+  @RequestMapping("/loopback")
+  public String loopback() {
+    return "Loopback. Hello from servlet!";
+  }
+
+  /* Serves asyncloopback endpoint. */
+  @RequestMapping("/asyncloopback")
+  public String asyncLoopback() {
+    return "Async Loopback. Hello from servlet!";
+  }
+
+  private String baseUrl() {
+    Integer port = environment.getProperty("local.server.port", Integer.class);
+    return String.format("http://localhost:%d/", (port != null ? port : 0));
+  }
+
+  @Autowired Environment environment;
+
+  @Autowired AsyncRestTemplate asyncRestTemplate;
+
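+  // Blocking RestTemplate used for the synchronous call to the /loopback endpoint.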
+  RestTemplate restTemplate = new RestTemplate(getClientHttpRequestFactory());
+
+  private ClientHttpRequestFactory getClientHttpRequestFactory() {
+    int timeout = 5000;
+    HttpComponentsClientHttpRequestFactory clientHttpRequestFactory =
+        new HttpComponentsClientHttpRequestFactory();
+    clientHttpRequestFactory.setConnectTimeout(timeout);
+    return clientHttpRequestFactory;
+  }
+}
diff --git a/examples/spring/servlet/src/resources/META-INF/spring.factories b/examples/spring/servlet/src/resources/META-INF/spring.factories
new file mode 100644
index 0000000..4dd75dd
--- /dev/null
+++ b/examples/spring/servlet/src/resources/META-INF/spring.factories
@@ -0,0 +1,3 @@
+# Auto Configuration
+org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
+io.opencensus.examples.spring.servlet.ApplicationAutoConfiguration
diff --git a/examples/spring/servlet/src/resources/application.properties b/examples/spring/servlet/src/resources/application.properties
new file mode 100644
index 0000000..04c0b54
--- /dev/null
+++ b/examples/spring/servlet/src/resources/application.properties
@@ -0,0 +1 @@
+opencensus.spring.trace.propagation=B3
\ No newline at end of file
diff --git a/examples/src/main/java/io/opencensus/examples/gauges/DerivedDoubleGaugeQuickstart.java b/examples/src/main/java/io/opencensus/examples/gauges/DerivedDoubleGaugeQuickstart.java
new file mode 100644
index 0000000..d3cbc4a
--- /dev/null
+++ b/examples/src/main/java/io/opencensus/examples/gauges/DerivedDoubleGaugeQuickstart.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.examples.gauges;
+
+import io.opencensus.common.ToDoubleFunction;
+import io.opencensus.metrics.DerivedDoubleGauge;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.MetricRegistry;
+import io.opencensus.metrics.Metrics;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.LinkedBlockingQueue;
+
+/** Example showing how to create a {@link DerivedDoubleGauge}. */
+public class DerivedDoubleGaugeQuickstart {
+  private static final MetricRegistry metricRegistry = Metrics.getMetricRegistry();
+
+  // The label keys and values are used to uniquely identify timeseries.
+  private static final List<LabelKey> labelKeys =
+      Collections.singletonList(LabelKey.create("Name", "desc"));
+  private static final List<LabelValue> labelValues =
+      Collections.singletonList(LabelValue.create("Inbound"));
+
+  private static final DerivedDoubleGauge derivedDoubleGauge =
+      metricRegistry.addDerivedDoubleGauge("queue_size", "Pending jobs", "1", labelKeys);
+  private static final LinkedBlockingQueue blockingQueue = new LinkedBlockingQueue();
+
+  // To instrument a queue's depth.
+  private static void doWork() {
+    derivedDoubleGauge.createTimeSeries(
+        labelValues,
+        blockingQueue,
+        new ToDoubleFunction<LinkedBlockingQueue>() {
+          @Override
+          public double applyAsDouble(LinkedBlockingQueue queue) {
+            return queue.size();
+          }
+        });
+
+    // Your code here.
+  }
+
+  /** Main launcher for the DerivedDoubleGaugeQuickstart. */
+  public static void main(String[] args) {
+    // A derived double gauge metric is used to report an instantaneous measurement of a double
+    // value. It is the more convenient form when you want to define a gauge by executing a
+    // {@link ToDoubleFunction} on an object.
+
+    doWork();
+  }
+}
diff --git a/examples/src/main/java/io/opencensus/examples/gauges/DerivedLongGaugeQuickstart.java b/examples/src/main/java/io/opencensus/examples/gauges/DerivedLongGaugeQuickstart.java
new file mode 100644
index 0000000..bf8236b
--- /dev/null
+++ b/examples/src/main/java/io/opencensus/examples/gauges/DerivedLongGaugeQuickstart.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.examples.gauges;
+
+import io.opencensus.common.ToLongFunction;
+import io.opencensus.metrics.DerivedLongGauge;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.MetricRegistry;
+import io.opencensus.metrics.Metrics;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.LinkedBlockingQueue;
+
+/** Example showing how to create a {@link DerivedLongGauge}. */
+public class DerivedLongGaugeQuickstart {
+  private static final MetricRegistry metricRegistry = Metrics.getMetricRegistry();
+
+  // The label keys and values are used to uniquely identify timeseries.
+  private static final List<LabelKey> labelKeys =
+      Collections.singletonList(LabelKey.create("Name", "desc"));
+  private static final List<LabelValue> labelValues =
+      Collections.singletonList(LabelValue.create("Inbound"));
+
+  private static final DerivedLongGauge derivedLongGauge =
+      metricRegistry.addDerivedLongGauge("queue_size", "Pending jobs", "1", labelKeys);
+  private static final LinkedBlockingQueue blockingQueue = new LinkedBlockingQueue();
+
+  // To instrument a queue's depth.
+  private static void doWork() {
+    derivedLongGauge.createTimeSeries(
+        labelValues,
+        blockingQueue,
+        new ToLongFunction<LinkedBlockingQueue>() {
+          @Override
+          public long applyAsLong(LinkedBlockingQueue queue) {
+            return queue.size();
+          }
+        });
+
+    // Your code here.
+  }
+
+  /** Main launcher for the DerivedLongGaugeQuickstart. */
+  public static void main(String[] args) {
+    // A derived long gauge metric is used to report an instantaneous measurement of an int64
+    // value. It is the more convenient form when you want to define a gauge by executing a
+    // {@link ToLongFunction} on an object.
+
+    doWork();
+  }
+}
diff --git a/examples/src/main/java/io/opencensus/examples/gauges/DoubleGaugeQuickstart.java b/examples/src/main/java/io/opencensus/examples/gauges/DoubleGaugeQuickstart.java
new file mode 100644
index 0000000..b3b1d2f
--- /dev/null
+++ b/examples/src/main/java/io/opencensus/examples/gauges/DoubleGaugeQuickstart.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.examples.gauges;
+
+import io.opencensus.metrics.DoubleGauge;
+import io.opencensus.metrics.DoubleGauge.DoublePoint;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.MetricRegistry;
+import io.opencensus.metrics.Metrics;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Example showing how to create a {@link DoubleGauge} and manually set or add value of the gauge.
+ */
+public class DoubleGaugeQuickstart {
+  private static final MetricRegistry metricRegistry = Metrics.getMetricRegistry();
+
+  // The label keys and values are used to uniquely identify timeseries.
+  private static final List<LabelKey> labelKeys =
+      Collections.singletonList(LabelKey.create("Name", "desc"));
+  private static final List<LabelValue> labelValues =
+      Collections.singletonList(LabelValue.create("Inbound"));
+
+  private static final DoubleGauge doubleGauge =
+      metricRegistry.addDoubleGauge("queue_size", "Pending jobs", "1", labelKeys);
+  // It is recommended to keep a reference of a point for manual operations.
+  private static final DoublePoint pendingJobs = doubleGauge.getOrCreateTimeSeries(labelValues);
+
+  // Tracks the number of pending jobs in the queue.
+  private static void doWork() {
+    addJob();
+    // Your code here.
+    removeJob();
+  }
+
+  private static void addJob() {
+    pendingJobs.add(1);
+    // Your code here.
+  }
+
+  private static void removeJob() {
+    // Your code here.
+    pendingJobs.add(-1);
+  }
+
+  /** Main launcher for the DoubleGaugeQuickstart. */
+  public static void main(String[] args) {
+    // A double gauge metric is used to report an instantaneous measurement of a double value. It
+    // is the more convenient form when you want to manually increase and decrease values as per
+    // your service requirements.
+
+    doWork();
+  }
+}
diff --git a/examples/src/main/java/io/opencensus/examples/gauges/LongGaugeQuickstart.java b/examples/src/main/java/io/opencensus/examples/gauges/LongGaugeQuickstart.java
new file mode 100644
index 0000000..ed53e2b
--- /dev/null
+++ b/examples/src/main/java/io/opencensus/examples/gauges/LongGaugeQuickstart.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.examples.gauges;
+
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.LongGauge;
+import io.opencensus.metrics.LongGauge.LongPoint;
+import io.opencensus.metrics.MetricRegistry;
+import io.opencensus.metrics.Metrics;
+import java.util.Collections;
+import java.util.List;
+
+/** Example showing how to create a {@link LongGauge} and manually set or add value of the gauge. */
+public class LongGaugeQuickstart {
+  private static final MetricRegistry metricRegistry = Metrics.getMetricRegistry();
+
+  // The label keys and values are used to uniquely identify timeseries.
+  private static final List<LabelKey> labelKeys =
+      Collections.singletonList(LabelKey.create("Name", "desc"));
+  private static final List<LabelValue> labelValues =
+      Collections.singletonList(LabelValue.create("Inbound"));
+
+  private static final LongGauge longGauge =
+      metricRegistry.addLongGauge("queue_size", "Pending jobs", "1", labelKeys);
+  // It is recommended to keep a reference of a point for manual operations.
+  private static final LongPoint pendingJobs = longGauge.getOrCreateTimeSeries(labelValues);
+
+  // Tracks the number of pending jobs in the queue.
+  private static void doWork() {
+    addJob();
+    // Your code here.
+    removeJob();
+  }
+
+  private static void addJob() {
+    pendingJobs.add(1);
+    // Your code here.
+  }
+
+  private static void removeJob() {
+    // Your code here.
+    pendingJobs.add(-1);
+  }
+
+  /** Main launcher for the LongGaugeQuickstart. */
+  public static void main(String[] args) {
+    // A long gauge metric is used to report an instantaneous measurement of an int64 value. It
+    // is the more convenient form when you want to manually increase and decrease values as per
+    // your service requirements.
+
+    doWork();
+  }
+}
diff --git a/examples/src/main/java/io/opencensus/examples/grpc/helloworld/HelloWorldClient.java b/examples/src/main/java/io/opencensus/examples/grpc/helloworld/HelloWorldClient.java
index 30e4163..d0aa7a0 100644
--- a/examples/src/main/java/io/opencensus/examples/grpc/helloworld/HelloWorldClient.java
+++ b/examples/src/main/java/io/opencensus/examples/grpc/helloworld/HelloWorldClient.java
@@ -57,7 +57,7 @@
         ManagedChannelBuilder.forAddress(host, port)
             // Channels are secure by default (via SSL/TLS). For the example we disable TLS to avoid
             // needing certificates.
-            .usePlaintext(true)
+            .usePlaintext()
             .build());
   }
 
@@ -108,7 +108,8 @@
     final String cloudProjectId = getStringOrDefaultFromArgs(args, 3, null);
     final int zPagePort = getPortOrDefaultFromArgs(args, 4, 3001);
 
-    // Registers all RPC views.
+    // Registers all RPC views. For demonstration, all views are registered. You may want to
+    // start by registering basic views and add other views as needed for your application.
     RpcViews.registerAllViews();
 
     // Starts a HTTP server and registers all Zpages to it.
@@ -125,7 +126,7 @@
       StackdriverStatsExporter.createAndRegister(
           StackdriverStatsConfiguration.builder()
               .setProjectId(cloudProjectId)
-              .setExportInterval(Duration.create(15, 0))
+              .setExportInterval(Duration.create(60, 0))
               .build());
     }
 
diff --git a/examples/src/main/java/io/opencensus/examples/grpc/helloworld/HelloWorldServer.java b/examples/src/main/java/io/opencensus/examples/grpc/helloworld/HelloWorldServer.java
index 15a0a89..3650eba 100644
--- a/examples/src/main/java/io/opencensus/examples/grpc/helloworld/HelloWorldServer.java
+++ b/examples/src/main/java/io/opencensus/examples/grpc/helloworld/HelloWorldServer.java
@@ -39,6 +39,7 @@
 import io.opencensus.trace.Status;
 import io.opencensus.trace.Tracer;
 import io.opencensus.trace.Tracing;
+import io.opencensus.trace.config.TraceConfig;
 import io.opencensus.trace.samplers.Samplers;
 import io.prometheus.client.exporter.HTTPServer;
 import java.io.IOException;
@@ -60,10 +61,7 @@
   // A helper function that performs some work in its own Span.
   private static void performWork(Span parent) {
     SpanBuilder spanBuilder =
-        tracer
-            .spanBuilderWithExplicitParent("internal_work", parent)
-            .setRecordEvents(true)
-            .setSampler(Samplers.alwaysSample());
+        tracer.spanBuilderWithExplicitParent("internal_work", parent).setRecordEvents(true);
     try (Scope scope = spanBuilder.startScopedSpan()) {
       Span span = tracer.getCurrentSpan();
       span.putAttribute("my_attribute", AttributeValue.stringAttributeValue("blue"));
@@ -120,7 +118,13 @@
     final int zPagePort = getPortOrDefaultFromArgs(args, 2, 3000);
     final int prometheusPort = getPortOrDefaultFromArgs(args, 3, 9090);
 
-    // Registers all RPC views.
+    // For demo purposes, always sample
+    TraceConfig traceConfig = Tracing.getTraceConfig();
+    traceConfig.updateActiveTraceParams(
+        traceConfig.getActiveTraceParams().toBuilder().setSampler(Samplers.alwaysSample()).build());
+
+    // Registers all RPC views. For demonstration, all views are registered. You may want to
+    // start by registering basic views and add other views as needed for your application.
     RpcViews.registerAllViews();
 
     // Registers logging trace exporter.
@@ -137,7 +141,7 @@
       StackdriverStatsExporter.createAndRegister(
           StackdriverStatsConfiguration.builder()
               .setProjectId(cloudProjectId)
-              .setExportInterval(Duration.create(15, 0))
+              .setExportInterval(Duration.create(5, 0))
               .build());
     }
 
diff --git a/examples/src/main/java/io/opencensus/examples/http/jetty/client/HelloWorldClient.java b/examples/src/main/java/io/opencensus/examples/http/jetty/client/HelloWorldClient.java
new file mode 100644
index 0000000..2f56c9e
--- /dev/null
+++ b/examples/src/main/java/io/opencensus/examples/http/jetty/client/HelloWorldClient.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.examples.http.jetty.client;
+
+import io.opencensus.contrib.http.jetty.client.OcJettyHttpClient;
+import io.opencensus.contrib.http.util.HttpViews;
+import io.opencensus.exporter.stats.prometheus.PrometheusStatsCollector;
+import io.opencensus.exporter.trace.jaeger.JaegerTraceExporter;
+import io.opencensus.exporter.trace.logging.LoggingTraceExporter;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.config.TraceConfig;
+import io.opencensus.trace.samplers.Samplers;
+import io.prometheus.client.exporter.HTTPServer;
+import java.io.IOException;
+import org.apache.log4j.BasicConfigurator;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.eclipse.jetty.client.HttpRequest;
+import org.eclipse.jetty.client.http.HttpClientTransportOverHTTP;
+import org.eclipse.jetty.client.util.StringContentProvider;
+import org.eclipse.jetty.http.HttpMethod;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+
+/** Sample application that shows how to instrument jetty client. */
+public class HelloWorldClient {
+
+  private static final Logger logger = Logger.getLogger(HelloWorldClient.class.getName());
+
+  private static void initTracing() {
+    TraceConfig traceConfig = Tracing.getTraceConfig();
+    Logger.getRootLogger().setLevel(Level.INFO);
+    traceConfig.updateActiveTraceParams(
+        traceConfig.getActiveTraceParams().toBuilder().setSampler(Samplers.alwaysSample()).build());
+
+    LoggingTraceExporter.register();
+    // Register Jaeger Tracing. Refer to https://www.jaegertracing.io/docs/1.8/getting-started/ to
+    // run Jaeger.
+    JaegerTraceExporter.createAndRegister("http://localhost:14268/api/traces", "helloworldclient");
+  }
+
+  private static void initStatsExporter() throws IOException {
+    HttpViews.registerAllClientViews();
+
+    // Register Prometheus exporters and export metrics to a Prometheus HTTPServer.
+    // Refer to https://prometheus.io/ to run Prometheus Server.
+    PrometheusStatsCollector.createAndRegister();
+    HTTPServer prometheusServer = new HTTPServer(9091, true);
+  }
+
+  /**
+   * HelloWorldClient periodically sends HTTP requests to {@link HelloWorldServer}. These requests
+   * are instrumented with the OpenCensus Jetty client library to enable tracing and stats monitoring.
+   */
+  public static void main(String[] args) throws Exception {
+    BasicConfigurator.configure();
+
+    initTracing();
+    initStatsExporter();
+
+    // Create an HTTP client that will trace requests. By default the trace context is propagated
+    // using the W3C TraceContext propagator.
+    // To use B3 propagation, use the following:
+    //    OcJettyHttpClient httpClient =
+    //        new OcJettyHttpClient(
+    //            new HttpClientTransportOverHTTP(),
+    //            new SslContextFactory(),
+    //            null,
+    //            Tracing.getPropagationComponent().getB3Format());
+    OcJettyHttpClient httpClient =
+        new OcJettyHttpClient(
+            new HttpClientTransportOverHTTP(), new SslContextFactory(), null, null);
+
+    httpClient.start();
+
+    do {
+      HttpRequest request =
+          (HttpRequest)
+              httpClient
+                  .newRequest("http://localhost:8080/helloworld/request")
+                  .method(HttpMethod.GET);
+      HttpRequest asyncRequest =
+          (HttpRequest)
+              httpClient
+                  .newRequest("http://localhost:8080/helloworld/request/async")
+                  .method(HttpMethod.GET);
+      HttpRequest postRequest =
+          (HttpRequest)
+              httpClient
+                  .newRequest("http://localhost:8080/helloworld/request")
+                  .method(HttpMethod.POST);
+      postRequest.content(new StringContentProvider("{\"hello\": \"world\"}"), "application/json");
+
+      if (request == null) {
+        logger.info("Request is null");
+        break;
+      }
+
+      request.send();
+      asyncRequest.send();
+      postRequest.send();
+      try {
+        Thread.sleep(15000);
+      } catch (Exception e) {
+        logger.error("Error while sleeping");
+      }
+    } while (true);
+  }
+}
diff --git a/examples/src/main/java/io/opencensus/examples/http/jetty/server/HelloWorldServer.java b/examples/src/main/java/io/opencensus/examples/http/jetty/server/HelloWorldServer.java
new file mode 100644
index 0000000..bb35335
--- /dev/null
+++ b/examples/src/main/java/io/opencensus/examples/http/jetty/server/HelloWorldServer.java
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.examples.http.jetty.server;
+
+import io.opencensus.contrib.http.servlet.OcHttpServletFilter;
+import io.opencensus.contrib.http.util.HttpViews;
+import io.opencensus.exporter.stats.prometheus.PrometheusStatsCollector;
+import io.opencensus.exporter.trace.jaeger.JaegerTraceExporter;
+import io.opencensus.exporter.trace.logging.LoggingTraceExporter;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.config.TraceConfig;
+import io.opencensus.trace.samplers.Samplers;
+import io.prometheus.client.exporter.HTTPServer;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.util.EnumSet;
+import javax.servlet.AsyncContext;
+import javax.servlet.DispatcherType;
+import javax.servlet.ServletException;
+import javax.servlet.ServletOutputStream;
+import javax.servlet.WriteListener;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.log4j.Logger;
+import org.eclipse.jetty.server.Request;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.handler.AbstractHandler;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+
+/** Sample application that shows how to instrument jetty server. */
+public class HelloWorldServer extends AbstractHandler {
+
+  private static final Logger logger = Logger.getLogger(HelloWorldServer.class.getName());
+
+  public static class HelloServlet extends HttpServlet {
+
+    private static String body = "<h1>Hello World Servlet Get</h1>";
+
+    private static final long serialVersionUID = 1L;
+
+    private void blockingGet(HttpServletRequest request, HttpServletResponse response)
+        throws ServletException, IOException {
+
+      String str = body.concat("<h3>blocking</h3>");
+      ByteBuffer content = ByteBuffer.wrap(str.getBytes(StandardCharsets.UTF_8));
+
+      PrintWriter pout = response.getWriter();
+
+      pout.print("<html><body>");
+      pout.print(str);
+      pout.print("</body></html>");
+      return;
+    }
+
+    private void asyncGet(HttpServletRequest request, HttpServletResponse response)
+        throws ServletException, IOException {
+      String str = body.concat("<h3>async</h3>");
+      ByteBuffer content = ByteBuffer.wrap(str.getBytes(StandardCharsets.UTF_8));
+
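+      // Switch the request to async mode and stream the response from a WriteListener callback.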
+      AsyncContext async = request.startAsync();
+      response.setContentType("text/html");
+      try {
+        Thread.sleep(100);
+      } catch (Exception e) {
+        logger.info("Error sleeping");
+      }
+      ServletOutputStream out = response.getOutputStream();
+      out.setWriteListener(
+          new WriteListener() {
+            @Override
+            public void onWritePossible() throws IOException {
+              while (out.isReady()) {
+                if (!content.hasRemaining()) {
+                  response.setStatus(200);
+                  async.complete();
+                  return;
+                }
+                out.write(content.get());
+              }
+            }
+
+            @Override
+            public void onError(Throwable t) {
+              logger.info("Server onError callled");
+              getServletContext().log("Async Error", t);
+              async.complete();
+            }
+          });
+    }
+
+    @Override
+    protected void doGet(HttpServletRequest request, HttpServletResponse response)
+        throws ServletException, IOException {
+      if (request.getPathInfo().contains("async")) {
+        asyncGet(request, response);
+      } else {
+        blockingGet(request, response);
+      }
+    }
+
+    @Override
+    protected void doPost(HttpServletRequest request, HttpServletResponse response)
+        throws ServletException, IOException {
+      // Read from request
+      StringBuilder buffer = new StringBuilder();
+      BufferedReader reader = request.getReader();
+      String line;
+      while ((line = reader.readLine()) != null) {
+        buffer.append(line);
+      }
+      String data = buffer.toString();
+
+      PrintWriter pout = response.getWriter();
+
+      pout.print("<html><body>");
+      pout.print("<h3>Hello World Servlet Post</h3>");
+      pout.print("</body></html>");
+      return;
+    }
+  }
+
+  @Override
+  public void handle(
+      String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response)
+      throws IOException, ServletException {
+    response.setContentType("text/html;charset=utf-8");
+    response.setStatus(HttpServletResponse.SC_OK);
+    baseRequest.setHandled(true);
+    response.getWriter().println("<h1>Hello World. default handler.</h1>");
+  }
+
+  private static void initStatsExporter() throws IOException {
+    HttpViews.registerAllServerViews();
+
+    // Register Prometheus exporters and export metrics to a Prometheus HTTPServer.
+    // Refer to https://prometheus.io/ to run Prometheus Server.
+    PrometheusStatsCollector.createAndRegister();
+    HTTPServer prometheusServer = new HTTPServer(9090, true);
+  }
+
+  private static void initTracing() {
+    TraceConfig traceConfig = Tracing.getTraceConfig();
+
+    // The sampler is set to Samplers.alwaysSample() for demonstration. In production
+    // or in a high-QPS environment, please use the default sampler.
+    traceConfig.updateActiveTraceParams(
+        traceConfig.getActiveTraceParams().toBuilder().setSampler(Samplers.alwaysSample()).build());
+
+    // Register LoggingTraceExporter to see traces in logs.
+    LoggingTraceExporter.register();
+
+    // Register Jaeger Tracing. Refer to https://www.jaegertracing.io/docs/1.8/getting-started/ to
+    // run Jaeger.
+    JaegerTraceExporter.createAndRegister("http://localhost:14268/api/traces", "helloworldserver");
+  }
+
+  /**
+   * HelloWorldServer starts a Jetty server that responds to HTTP requests sent by {@link
+   * HelloWorldClient}. The server uses an HTTP servlet that is instrumented with OpenCensus to
+   * enable tracing and stats monitoring.
+   */
+  public static void main(String[] args) throws Exception {
+    initTracing();
+    initStatsExporter();
+
+    Server server = new Server(8080);
+    ServletContextHandler contextHandler =
+        new ServletContextHandler(ServletContextHandler.SESSIONS);
+    contextHandler.setContextPath("/helloworld");
+    ServletHolder sh = new ServletHolder(new HelloServlet());
+    contextHandler.addServlet(sh, "/request/*");
+
+    // Enable tracing by adding OcHttpServletFilter for all paths.
+    contextHandler.addFilter(OcHttpServletFilter.class, "/*", EnumSet.of(DispatcherType.REQUEST));
+
+    // Uncomment the following lines to use B3Format for trace context propagation.
+    // contextHandler.setAttribute(
+    //    OC_TRACE_PROPAGATOR, Tracing.getPropagationComponent().getB3Format());
+
+    // By default the publicEndpoint parameter is set to false and the incoming trace context is
+    // added as a parent.
+    // If the endpoint for the HTTP request is public, uncomment the following line to set the
+    // publicEndpoint parameter to true. When set to true, the incoming trace context is added as
+    // a parent link instead of as a parent.
+    //
+    // contextHandler.setInitParameter(OC_PUBLIC_ENDPOINT, "true");
+
+    server.setHandler(contextHandler);
+    try {
+      server.start();
+      server.join();
+    } catch (Exception e) {
+      logger.error("Failed to start application", e);
+    }
+  }
+}
diff --git a/examples/src/main/java/io/opencensus/examples/ocagent/OcAgentExportersQuickStart.java b/examples/src/main/java/io/opencensus/examples/ocagent/OcAgentExportersQuickStart.java
new file mode 100644
index 0000000..21c4f33
--- /dev/null
+++ b/examples/src/main/java/io/opencensus/examples/ocagent/OcAgentExportersQuickStart.java
@@ -0,0 +1,298 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.examples.ocagent;
+
+import io.opencensus.common.Duration;
+import io.opencensus.common.Scope;
+import io.opencensus.exporter.metrics.ocagent.OcAgentMetricsExporter;
+import io.opencensus.exporter.metrics.ocagent.OcAgentMetricsExporterConfiguration;
+import io.opencensus.exporter.trace.ocagent.OcAgentTraceExporter;
+import io.opencensus.exporter.trace.ocagent.OcAgentTraceExporterConfiguration;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.LongGauge;
+import io.opencensus.metrics.LongGauge.LongPoint;
+import io.opencensus.metrics.MetricRegistry;
+import io.opencensus.metrics.Metrics;
+import io.opencensus.stats.Aggregation;
+import io.opencensus.stats.Aggregation.Distribution;
+import io.opencensus.stats.BucketBoundaries;
+import io.opencensus.stats.Measure.MeasureDouble;
+import io.opencensus.stats.Measure.MeasureLong;
+import io.opencensus.stats.Stats;
+import io.opencensus.stats.StatsRecorder;
+import io.opencensus.stats.View;
+import io.opencensus.stats.View.Name;
+import io.opencensus.stats.ViewManager;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagValue;
+import io.opencensus.tags.Tagger;
+import io.opencensus.tags.Tags;
+import io.opencensus.trace.Status;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.config.TraceConfig;
+import io.opencensus.trace.config.TraceParams;
+import io.opencensus.trace.samplers.Samplers;
+import java.nio.charset.Charset;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+import java.util.Random;
+import java.util.logging.Logger;
+
+/** Sample application that shows how to export traces and metrics to OC-Agent. */
+public class OcAgentExportersQuickStart {
+
+  private static final Logger logger = Logger.getLogger(OcAgentExportersQuickStart.class.getName());
+  private static final Random random = new Random();
+
+  private static final Tracer tracer = Tracing.getTracer();
+  private static final Tagger tagger = Tags.getTagger();
+  private static final StatsRecorder statsRecorder = Stats.getStatsRecorder();
+  private static final ViewManager viewManager = Stats.getViewManager();
+  private static final MetricRegistry metricRegistry = Metrics.getMetricRegistry();
+
+  private static final String SERVICE_NAME = "ocagent-java-exporter-quickstart";
+  private static final String DEFAULT_ENDPOINT = "localhost:55678";
+  private static final Duration RETRY_INTERVAL = Duration.create(10, 0);
+  private static final Duration EXPORT_INTERVAL = Duration.create(5, 0);
+
+  // The latency in milliseconds
+  private static final MeasureDouble M_LATENCY_MS =
+      MeasureDouble.create("repl/latency", "The latency in milliseconds per REPL loop", "ms");
+
+  // Counts the number of lines read.
+  private static final MeasureLong M_LINES_IN =
+      MeasureLong.create("repl/lines_in", "The number of lines read in", "1");
+
+  // Counts the number of non EOF(end-of-file) errors.
+  private static final MeasureLong M_ERRORS =
+      MeasureLong.create("repl/errors", "The number of errors encountered", "1");
+
+  // Counts/groups the lengths of lines read in.
+  private static final MeasureLong M_LINE_LENGTHS =
+      MeasureLong.create("repl/line_lengths", "The distribution of line lengths", "By");
+
+  // The tag "method"
+  private static final TagKey KEY_METHOD = TagKey.create("method");
+
+  // Defining the distribution aggregations
+  private static final Aggregation LATENCY_DISTRIBUTION =
+      Distribution.create(
+          BucketBoundaries.create(
+              Arrays.asList(
+                  // [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms,
+                  // >=1s, >=2s, >=4s, >=6s]
+                  0.0,
+                  25.0,
+                  50.0,
+                  75.0,
+                  100.0,
+                  200.0,
+                  400.0,
+                  600.0,
+                  800.0,
+                  1000.0,
+                  2000.0,
+                  4000.0,
+                  6000.0)));
+
+  private static final Aggregation LENGTH_DISTRIBUTION =
+      Distribution.create(
+          BucketBoundaries.create(
+              Arrays.asList(
+                  // [>=0B, >=5B, >=10B, >=20B, >=40B, >=60B, >=80B, >=100B, >=200B, >=400B,
+                  // >=600B,
+                  // >=800B, >=1000B]
+                  0.0,
+                  5.0,
+                  10.0,
+                  20.0,
+                  40.0,
+                  60.0,
+                  80.0,
+                  100.0,
+                  200.0,
+                  400.0,
+                  600.0,
+                  800.0,
+                  1000.0)));
+
+  // Define the count aggregation
+  private static final Aggregation COUNT = Aggregation.Count.create();
+
+  // Empty column
+  private static final List<TagKey> NO_KEYS = Collections.emptyList();
+
+  // Define the views
+  private static final List<View> VIEWS =
+      Arrays.asList(
+          View.create(
+              Name.create("ocjavametrics/latency"),
+              "The distribution of latencies",
+              M_LATENCY_MS,
+              LATENCY_DISTRIBUTION,
+              Collections.singletonList(KEY_METHOD)),
+          View.create(
+              Name.create("ocjavametrics/lines_in"),
+              "The number of lines read in from standard input",
+              M_LINES_IN,
+              COUNT,
+              NO_KEYS),
+          View.create(
+              Name.create("ocjavametrics/errors"),
+              "The number of errors encountered",
+              M_ERRORS,
+              COUNT,
+              Collections.singletonList(KEY_METHOD)),
+          View.create(
+              Name.create("ocjavametrics/line_lengths"),
+              "The distribution of line lengths",
+              M_LINE_LENGTHS,
+              LENGTH_DISTRIBUTION,
+              NO_KEYS));
+
+  /** Main launcher of the example. */
+  public static void main(String[] args) throws InterruptedException {
+    configureAlwaysSample(); // Always sample for demo purpose. DO NOT use in production.
+    registerAllViews();
+    LongGauge gauge = registerGauge();
+
+    String endPoint = getStringOrDefaultFromArgs(args, 0, DEFAULT_ENDPOINT);
+    registerAgentExporters(endPoint);
+
+    try (Scope scope = tracer.spanBuilder("root").startScopedSpan()) {
+      int iteration = 1;
+      while (true) {
+        doWork(iteration, random.nextInt(10), gauge);
+        iteration++;
+        Thread.sleep(5000);
+      }
+    } catch (InterruptedException e) {
+      logger.info("Thread interrupted, exiting in 5 seconds.");
+      Thread.sleep(5000); // Wait 5s so that last batch will be exported.
+    }
+  }
+
+  private static void configureAlwaysSample() {
+    TraceConfig traceConfig = Tracing.getTraceConfig();
+    TraceParams activeTraceParams = traceConfig.getActiveTraceParams();
+    traceConfig.updateActiveTraceParams(
+        activeTraceParams.toBuilder().setSampler(Samplers.alwaysSample()).build());
+  }
+
+  private static void registerAgentExporters(String endPoint) {
+    OcAgentTraceExporter.createAndRegister(
+        OcAgentTraceExporterConfiguration.builder()
+            .setEndPoint(endPoint)
+            .setServiceName(SERVICE_NAME)
+            .setUseInsecure(true)
+            .setEnableConfig(false)
+            .build());
+
+    OcAgentMetricsExporter.createAndRegister(
+        OcAgentMetricsExporterConfiguration.builder()
+            .setEndPoint(endPoint)
+            .setServiceName(SERVICE_NAME)
+            .setUseInsecure(true)
+            .setRetryInterval(RETRY_INTERVAL)
+            .setExportInterval(EXPORT_INTERVAL)
+            .build());
+  }
+
+  private static void registerAllViews() {
+    for (View view : VIEWS) {
+      viewManager.registerView(view);
+    }
+  }
+
+  private static LongGauge registerGauge() {
+    return metricRegistry.addLongGauge(
+        "pending_jobs",
+        "Pending jobs of current iteration",
+        "1",
+        Collections.singletonList(LabelKey.create("Name", "desc")));
+  }
+
+  private static void doWork(int iteration, int jobs, LongGauge gauge) {
+    String childSpanName = "iteration-" + iteration;
+    LabelValue value = LabelValue.create(childSpanName);
+    LongPoint point = gauge.getOrCreateTimeSeries(Collections.singletonList(value));
+    try (Scope scope = tracer.spanBuilder(childSpanName).startScopedSpan()) {
+      for (int i = 0; i < jobs; i++) {
+        String grandChildSpanName = childSpanName + "-job-" + i;
+        try (Scope childScope = tracer.spanBuilder(grandChildSpanName).startScopedSpan()) {
+          point.set(jobs - i);
+          String line = generateRandom(random.nextInt(128));
+          processLine(line);
+          recordStat(M_LINES_IN, 1L);
+          recordStat(M_LINE_LENGTHS, (long) line.length());
+        } catch (Exception e) {
+          tracer.getCurrentSpan().setStatus(Status.INTERNAL.withDescription(e.toString()));
+        }
+      }
+    }
+  }
+
+  private static String generateRandom(int size) {
+    byte[] array = new byte[size];
+    random.nextBytes(array);
+    return new String(array, Charset.forName("UTF-8"));
+  }
+
+  private static String processLine(String line) {
+    long startTimeNs = System.nanoTime();
+
+    try {
+      Thread.sleep(10L);
+      return line.toUpperCase(Locale.US);
+    } catch (Exception e) {
+      recordTaggedStat(KEY_METHOD, "processLine", M_ERRORS, 1L);
+      return "";
+    } finally {
+      long totalTimeNs = System.nanoTime() - startTimeNs;
+      double timespentMs = totalTimeNs / 1e6;
+      recordTaggedStat(KEY_METHOD, "processLine", M_LATENCY_MS, timespentMs);
+    }
+  }
+
+  private static void recordStat(MeasureLong ml, Long n) {
+    TagContext empty = tagger.emptyBuilder().build();
+    statsRecorder.newMeasureMap().put(ml, n).record(empty);
+  }
+
+  private static void recordTaggedStat(TagKey key, String value, MeasureLong ml, long n) {
+    TagContext context = tagger.emptyBuilder().put(key, TagValue.create(value)).build();
+    statsRecorder.newMeasureMap().put(ml, n).record(context);
+  }
+
+  private static void recordTaggedStat(TagKey key, String value, MeasureDouble md, double d) {
+    TagContext context = tagger.emptyBuilder().put(key, TagValue.create(value)).build();
+    statsRecorder.newMeasureMap().put(md, d).record(context);
+  }
+
+  private static String getStringOrDefaultFromArgs(String[] args, int index, String defaultString) {
+    String s = defaultString;
+    if (index < args.length) {
+      s = args[index];
+    }
+    return s;
+  }
+}
diff --git a/examples/src/main/java/io/opencensus/examples/quickstart/Repl.java b/examples/src/main/java/io/opencensus/examples/quickstart/Repl.java
new file mode 100644
index 0000000..6491e63
--- /dev/null
+++ b/examples/src/main/java/io/opencensus/examples/quickstart/Repl.java
@@ -0,0 +1,241 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.examples.quickstart;
+
+import io.opencensus.common.Scope;
+import io.opencensus.exporter.stats.prometheus.PrometheusStatsCollector;
+import io.opencensus.stats.Aggregation;
+import io.opencensus.stats.Aggregation.Distribution;
+import io.opencensus.stats.BucketBoundaries;
+import io.opencensus.stats.Measure.MeasureDouble;
+import io.opencensus.stats.Measure.MeasureLong;
+import io.opencensus.stats.Stats;
+import io.opencensus.stats.StatsRecorder;
+import io.opencensus.stats.View;
+import io.opencensus.stats.View.Name;
+import io.opencensus.stats.ViewManager;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagValue;
+import io.opencensus.tags.Tagger;
+import io.opencensus.tags.Tags;
+import io.prometheus.client.exporter.HTTPServer;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+/** Sample application that shows how to record stats and export to Prometheus. */
+public final class Repl {
+
+  // The latency in milliseconds
+  private static final MeasureDouble M_LATENCY_MS =
+      MeasureDouble.create("repl/latency", "The latency in milliseconds per REPL loop", "ms");
+
+  // Counts the number of lines read in from standard input.
+  private static final MeasureLong M_LINES_IN =
+      MeasureLong.create("repl/lines_in", "The number of lines read in", "1");
+
+  // Counts the number of non-EOF (end-of-file) errors.
+  private static final MeasureLong M_ERRORS =
+      MeasureLong.create("repl/errors", "The number of errors encountered", "1");
+
+  // Counts/groups the lengths of lines read in.
+  private static final MeasureLong M_LINE_LENGTHS =
+      MeasureLong.create("repl/line_lengths", "The distribution of line lengths", "By");
+
+  // The tag "method"
+  private static final TagKey KEY_METHOD = TagKey.create("method");
+
+  private static final Tagger tagger = Tags.getTagger();
+  private static final StatsRecorder statsRecorder = Stats.getStatsRecorder();
+
+  /** Main launcher for the Repl example. */
+  public static void main(String... args) {
+    // Step 1. Enable OpenCensus Metrics.
+    try {
+      setupOpenCensusAndPrometheusExporter();
+    } catch (IOException e) {
+      System.err.println(
+          "Failed to create and register the OpenCensus Prometheus Stats exporter: " + e);
+      return;
+    }
+
+    BufferedReader stdin = new BufferedReader(new InputStreamReader(System.in));
+
+    while (true) {
+      try {
+        readEvaluateProcessLine(stdin);
+      } catch (IOException e) {
+        System.err.println("EOF bye " + e);
+        return;
+      } catch (Exception e) {
+        recordTaggedStat(KEY_METHOD, "repl", M_ERRORS, 1L);
+        return;
+      }
+    }
+  }
+
+  private static void recordStat(MeasureLong ml, Long n) {
+    TagContext tctx = tagger.emptyBuilder().build();
+    try (Scope ss = tagger.withTagContext(tctx)) {
+      statsRecorder.newMeasureMap().put(ml, n).record();
+    }
+  }
+
+  private static void recordTaggedStat(TagKey key, String value, MeasureLong ml, Long n) {
+    TagContext tctx = tagger.emptyBuilder().put(key, TagValue.create(value)).build();
+    try (Scope ss = tagger.withTagContext(tctx)) {
+      statsRecorder.newMeasureMap().put(ml, n).record();
+    }
+  }
+
+  private static void recordTaggedStat(TagKey key, String value, MeasureDouble md, Double d) {
+    TagContext tctx = tagger.emptyBuilder().put(key, TagValue.create(value)).build();
+    try (Scope ss = tagger.withTagContext(tctx)) {
+      statsRecorder.newMeasureMap().put(md, d).record();
+    }
+  }
+
+  private static String processLine(String line) {
+    long startTimeNs = System.nanoTime();
+
+    try {
+      return line.toUpperCase();
+    } catch (Exception e) {
+      recordTaggedStat(KEY_METHOD, "processLine", M_ERRORS, 1L);
+      return "";
+    } finally {
+      long totalTimeNs = System.nanoTime() - startTimeNs;
+      double timespentMs = totalTimeNs / 1e6;
+      recordTaggedStat(KEY_METHOD, "processLine", M_LATENCY_MS, timespentMs);
+    }
+  }
+
+  private static void readEvaluateProcessLine(BufferedReader in) throws IOException {
+    System.out.print("> ");
+    System.out.flush();
+
+    String line = in.readLine();
+    if (line == null) {
+      // readLine() returns null on end-of-file; surface it as an IOException so main() exits.
+      throw new IOException("EOF");
+    }
+    String processed = processLine(line);
+    System.out.println("< " + processed + "\n");
+    if (line.length() > 0) {
+      recordStat(M_LINES_IN, 1L);
+      recordStat(M_LINE_LENGTHS, (long) line.length());
+    }
+  }
+
+  private static void registerAllViews() {
+    // Defining the distribution aggregations
+    Aggregation latencyDistribution =
+        Distribution.create(
+            BucketBoundaries.create(
+                Arrays.asList(
+                    // [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms,
+                    // >=1s, >=2s, >=4s, >=6s]
+                    0.0,
+                    25.0,
+                    50.0,
+                    75.0,
+                    100.0,
+                    200.0,
+                    400.0,
+                    600.0,
+                    800.0,
+                    1000.0,
+                    2000.0,
+                    4000.0,
+                    6000.0)));
+
+    Aggregation lengthsDistribution =
+        Distribution.create(
+            BucketBoundaries.create(
+                Arrays.asList(
+                    // [>=0B, >=5B, >=10B, >=20B, >=40B, >=60B, >=80B, >=100B, >=200B, >=400B,
+                    // >=600B,
+                    // >=800B, >=1000B]
+                    0.0,
+                    5.0,
+                    10.0,
+                    20.0,
+                    40.0,
+                    60.0,
+                    80.0,
+                    100.0,
+                    200.0,
+                    400.0,
+                    600.0,
+                    800.0,
+                    1000.0)));
+
+    // Define the count aggregation
+    Aggregation countAggregation = Aggregation.Count.create();
+
+    // No tag keys for the views that are not broken down by any tag
+    List<TagKey> noKeys = new ArrayList<TagKey>();
+
+    // Define the views
+    View[] views =
+        new View[] {
+          View.create(
+              Name.create("ocjavametrics/latency"),
+              "The distribution of latencies",
+              M_LATENCY_MS,
+              latencyDistribution,
+              Collections.singletonList(KEY_METHOD)),
+          View.create(
+              Name.create("ocjavametrics/lines_in"),
+              "The number of lines read in from standard input",
+              M_LINES_IN,
+              countAggregation,
+              noKeys),
+          View.create(
+              Name.create("ocjavametrics/errors"),
+              "The number of errors encountered",
+              M_ERRORS,
+              countAggregation,
+              Collections.singletonList(KEY_METHOD)),
+          View.create(
+              Name.create("ocjavametrics/line_lengths"),
+              "The distribution of line lengths",
+              M_LINE_LENGTHS,
+              lengthsDistribution,
+              noKeys)
+        };
+
+    // Create the view manager
+    ViewManager vmgr = Stats.getViewManager();
+
+    // Then finally register the views
+    for (View view : views) {
+      vmgr.registerView(view);
+    }
+  }
+
+  private static void setupOpenCensusAndPrometheusExporter() throws IOException {
+    // Firstly register the views
+    registerAllViews();
+
+    // Create and register the Prometheus exporter
+    PrometheusStatsCollector.createAndRegister();
+
+    // Run the server as a daemon on address "localhost:8889"
+    HTTPServer server = new HTTPServer("localhost", 8889, true);
+  }
+}
diff --git a/examples/src/main/java/io/opencensus/examples/quickstart/prometheus.yaml b/examples/src/main/java/io/opencensus/examples/quickstart/prometheus.yaml
new file mode 100644
index 0000000..fc40ede
--- /dev/null
+++ b/examples/src/main/java/io/opencensus/examples/quickstart/prometheus.yaml
@@ -0,0 +1,28 @@
+#
+#  Copyright 2018, OpenCensus Authors
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+global:
+  scrape_interval: 10s
+
+  external_labels:
+    monitor: 'demo'
+
+scrape_configs:
+  - job_name: 'demo'
+
+    scrape_interval: 10s
+
+    static_configs:
+      - targets: ['localhost:8889']
\ No newline at end of file
diff --git a/examples/src/main/java/io/opencensus/examples/stats/StackdriverQuickstart.java b/examples/src/main/java/io/opencensus/examples/stats/StackdriverQuickstart.java
new file mode 100644
index 0000000..b29eda4
--- /dev/null
+++ b/examples/src/main/java/io/opencensus/examples/stats/StackdriverQuickstart.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.examples.stats;
+
+import com.google.common.collect.Lists;
+import io.opencensus.exporter.stats.stackdriver.StackdriverStatsExporter;
+import io.opencensus.stats.Aggregation;
+import io.opencensus.stats.BucketBoundaries;
+import io.opencensus.stats.Measure.MeasureLong;
+import io.opencensus.stats.Stats;
+import io.opencensus.stats.StatsRecorder;
+import io.opencensus.stats.View;
+import io.opencensus.stats.View.Name;
+import io.opencensus.stats.ViewManager;
+import io.opencensus.tags.TagKey;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * StackdriverQuickstart is an example of exporting a custom metric from OpenCensus to Stackdriver.
+ */
+public final class StackdriverQuickstart {
+
+  private static final int EXPORT_INTERVAL = 60;
+  private static final MeasureLong LATENCY_MS =
+      MeasureLong.create("task_latency", "The task latency in milliseconds", "ms");
+  // Latency in buckets:
+  // [>=0ms, >=100ms, >=200ms, >=400ms, >=1s, >=2s, >=4s]
+  private static final BucketBoundaries LATENCY_BOUNDARIES =
+      BucketBoundaries.create(Lists.newArrayList(0d, 100d, 200d, 400d, 1000d, 2000d, 4000d));
+  private static final StatsRecorder STATS_RECORDER = Stats.getStatsRecorder();
+
+  /** Main launcher for the Stackdriver example. */
+  public static void main(String[] args) throws IOException, InterruptedException {
+    // Register the view. It is imperative that this step exists,
+    // otherwise recorded metrics will be dropped and never exported.
+    View view =
+        View.create(
+            Name.create("task_latency_distribution"),
+            "The distribution of the task latencies.",
+            LATENCY_MS,
+            Aggregation.Distribution.create(LATENCY_BOUNDARIES),
+            Collections.<TagKey>emptyList());
+
+    // Create the view manager
+    ViewManager viewManager = Stats.getViewManager();
+
+    // Then finally register the views
+    viewManager.registerView(view);
+
+    // [START setup_exporter]
+    // Enable OpenCensus exporters to export metrics to Stackdriver Monitoring.
+    // Exporters use Application Default Credentials to authenticate.
+    // See https://developers.google.com/identity/protocols/application-default-credentials
+    // for more details.
+    StackdriverStatsExporter.createAndRegister();
+    // [END setup_exporter]
+
+    // Record 100 fake latency values between 0 and 5 seconds.
+    Random rand = new Random();
+    for (int i = 0; i < 100; i++) {
+      long ms = (long) (TimeUnit.MILLISECONDS.convert(5, TimeUnit.SECONDS) * rand.nextDouble());
+      System.out.println(String.format("Latency %d: %d", i, ms));
+      STATS_RECORDER.newMeasureMap().put(LATENCY_MS, ms).record();
+    }
+
+    // The default export interval is 60 seconds. The thread that registered the
+    // StackdriverStatsExporter must stay alive for at least one interval after the last metrics
+    // are recorded; otherwise metrics recorded shortly before shutdown risk never being exported.
+
+    System.out.println(
+        String.format(
+            "Sleeping %d seconds before shutdown to ensure all records are flushed.",
+            EXPORT_INTERVAL));
+    Thread.sleep(TimeUnit.MILLISECONDS.convert(EXPORT_INTERVAL, TimeUnit.SECONDS));
+  }
+}
diff --git a/exporters/metrics/ocagent/README.md b/exporters/metrics/ocagent/README.md
new file mode 100644
index 0000000..fe471cc
--- /dev/null
+++ b/exporters/metrics/ocagent/README.md
@@ -0,0 +1,65 @@
+# OpenCensus Java OC-Agent Metrics Exporter
+
+The *OpenCensus Java OC-Agent Metrics Exporter* is the Java implementation of the OpenCensus Agent
+(OC-Agent) Metrics Exporter.
+
+## Quickstart
+
+### Add the dependencies to your project
+
+For Maven add to your `pom.xml`:
+```xml
+<dependencies>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-api</artifactId>
+    <version>0.22.0</version>
+  </dependency>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-exporter-metrics-ocagent</artifactId>
+    <version>0.22.0</version>
+  </dependency>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-impl</artifactId>
+    <version>0.22.0</version>
+    <scope>runtime</scope>
+  </dependency>
+  <dependency>
+    <groupId>io.netty</groupId>
+    <artifactId>netty-tcnative-boringssl-static</artifactId>
+    <version>2.0.20.Final</version>
+    <scope>runtime</scope>
+  </dependency>
+</dependencies>
+```
+
+For Gradle add to your dependencies:
+```groovy
+compile 'io.opencensus:opencensus-api:0.22.0'
+compile 'io.opencensus:opencensus-exporter-metrics-ocagent:0.22.0'
+runtime 'io.opencensus:opencensus-impl:0.22.0'
+runtime 'io.netty:netty-tcnative-boringssl-static:2.0.20.Final'
+```
+
+### Register the exporter
+
+```java
+import io.opencensus.exporter.metrics.ocagent.OcAgentMetricsExporter;
+import io.opencensus.exporter.metrics.ocagent.OcAgentMetricsExporterConfiguration;
+
+public class MyMainClass {
+  public static void main(String[] args) throws Exception {
+    OcAgentMetricsExporter.createAndRegister(
+        OcAgentMetricsExporterConfiguration.builder().build());
+    // ...
+  }
+}
+```
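+
+If you need to point the exporter at a non-default Agent endpoint or tune how often metrics are
+pushed, pass an explicit configuration. The snippet below is a sketch: the endpoint, service name
+and interval values are placeholders, not required settings.
+
+```java
+import io.opencensus.common.Duration;
+import io.opencensus.exporter.metrics.ocagent.OcAgentMetricsExporter;
+import io.opencensus.exporter.metrics.ocagent.OcAgentMetricsExporterConfiguration;
+
+public class MyMainClass {
+  public static void main(String[] args) throws Exception {
+    OcAgentMetricsExporter.createAndRegister(
+        OcAgentMetricsExporterConfiguration.builder()
+            .setEndPoint("my-agent-host:55678") // placeholder endpoint
+            .setServiceName("my-service") // placeholder service name
+            .setExportInterval(Duration.create(60, 0)) // push every 60 seconds
+            .build());
+    // ...
+  }
+}
+```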
+
+## FAQ
+
+### Why is `netty-tcnative-boringssl-static` needed? Which version should I use?
+
+This artifact depends on `grpc-netty`, which requires a runtime dependency on
+`netty-tcnative-boringssl-static` in order to work. For more details and a table of known working
+version combinations, see https://github.com/grpc/grpc-java/blob/master/SECURITY.md#netty.
\ No newline at end of file
diff --git a/exporters/metrics/ocagent/build.gradle b/exporters/metrics/ocagent/build.gradle
new file mode 100644
index 0000000..6a4dbdf
--- /dev/null
+++ b/exporters/metrics/ocagent/build.gradle
@@ -0,0 +1,38 @@
+description = 'OpenCensus Java OC-Agent Metrics Exporter'
+
+[compileJava, compileTestJava].each() {
+    it.sourceCompatibility = 1.7
+    it.targetCompatibility = 1.7
+}
+
+dependencies {
+    compileOnly libraries.auto_value
+
+    compile project(':opencensus-api'),
+            project(':opencensus-contrib-resource-util')
+
+    compile (libraries.grpc_core) {
+        // We will always be more up to date.
+        exclude group: 'io.opencensus', module: 'opencensus-api'
+    }
+
+    compile (libraries.grpc_stub) {
+        // We will always be more up to date.
+        exclude group: 'io.opencensus', module: 'opencensus-api'
+    }
+
+    compile (libraries.grpc_netty) {
+        // We will always be more up to date.
+        exclude group: 'io.opencensus', module: 'opencensus-api'
+    }
+
+    compile (libraries.opencensus_proto) {
+        // We will always be more up to date.
+        exclude group: 'io.opencensus', module: 'opencensus-api'
+    }
+
+    testRuntimeOnly project(':opencensus-impl'),
+                    project(':opencensus-impl-core')
+
+    signature "org.codehaus.mojo.signature:java17:1.0@signature"
+}
diff --git a/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/MetricsProtoUtils.java b/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/MetricsProtoUtils.java
new file mode 100644
index 0000000..eef3d31
--- /dev/null
+++ b/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/MetricsProtoUtils.java
@@ -0,0 +1,274 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.ocagent;
+
+import com.google.protobuf.DoubleValue;
+import com.google.protobuf.Int64Value;
+import io.opencensus.common.Function;
+import io.opencensus.common.Functions;
+import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.data.AttachmentValue;
+import io.opencensus.metrics.data.Exemplar;
+import io.opencensus.metrics.export.Distribution;
+import io.opencensus.metrics.export.Summary;
+import io.opencensus.proto.metrics.v1.DistributionValue;
+import io.opencensus.proto.metrics.v1.LabelKey;
+import io.opencensus.proto.metrics.v1.LabelValue;
+import io.opencensus.proto.metrics.v1.Metric;
+import io.opencensus.proto.metrics.v1.MetricDescriptor;
+import io.opencensus.proto.metrics.v1.Point;
+import io.opencensus.proto.metrics.v1.SummaryValue;
+import io.opencensus.proto.metrics.v1.TimeSeries;
+import io.opencensus.proto.resource.v1.Resource;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import javax.annotation.Nullable;
+
+/** Utilities for converting Metrics APIs in OpenCensus Java to OpenCensus Metrics Proto. */
+final class MetricsProtoUtils {
+
+  // TODO(songya): determine if we should make the optimization on not sending already-existed
+  // MetricDescriptors.
+  static Metric toMetricProto(
+      io.opencensus.metrics.export.Metric metric,
+      @Nullable io.opencensus.resource.Resource resource) {
+    Metric.Builder builder = Metric.newBuilder();
+    builder.setMetricDescriptor(toMetricDescriptorProto(metric.getMetricDescriptor()));
+    for (io.opencensus.metrics.export.TimeSeries timeSeries : metric.getTimeSeriesList()) {
+      builder.addTimeseries(toTimeSeriesProto(timeSeries));
+    }
+    if (resource != null) {
+      builder.setResource(toResourceProto(resource));
+    }
+    return builder.build();
+  }
+
+  private static MetricDescriptor toMetricDescriptorProto(
+      io.opencensus.metrics.export.MetricDescriptor metricDescriptor) {
+    MetricDescriptor.Builder builder = MetricDescriptor.newBuilder();
+    builder
+        .setName(metricDescriptor.getName())
+        .setDescription(metricDescriptor.getDescription())
+        .setUnit(metricDescriptor.getUnit())
+        .setType(toTypeProto(metricDescriptor.getType()));
+    for (io.opencensus.metrics.LabelKey labelKey : metricDescriptor.getLabelKeys()) {
+      builder.addLabelKeys(toLabelKeyProto(labelKey));
+    }
+    return builder.build();
+  }
+
+  private static MetricDescriptor.Type toTypeProto(
+      io.opencensus.metrics.export.MetricDescriptor.Type type) {
+    switch (type) {
+      case CUMULATIVE_INT64:
+        return MetricDescriptor.Type.CUMULATIVE_INT64;
+      case CUMULATIVE_DOUBLE:
+        return MetricDescriptor.Type.CUMULATIVE_DOUBLE;
+      case CUMULATIVE_DISTRIBUTION:
+        return MetricDescriptor.Type.CUMULATIVE_DISTRIBUTION;
+      case GAUGE_INT64:
+        return MetricDescriptor.Type.GAUGE_INT64;
+      case GAUGE_DOUBLE:
+        return MetricDescriptor.Type.GAUGE_DOUBLE;
+      case GAUGE_DISTRIBUTION:
+        return MetricDescriptor.Type.GAUGE_DISTRIBUTION;
+      case SUMMARY:
+        return MetricDescriptor.Type.SUMMARY;
+    }
+    return MetricDescriptor.Type.UNRECOGNIZED;
+  }
+
+  private static LabelKey toLabelKeyProto(io.opencensus.metrics.LabelKey labelKey) {
+    return LabelKey.newBuilder()
+        .setKey(labelKey.getKey())
+        .setDescription(labelKey.getDescription())
+        .build();
+  }
+
+  private static Resource toResourceProto(io.opencensus.resource.Resource resource) {
+    Resource.Builder builder = Resource.newBuilder();
+    if (resource.getType() != null) {
+      builder.setType(resource.getType());
+    }
+    builder.putAllLabels(resource.getLabels());
+    return builder.build();
+  }
+
+  private static TimeSeries toTimeSeriesProto(io.opencensus.metrics.export.TimeSeries timeSeries) {
+    TimeSeries.Builder builder = TimeSeries.newBuilder();
+    if (timeSeries.getStartTimestamp() != null) {
+      builder.setStartTimestamp(toTimestampProto(timeSeries.getStartTimestamp()));
+    }
+    for (io.opencensus.metrics.LabelValue labelValue : timeSeries.getLabelValues()) {
+      builder.addLabelValues(toLabelValueProto(labelValue));
+    }
+    for (io.opencensus.metrics.export.Point point : timeSeries.getPoints()) {
+      builder.addPoints(toPointProto(point));
+    }
+    return builder.build();
+  }
+
+  private static LabelValue toLabelValueProto(io.opencensus.metrics.LabelValue labelValue) {
+    LabelValue.Builder builder = LabelValue.newBuilder();
+    if (labelValue.getValue() == null) {
+      builder.setHasValue(false);
+    } else {
+      builder.setHasValue(true).setValue(labelValue.getValue());
+    }
+    return builder.build();
+  }
+
+  private static Point toPointProto(io.opencensus.metrics.export.Point point) {
+    final Point.Builder builder = Point.newBuilder();
+    builder.setTimestamp(toTimestampProto(point.getTimestamp()));
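+    // Dispatch on the point's value type (double, int64, distribution or summary) using the
+    // match() visitor; each branch sets the corresponding oneof field on the proto builder.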
+    point
+        .getValue()
+        .match(
+            new Function<Double, Void>() {
+              @Override
+              public Void apply(Double arg) {
+                builder.setDoubleValue(arg);
+                return null;
+              }
+            },
+            new Function<Long, Void>() {
+              @Override
+              public Void apply(Long arg) {
+                builder.setInt64Value(arg);
+                return null;
+              }
+            },
+            new Function<Distribution, Void>() {
+              @Override
+              public Void apply(Distribution arg) {
+                builder.setDistributionValue(toDistributionProto(arg));
+                return null;
+              }
+            },
+            new Function<Summary, Void>() {
+              @Override
+              public Void apply(Summary arg) {
+                builder.setSummaryValue(toSummaryProto(arg));
+                return null;
+              }
+            },
+            Functions.<Void>throwAssertionError());
+    return builder.build();
+  }
+
+  private static DistributionValue toDistributionProto(
+      io.opencensus.metrics.export.Distribution distribution) {
+    DistributionValue.Builder builder = DistributionValue.newBuilder();
+    builder
+        .setSum(distribution.getSum())
+        .setCount(distribution.getCount())
+        .setSumOfSquaredDeviation(distribution.getSumOfSquaredDeviations());
+    if (distribution.getBucketOptions() != null) {
+      builder.setBucketOptions(toBucketOptionsProto(distribution.getBucketOptions()));
+    }
+    for (io.opencensus.metrics.export.Distribution.Bucket bucket : distribution.getBuckets()) {
+      builder.addBuckets(toBucketProto(bucket));
+    }
+    return builder.build();
+  }
+
+  // TODO(songya): determine if we should make the optimization on not sending already-existed
+  // BucketOptions.
+  private static DistributionValue.BucketOptions toBucketOptionsProto(
+      Distribution.BucketOptions bucketOptions) {
+    final DistributionValue.BucketOptions.Builder builder =
+        DistributionValue.BucketOptions.newBuilder();
+    bucketOptions.match(
+        new Function<Distribution.BucketOptions.ExplicitOptions, Void>() {
+          @Override
+          public Void apply(Distribution.BucketOptions.ExplicitOptions arg) {
+            builder.setExplicit(
+                DistributionValue.BucketOptions.Explicit.newBuilder()
+                    .addAllBounds(arg.getBucketBoundaries())
+                    .build());
+            return null;
+          }
+        },
+        Functions.<Void>throwAssertionError());
+    return builder.build();
+  }
+
+  private static DistributionValue.Bucket toBucketProto(
+      io.opencensus.metrics.export.Distribution.Bucket bucket) {
+    DistributionValue.Bucket.Builder builder =
+        DistributionValue.Bucket.newBuilder().setCount(bucket.getCount());
+    Exemplar exemplar = bucket.getExemplar();
+    if (exemplar != null) {
+      builder.setExemplar(toExemplarProto(exemplar));
+    }
+    return builder.build();
+  }
+
+  private static DistributionValue.Exemplar toExemplarProto(Exemplar exemplar) {
+    Map<String, String> stringAttachments = new HashMap<>();
+    for (Entry<String, AttachmentValue> entry : exemplar.getAttachments().entrySet()) {
+      stringAttachments.put(entry.getKey(), entry.getValue().getValue());
+    }
+    return DistributionValue.Exemplar.newBuilder()
+        .setValue(exemplar.getValue())
+        .setTimestamp(toTimestampProto(exemplar.getTimestamp()))
+        .putAllAttachments(stringAttachments)
+        .build();
+  }
+
+  private static SummaryValue toSummaryProto(io.opencensus.metrics.export.Summary summary) {
+    SummaryValue.Builder builder = SummaryValue.newBuilder();
+    if (summary.getSum() != null) {
+      builder.setSum(DoubleValue.of(summary.getSum()));
+    }
+    if (summary.getCount() != null) {
+      builder.setCount(Int64Value.of(summary.getCount()));
+    }
+    builder.setSnapshot(toSnapshotProto(summary.getSnapshot()));
+    return builder.build();
+  }
+
+  private static SummaryValue.Snapshot toSnapshotProto(
+      io.opencensus.metrics.export.Summary.Snapshot snapshot) {
+    SummaryValue.Snapshot.Builder builder = SummaryValue.Snapshot.newBuilder();
+    if (snapshot.getSum() != null) {
+      builder.setSum(DoubleValue.of(snapshot.getSum()));
+    }
+    if (snapshot.getCount() != null) {
+      builder.setCount(Int64Value.of(snapshot.getCount()));
+    }
+    for (io.opencensus.metrics.export.Summary.Snapshot.ValueAtPercentile valueAtPercentile :
+        snapshot.getValueAtPercentiles()) {
+      builder.addPercentileValues(
+          SummaryValue.Snapshot.ValueAtPercentile.newBuilder()
+              .setValue(valueAtPercentile.getValue())
+              .setPercentile(valueAtPercentile.getPercentile())
+              .build());
+    }
+    return builder.build();
+  }
+
+  static com.google.protobuf.Timestamp toTimestampProto(Timestamp timestamp) {
+    return com.google.protobuf.Timestamp.newBuilder()
+        .setSeconds(timestamp.getSeconds())
+        .setNanos(timestamp.getNanos())
+        .build();
+  }
+
+  private MetricsProtoUtils() {}
+}
diff --git a/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporter.java b/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporter.java
new file mode 100644
index 0000000..9a293c2
--- /dev/null
+++ b/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporter.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.ocagent;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
+import io.netty.handler.ssl.SslContext;
+import io.opencensus.common.Duration;
+import io.opencensus.metrics.Metrics;
+import io.opencensus.metrics.export.MetricProducerManager;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.GuardedBy;
+import javax.annotation.concurrent.ThreadSafe;
+
+/**
+ * The implementation of the OpenCensus Agent (OC-Agent) Metrics Exporter.
+ *
+ * <p>Example of usage:
+ *
+ * <pre>{@code
+ * public static void main(String[] args) {
+ *   OcAgentMetricsExporter.createAndRegister(
+ *     OcAgentMetricsExporterConfiguration.builder().build());
+ *   ... // Do work.
+ * }
+ * }</pre>
+ *
+ * @since 0.20
+ */
+@ThreadSafe
+public final class OcAgentMetricsExporter {
+
+  private static final Object monitor = new Object();
+
+  @GuardedBy("monitor")
+  @Nullable
+  private static OcAgentMetricsExporter exporter = null;
+
+  private final Thread workerThread;
+
+  /**
+   * Creates an {@code OcAgentMetricsExporter} with the given configuration and registers it
+   * with the OpenCensus library.
+   *
+   * @param configuration the {@code OcAgentMetricsExporterConfiguration}.
+   * @since 0.20
+   */
+  public static void createAndRegister(OcAgentMetricsExporterConfiguration configuration) {
+    checkNotNull(configuration, "configuration");
+    createInternal(
+        configuration.getEndPoint(),
+        configuration.getUseInsecure(),
+        configuration.getSslContext(),
+        configuration.getServiceName(),
+        configuration.getExportInterval(),
+        configuration.getRetryInterval());
+  }
+
+  private static void createInternal(
+      String endPoint,
+      boolean useInsecure,
+      @Nullable SslContext sslContext,
+      String serviceName,
+      Duration exportInterval,
+      Duration retryInterval) {
+    checkArgument(
+        useInsecure == (sslContext == null), "Either use insecure or provide a valid SslContext.");
+    synchronized (monitor) {
+      checkState(exporter == null, "OcAgent Metrics exporter is already created.");
+      exporter =
+          new OcAgentMetricsExporter(
+              endPoint,
+              useInsecure,
+              sslContext,
+              serviceName,
+              exportInterval,
+              retryInterval,
+              Metrics.getExportComponent().getMetricProducerManager());
+      exporter.workerThread.start();
+    }
+  }
+
+  private OcAgentMetricsExporter(
+      String endPoint,
+      Boolean useInsecure,
+      @Nullable SslContext sslContext,
+      String serviceName,
+      Duration exportInterval,
+      Duration retryInterval,
+      MetricProducerManager metricProducerManager) {
+    OcAgentMetricsExporterWorker worker =
+        new OcAgentMetricsExporterWorker(
+            endPoint,
+            useInsecure,
+            sslContext,
+            exportInterval,
+            retryInterval,
+            serviceName,
+            metricProducerManager);
+    workerThread = new Thread(worker);
+    workerThread.setDaemon(true);
+    workerThread.setName("OcAgentMetricsExporterWorker");
+  }
+}
diff --git a/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporterConfiguration.java b/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporterConfiguration.java
new file mode 100644
index 0000000..d8197f3
--- /dev/null
+++ b/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporterConfiguration.java
@@ -0,0 +1,207 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.ocagent;
+
+import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import io.netty.handler.ssl.SslContext;
+import io.opencensus.common.Duration;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * Configurations for {@code OcAgentMetricsExporter}.
+ *
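+ * <p>Example usage (a sketch; {@code "my-service"} is a placeholder, the other values shown are
+ * the documented defaults):
+ *
+ * <pre>{@code
+ * OcAgentMetricsExporterConfiguration configuration =
+ *     OcAgentMetricsExporterConfiguration.builder()
+ *         .setEndPoint("localhost:55678")
+ *         .setServiceName("my-service")
+ *         .setUseInsecure(true)
+ *         .build();
+ * }</pre>
+ *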
+ * @since 0.20
+ */
+@AutoValue
+@Immutable
+public abstract class OcAgentMetricsExporterConfiguration {
+
+  @VisibleForTesting static final String DEFAULT_END_POINT = "localhost:55678";
+  @VisibleForTesting static final String DEFAULT_SERVICE_NAME = "OpenCensus";
+  @VisibleForTesting static final Duration DEFAULT_RETRY_INTERVAL = Duration.create(300, 0);
+  @VisibleForTesting static final Duration DEFAULT_EXPORT_INTERVAL = Duration.create(60, 0);
+  @VisibleForTesting static final Duration ZERO = Duration.create(0, 0);
+
+  OcAgentMetricsExporterConfiguration() {}
+
+  /**
+   * Returns the end point of the OC-Agent. The end point can be a DNS name or an ip:port
+   * address.
+   *
+   * <p>Default value is "localhost:55678" if not set.
+   *
+   * @return the end point of OC-Agent.
+   * @since 0.20
+   */
+  public abstract String getEndPoint();
+
+  /**
+   * Returns whether to disable client transport security for the exporter's gRPC connection.
+   *
+   * <p>Default value is true if not set.
+   *
+   * @return whether to disable client transport security for the exporter's gRPC connection.
+   * @since 0.20
+   */
+  public abstract Boolean getUseInsecure();
+
+  /**
+   * Returns the {@link SslContext} for secure TLS gRPC connection.
+   *
+   * <p>If not set, the OcAgent exporter will use an insecure connection by default.
+   *
+   * @return the {@code SslContext}.
+   * @since 0.20
+   */
+  @Nullable
+  public abstract SslContext getSslContext();
+
+  /**
+   * Returns the service name to be used for the {@code OcAgentMetricsExporter}.
+   *
+   * <p>Default value is "OpenCensus" if not set.
+   *
+   * @return the service name.
+   * @since 0.20
+   */
+  public abstract String getServiceName();
+
+  /**
+   * Returns the retry time interval when trying to connect to Agent.
+   *
+   * <p>Default value is 5 minutes.
+   *
+   * @return the retry time interval.
+   * @since 0.20
+   */
+  public abstract Duration getRetryInterval();
+
+  /**
+   * Returns the export interval between pushes to Agent.
+   *
+   * <p>Default value is 1 minute.
+   *
+   * @return the export interval.
+   * @since 0.20
+   */
+  public abstract Duration getExportInterval();
+
+  /**
+   * Returns a new {@link Builder}.
+   *
+   * @return a {@code Builder}.
+   * @since 0.20
+   */
+  public static Builder builder() {
+    return new AutoValue_OcAgentMetricsExporterConfiguration.Builder()
+        .setEndPoint(DEFAULT_END_POINT)
+        .setServiceName(DEFAULT_SERVICE_NAME)
+        .setRetryInterval(DEFAULT_RETRY_INTERVAL)
+        .setExportInterval(DEFAULT_EXPORT_INTERVAL)
+        .setUseInsecure(true);
+  }
+
+  /**
+   * Builder for {@link OcAgentMetricsExporterConfiguration}.
+   *
+   * @since 0.20
+   */
+  @AutoValue.Builder
+  public abstract static class Builder {
+
+    Builder() {}
+
+    /**
+     * Sets the end point of OC-Agent server.
+     *
+     * @param endPoint the end point of OC-Agent.
+     * @return this.
+     * @since 0.20
+     */
+    public abstract Builder setEndPoint(String endPoint);
+
+    /**
+     * Sets whether to disable client transport security for the exporter's gRPC connection.
+     *
+     * @param useInsecure whether to disable client transport security for the exporter's gRPC
+     *     connection.
+     * @return this.
+     * @since 0.20
+     */
+    public abstract Builder setUseInsecure(Boolean useInsecure);
+
+    /**
+     * Sets the {@link SslContext} for secure TLS gRPC connection.
+     *
+     * @param sslContext the {@code SslContext}.
+     * @return this.
+     * @since 0.20
+     */
+    public abstract Builder setSslContext(SslContext sslContext);
+
+    /**
+     * Sets the service name to be used for the {@code OcAgentMetricsExporter}.
+     *
+     * @param serviceName the service name.
+     * @return this.
+     * @since 0.20
+     */
+    public abstract Builder setServiceName(String serviceName);
+
+    /**
+     * Sets the retry time interval when trying to connect to Agent.
+     *
+     * @param retryInterval the retry time interval.
+     * @return this.
+     * @since 0.20
+     */
+    public abstract Builder setRetryInterval(Duration retryInterval);
+
+    /**
+     * Sets the export time interval between pushes to Agent.
+     *
+     * @param exportInterval the export time interval.
+     * @return this.
+     * @since 0.20
+     */
+    public abstract Builder setExportInterval(Duration exportInterval);
+
+    // TODO(songya): add an option that controls whether to always keep the RPC connection alive.
+
+    abstract Duration getRetryInterval();
+
+    abstract Duration getExportInterval();
+
+    abstract OcAgentMetricsExporterConfiguration autoBuild();
+
+    /**
+     * Builds a {@link OcAgentMetricsExporterConfiguration}.
+     *
+     * @return a {@code OcAgentMetricsExporterConfiguration}.
+     * @since 0.20
+     */
+    public OcAgentMetricsExporterConfiguration build() {
+      Preconditions.checkArgument(
+          getRetryInterval().compareTo(ZERO) > 0, "Retry interval must be positive.");
+      Preconditions.checkArgument(
+          getExportInterval().compareTo(ZERO) > 0, "Export interval must be positive.");
+      return autoBuild();
+    }
+  }
+}
diff --git a/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporterWorker.java b/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporterWorker.java
new file mode 100644
index 0000000..9195134
--- /dev/null
+++ b/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporterWorker.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.ocagent;
+
+import com.google.common.collect.Lists;
+import io.grpc.ManagedChannel;
+import io.grpc.ManagedChannelBuilder;
+import io.grpc.netty.NegotiationType;
+import io.grpc.netty.NettyChannelBuilder;
+import io.netty.handler.ssl.SslContext;
+import io.opencensus.common.Duration;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricProducer;
+import io.opencensus.metrics.export.MetricProducerManager;
+import io.opencensus.proto.agent.metrics.v1.ExportMetricsServiceRequest;
+import io.opencensus.proto.agent.metrics.v1.MetricsServiceGrpc;
+import io.opencensus.proto.resource.v1.Resource;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.NotThreadSafe;
+
+/**
+ * Worker {@code Runnable} that polls Metrics from the Metrics library and batch-exports them to
+ * the Agent.
+ *
+ * <p>{@code OcAgentMetricsExporterWorker} will be started in a daemon {@code Thread}.
+ *
+ * <p>The state of this class should only be accessed from the thread which {@link
+ * OcAgentMetricsExporterWorker} resides in.
+ */
+@NotThreadSafe
+final class OcAgentMetricsExporterWorker implements Runnable {
+
+  private static final Logger logger =
+      Logger.getLogger(OcAgentMetricsExporterWorker.class.getName());
+
+  private final String endPoint;
+  private final boolean useInsecure;
+  @Nullable private final SslContext sslContext;
+  private final long exportIntervalMillis;
+  private final long retryIntervalMillis;
+  private final String serviceName;
+  private final MetricProducerManager metricProducerManager;
+  private OcAgentMetricsServiceExportRpcHandler exportRpcHandler;
+  // private final Set<MetricDescriptor> registeredDescriptors = new HashSet<>();
+
+  OcAgentMetricsExporterWorker(
+      String endPoint,
+      boolean useInsecure,
+      @Nullable SslContext sslContext,
+      Duration exportInterval,
+      Duration retryInterval,
+      String serviceName,
+      MetricProducerManager metricProducerManager) {
+    this.endPoint = endPoint;
+    this.useInsecure = useInsecure;
+    this.sslContext = sslContext;
+    this.exportIntervalMillis = exportInterval.toMillis();
+    this.retryIntervalMillis = retryInterval.toMillis();
+    this.serviceName = serviceName;
+    this.metricProducerManager = metricProducerManager;
+  }
+
+  @Override
+  public void run() {
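+    // Outer loop: (re)connect to the Agent, keep exporting on the export interval until the
+    // stream terminates, log the terminate status, then wait for the retry interval and retry.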
+    while (true) {
+      connect();
+      while (exportRpcHandler != null && !exportRpcHandler.isCompleted()) {
+        export();
+        sleep(exportIntervalMillis);
+      }
+      if (exportRpcHandler != null && exportRpcHandler.getTerminateStatus() != null) {
+        TerminateStatusRunnable runnable =
+            new TerminateStatusRunnable(exportRpcHandler.getTerminateStatus(), "Export");
+        new Thread(runnable).start();
+      }
+      sleep(retryIntervalMillis);
+    }
+  }
+
+  private void connect() {
+    ManagedChannelBuilder<?> channelBuilder;
+    if (useInsecure) {
+      channelBuilder = ManagedChannelBuilder.forTarget(endPoint).usePlaintext();
+    } else {
+      channelBuilder =
+          NettyChannelBuilder.forTarget(endPoint)
+              .negotiationType(NegotiationType.TLS)
+              .sslContext(sslContext);
+    }
+    ManagedChannel channel = channelBuilder.build();
+    MetricsServiceGrpc.MetricsServiceStub stub = MetricsServiceGrpc.newStub(channel);
+    exportRpcHandler = OcAgentMetricsServiceExportRpcHandler.create(stub);
+    ExportMetricsServiceRequest.Builder builder =
+        ExportMetricsServiceRequest.newBuilder().setNode(OcAgentNodeUtils.getNodeInfo(serviceName));
+    @Nullable Resource resourceProto = OcAgentNodeUtils.getAutoDetectedResourceProto();
+    if (resourceProto != null) {
+      builder.setResource(resourceProto);
+    }
+    exportRpcHandler.onExport(builder.build());
+  }
+
+  // Polls the MetricProducerManager of the Metrics library for all produced Metrics,
+  // converts them to proto, then exports them to the OC-Agent.
+  private void export() {
+    if (exportRpcHandler == null || exportRpcHandler.isCompleted()) {
+      return;
+    }
+
+    ArrayList<Metric> metricsList = Lists.newArrayList();
+    for (MetricProducer metricProducer : metricProducerManager.getAllMetricProducer()) {
+      metricsList.addAll(metricProducer.getMetrics());
+    }
+
+    List<io.opencensus.proto.metrics.v1.Metric> metricProtos = Lists.newArrayList();
+    for (Metric metric : metricsList) {
+      // TODO(songya): determine if we should make the optimization on not sending already-existed
+      // MetricDescriptors.
+      // boolean registered = true;
+      // if (!registeredDescriptors.contains(metric.getMetricDescriptor())) {
+      //   registered = false;
+      //   registeredDescriptors.add(metric.getMetricDescriptor());
+      // }
+      metricProtos.add(MetricsProtoUtils.toMetricProto(metric, null));
+    }
+
+    exportRpcHandler.onExport(
+        // For now don't include Resource in the following messages, i.e. don't allow Resource to
+        // mutate after the initial message.
+        ExportMetricsServiceRequest.newBuilder().addAllMetrics(metricProtos).build());
+  }
+
+  private static void sleep(long timeInMillis) {
+    try {
+      Thread.sleep(timeInMillis);
+    } catch (InterruptedException e) {
+      logger.log(Level.INFO, "OcAgentMetricsExporterWorker is interrupted.", e);
+      Thread.currentThread().interrupt();
+    }
+  }
+
+  private static final class TerminateStatusRunnable implements Runnable {
+    private final io.grpc.Status status;
+    private final String rpcName;
+
+    TerminateStatusRunnable(io.grpc.Status status, String rpcName) {
+      this.status = status;
+      this.rpcName = rpcName;
+    }
+
+    @Override
+    public void run() {
+      logger.log(Level.INFO, "RPC " + rpcName + " terminated with Status " + status);
+    }
+  }
+}
diff --git a/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsServiceExportRpcHandler.java b/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsServiceExportRpcHandler.java
new file mode 100644
index 0000000..988ad20
--- /dev/null
+++ b/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsServiceExportRpcHandler.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.ocagent;
+
+import com.google.common.annotations.VisibleForTesting;
+import io.grpc.Status;
+import io.grpc.StatusRuntimeException;
+import io.grpc.stub.StreamObserver;
+import io.opencensus.proto.agent.metrics.v1.ExportMetricsServiceRequest;
+import io.opencensus.proto.agent.metrics.v1.ExportMetricsServiceResponse;
+import io.opencensus.proto.agent.metrics.v1.MetricsServiceGrpc.MetricsServiceStub;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.GuardedBy;
+import javax.annotation.concurrent.ThreadSafe;
+
+/** Handler of export service RPC. */
+@ThreadSafe
+final class OcAgentMetricsServiceExportRpcHandler {
+
+  private static final Logger logger =
+      Logger.getLogger(OcAgentMetricsServiceExportRpcHandler.class.getName());
+
+  // A reference to the exportRequestObserver returned from stub.
+  @GuardedBy("this")
+  @Nullable
+  private StreamObserver<ExportMetricsServiceRequest> exportRequestObserver;
+
+  // The RPC status when this stream finishes/disconnects. Null if the stream is still connected.
+  @GuardedBy("this")
+  @Nullable
+  private Status terminateStatus;
+
+  private OcAgentMetricsServiceExportRpcHandler() {}
+
+  private synchronized void setExportRequestObserver(
+      StreamObserver<ExportMetricsServiceRequest> exportRequestObserver) {
+    this.exportRequestObserver = exportRequestObserver;
+  }
+
+  // Creates an OcAgentMetricsServiceExportRpcHandler. Tries to initiate the export stream with the
+  // given MetricsServiceStub.
+  static OcAgentMetricsServiceExportRpcHandler create(MetricsServiceStub stub) {
+    OcAgentMetricsServiceExportRpcHandler exportRpcHandler =
+        new OcAgentMetricsServiceExportRpcHandler();
+    ExportResponseObserver exportResponseObserver = new ExportResponseObserver(exportRpcHandler);
+    try {
+      StreamObserver<ExportMetricsServiceRequest> exportRequestObserver =
+          stub.export(exportResponseObserver);
+      exportRpcHandler.setExportRequestObserver(exportRequestObserver);
+    } catch (StatusRuntimeException e) {
+      exportRpcHandler.onComplete(e);
+    }
+    return exportRpcHandler;
+  }
+
+  // Sends the export request to the Agent if the stream is still connected, otherwise does nothing.
+  synchronized void onExport(ExportMetricsServiceRequest request) {
+    if (isCompleted() || exportRequestObserver == null) {
+      return;
+    }
+    try {
+      exportRequestObserver.onNext(request);
+    } catch (Exception e) { // Catch client side exceptions.
+      onComplete(e);
+    }
+  }
+
+  // Marks this export stream as completed with an optional error.
+  // Once onComplete is called, this OcAgentMetricsServiceExportRpcHandler instance can be discarded
+  // and GC'ed in the worker thread.
+  synchronized void onComplete(@javax.annotation.Nullable Throwable error) {
+    if (isCompleted()) {
+      return;
+    }
+    // TODO(songya): add Runnable
+    Status status;
+    if (error == null) {
+      status = Status.OK;
+    } else if (error instanceof StatusRuntimeException) {
+      status = ((StatusRuntimeException) error).getStatus();
+    } else {
+      status = Status.UNKNOWN;
+    }
+    terminateStatus = status;
+  }
+
+  synchronized boolean isCompleted() {
+    return terminateStatus != null;
+  }
+
+  @Nullable
+  synchronized Status getTerminateStatus() {
+    return terminateStatus;
+  }
+
+  @VisibleForTesting
+  static class ExportResponseObserver implements StreamObserver<ExportMetricsServiceResponse> {
+
+    private final OcAgentMetricsServiceExportRpcHandler exportRpcHandler;
+
+    ExportResponseObserver(OcAgentMetricsServiceExportRpcHandler exportRpcHandler) {
+      this.exportRpcHandler = exportRpcHandler;
+    }
+
+    @Override
+    public void onNext(ExportMetricsServiceResponse value) {
+      // Do nothing since ExportMetricsServiceResponse is an empty message.
+    }
+
+    @Override
+    public void onError(Throwable t) {
+      logger.log(Level.WARNING, "Export stream is disconnected.", t);
+      exportRpcHandler.onComplete(t);
+    }
+
+    @Override
+    public void onCompleted() {
+      exportRpcHandler.onComplete(null);
+    }
+  }
+}
diff --git a/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentNodeUtils.java b/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentNodeUtils.java
new file mode 100644
index 0000000..ef157eb
--- /dev/null
+++ b/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/OcAgentNodeUtils.java
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.ocagent;
+
+import com.google.common.annotations.VisibleForTesting;
+import io.opencensus.common.OpenCensusLibraryInformation;
+import io.opencensus.common.Timestamp;
+import io.opencensus.contrib.resource.util.ResourceUtils;
+import io.opencensus.proto.agent.common.v1.LibraryInfo;
+import io.opencensus.proto.agent.common.v1.LibraryInfo.Language;
+import io.opencensus.proto.agent.common.v1.Node;
+import io.opencensus.proto.agent.common.v1.ProcessIdentifier;
+import io.opencensus.proto.agent.common.v1.ServiceInfo;
+import io.opencensus.proto.resource.v1.Resource;
+import java.lang.management.ManagementFactory;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.security.SecureRandom;
+import java.util.Map.Entry;
+import javax.annotation.Nullable;
+
+/** Utilities for detecting and creating {@link Node}. */
+// TODO(songya): extract the utilities to a common artifact.
+final class OcAgentNodeUtils {
+
+  // The current version of the OpenCensus OC-Agent Exporter.
+  @VisibleForTesting
+  static final String OC_AGENT_EXPORTER_VERSION = "0.32.0-SNAPSHOT"; // CURRENT_OPENCENSUS_VERSION
+
+  @Nullable
+  private static final io.opencensus.resource.Resource AUTO_DETECTED_RESOURCE =
+      ResourceUtils.detectResource();
+
+  // Creates a Node with information from the OpenCensus library and the runtime environment.
+  static Node getNodeInfo(String serviceName) {
+    String jvmName = ManagementFactory.getRuntimeMXBean().getName();
+    Timestamp censusTimestamp = Timestamp.fromMillis(System.currentTimeMillis());
+    return Node.newBuilder()
+        .setIdentifier(getProcessIdentifier(jvmName, censusTimestamp))
+        .setLibraryInfo(getLibraryInfo(OpenCensusLibraryInformation.VERSION))
+        .setServiceInfo(getServiceInfo(serviceName))
+        .build();
+  }
+
+  // Creates process identifier with the given JVM name and start time.
+  @VisibleForTesting
+  static ProcessIdentifier getProcessIdentifier(String jvmName, Timestamp censusTimestamp) {
+    String hostname;
+    int pid;
+    // jvmName should be something like '<pid>@<hostname>', at least in Oracle and OpenJDK JVMs.
+    int delimiterIndex = jvmName.indexOf('@');
+    if (delimiterIndex < 1) {
+      // Not the expected format; fall back to the local hostname and a random PID.
+      try {
+        hostname = InetAddress.getLocalHost().getHostName();
+      } catch (UnknownHostException e) {
+        hostname = "localhost";
+      }
+      // Generate a random number as the PID.
+      pid = new SecureRandom().nextInt();
+    } else {
+      hostname = jvmName.substring(delimiterIndex + 1, jvmName.length());
+      try {
+        pid = Integer.parseInt(jvmName.substring(0, delimiterIndex));
+      } catch (NumberFormatException e) {
+        // Generate a random number as the PID if format is unexpected.
+        pid = new SecureRandom().nextInt();
+      }
+    }
+
+    return ProcessIdentifier.newBuilder()
+        .setHostName(hostname)
+        .setPid(pid)
+        .setStartTimestamp(MetricsProtoUtils.toTimestampProto(censusTimestamp))
+        .build();
+  }
+
+  // Creates library info with the given OpenCensus Java version.
+  @VisibleForTesting
+  static LibraryInfo getLibraryInfo(String currentOcJavaVersion) {
+    return LibraryInfo.newBuilder()
+        .setLanguage(Language.JAVA)
+        .setCoreLibraryVersion(currentOcJavaVersion)
+        .setExporterVersion(OC_AGENT_EXPORTER_VERSION)
+        .build();
+  }
+
+  // Creates service info with the given service name.
+  @VisibleForTesting
+  static ServiceInfo getServiceInfo(String serviceName) {
+    return ServiceInfo.newBuilder().setName(serviceName).build();
+  }
+
+  @Nullable
+  static Resource getAutoDetectedResourceProto() {
+    return toResourceProto(AUTO_DETECTED_RESOURCE);
+  }
+
+  // Converts a Java Resource object to a Resource proto.
+  @Nullable
+  @VisibleForTesting
+  static Resource toResourceProto(@Nullable io.opencensus.resource.Resource resource) {
+    if (resource == null || resource.getType() == null) {
+      return null;
+    } else {
+      Resource.Builder resourceProtoBuilder = Resource.newBuilder();
+      resourceProtoBuilder.setType(resource.getType());
+      for (Entry<String, String> keyValuePairs : resource.getLabels().entrySet()) {
+        resourceProtoBuilder.putLabels(keyValuePairs.getKey(), keyValuePairs.getValue());
+      }
+      return resourceProtoBuilder.build();
+    }
+  }
+
+  private OcAgentNodeUtils() {}
+}
diff --git a/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/package-info.java b/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/package-info.java
new file mode 100644
index 0000000..741e333
--- /dev/null
+++ b/exporters/metrics/ocagent/src/main/java/io/opencensus/exporter/metrics/ocagent/package-info.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This package contains the Java implementation of the OpenCensus Agent (OC-Agent) Metrics
+ * Exporter.
+ *
+ * <p>WARNING: Currently all the public classes under this package are marked as {@link
+ * io.opencensus.common.ExperimentalApi}. The classes and APIs under {@link
+ * io.opencensus.exporter.metrics.ocagent} are likely to get backwards-incompatible updates in the
+ * future. DO NOT USE except for experimental purposes.
+ *
+ * <p>See more details on
+ * https://github.com/census-instrumentation/opencensus-proto/tree/master/src/opencensus/proto/agent.
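+ *
+ * <p>A minimal registration sketch (illustrative only; the service name and intervals are
+ * placeholders, and the builder calls mirror the ones exercised by this package's tests):
+ *
+ * <pre>{@code
+ * OcAgentMetricsExporter.createAndRegister(
+ *     OcAgentMetricsExporterConfiguration.builder()
+ *         .setServiceName("my-service")
+ *         .setUseInsecure(true)
+ *         .setRetryInterval(Duration.create(5, 0))
+ *         .setExportInterval(Duration.create(60, 0))
+ *         .build());
+ * }</pre>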
+ */
+@io.opencensus.common.ExperimentalApi
+package io.opencensus.exporter.metrics.ocagent;
diff --git a/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/FakeOcAgentMetricsServiceGrpcImpl.java b/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/FakeOcAgentMetricsServiceGrpcImpl.java
new file mode 100644
index 0000000..c012768
--- /dev/null
+++ b/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/FakeOcAgentMetricsServiceGrpcImpl.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.ocagent;
+
+import io.grpc.stub.StreamObserver;
+import io.opencensus.proto.agent.metrics.v1.ExportMetricsServiceRequest;
+import io.opencensus.proto.agent.metrics.v1.ExportMetricsServiceResponse;
+import io.opencensus.proto.agent.metrics.v1.MetricsServiceGrpc;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.logging.Logger;
+import javax.annotation.concurrent.GuardedBy;
+import javax.annotation.concurrent.ThreadSafe;
+
+/** Fake implementation of {@link MetricsServiceGrpc}. Used for unit tests only. */
+@ThreadSafe
+final class FakeOcAgentMetricsServiceGrpcImpl extends MetricsServiceGrpc.MetricsServiceImplBase {
+
+  private static final Logger logger =
+      Logger.getLogger(FakeOcAgentMetricsServiceGrpcImpl.class.getName());
+
+  @GuardedBy("this")
+  private final List<ExportMetricsServiceRequest> exportMetricsServiceRequests = new ArrayList<>();
+
+  @GuardedBy("this")
+  private final StreamObserver<ExportMetricsServiceRequest> exportRequestObserver =
+      new StreamObserver<ExportMetricsServiceRequest>() {
+        @Override
+        public void onNext(ExportMetricsServiceRequest value) {
+          addExportRequest(value);
+        }
+
+        @Override
+        public void onError(Throwable t) {
+          logger.warning("Exception thrown for export stream: " + t);
+        }
+
+        @Override
+        public void onCompleted() {}
+      };
+
+  @Override
+  public synchronized StreamObserver<ExportMetricsServiceRequest> export(
+      StreamObserver<ExportMetricsServiceResponse> responseObserver) {
+    return exportRequestObserver;
+  }
+
+  private synchronized void addExportRequest(ExportMetricsServiceRequest request) {
+    exportMetricsServiceRequests.add(request);
+  }
+
+  synchronized List<ExportMetricsServiceRequest> getExportMetricsServiceRequests() {
+    return Collections.unmodifiableList(exportMetricsServiceRequests);
+  }
+}
diff --git a/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/MetricsProtoUtilsTests.java b/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/MetricsProtoUtilsTests.java
new file mode 100644
index 0000000..20ddc67
--- /dev/null
+++ b/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/MetricsProtoUtilsTests.java
@@ -0,0 +1,246 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.ocagent;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import com.google.protobuf.DoubleValue;
+import com.google.protobuf.Int64Value;
+import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.data.AttachmentValue;
+import io.opencensus.metrics.data.Exemplar;
+import io.opencensus.metrics.export.Distribution;
+import io.opencensus.metrics.export.Distribution.Bucket;
+import io.opencensus.metrics.export.Distribution.BucketOptions;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.Summary;
+import io.opencensus.metrics.export.Summary.Snapshot;
+import io.opencensus.metrics.export.Summary.Snapshot.ValueAtPercentile;
+import io.opencensus.metrics.export.TimeSeries;
+import io.opencensus.metrics.export.Value;
+import io.opencensus.proto.metrics.v1.DistributionValue;
+import io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit;
+import io.opencensus.proto.metrics.v1.SummaryValue;
+import io.opencensus.resource.Resource;
+import java.util.Arrays;
+import java.util.Collections;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Tests for {@link MetricsProtoUtils}. */
+@RunWith(JUnit4.class)
+public class MetricsProtoUtilsTests {
+
+  private static final LabelKey KEY_1 = LabelKey.create("key1", "");
+  private static final LabelKey KEY_2 = LabelKey.create("key2", "");
+  private static final LabelValue VALUE_1 = LabelValue.create("value1");
+  private static final LabelValue VALUE_2 = LabelValue.create("value2");
+  private static final LabelValue VALUE_NULL = LabelValue.create(null);
+  private static final String UNIT = "ms";
+  private static final String METRIC_NAME_1 = "metric1";
+  private static final String METRIC_NAME_2 = "metric2";
+  private static final String METRIC_DESCRIPTION = "description";
+  private static final MetricDescriptor DESCRIPTOR_1 =
+      MetricDescriptor.create(
+          METRIC_NAME_1,
+          METRIC_DESCRIPTION,
+          UNIT,
+          Type.CUMULATIVE_DISTRIBUTION,
+          Collections.<LabelKey>singletonList(KEY_1));
+  private static final MetricDescriptor DESCRIPTOR_2 =
+      MetricDescriptor.create(
+          METRIC_NAME_2,
+          METRIC_DESCRIPTION,
+          UNIT,
+          Type.SUMMARY,
+          Arrays.<LabelKey>asList(KEY_1, KEY_2));
+  private static final Timestamp TIMESTAMP_1 = Timestamp.create(10, 0);
+  private static final Timestamp TIMESTAMP_2 = Timestamp.create(25, 0);
+  private static final Timestamp TIMESTAMP_3 = Timestamp.create(30, 0);
+  private static final Timestamp TIMESTAMP_4 = Timestamp.create(50, 0);
+  private static final Timestamp TIMESTAMP_5 = Timestamp.create(100, 0);
+  private static final Distribution DISTRIBUTION =
+      Distribution.create(
+          5,
+          24.0,
+          321.5,
+          BucketOptions.explicitOptions(Arrays.<Double>asList(5.0, 10.0, 15.0)),
+          Arrays.<Bucket>asList(
+              Bucket.create(2),
+              Bucket.create(1),
+              Bucket.create(
+                  2,
+                  Exemplar.create(
+                      11, TIMESTAMP_1, Collections.<String, AttachmentValue>emptyMap())),
+              Bucket.create(0)));
+  private static final Summary SUMMARY =
+      Summary.create(
+          5L,
+          45.0,
+          Snapshot.create(
+              3L, 25.0, Arrays.<ValueAtPercentile>asList(ValueAtPercentile.create(0.4, 11))));
+  private static final Point POINT_DISTRIBUTION =
+      Point.create(Value.distributionValue(DISTRIBUTION), TIMESTAMP_2);
+  private static final Point POINT_SUMMARY = Point.create(Value.summaryValue(SUMMARY), TIMESTAMP_3);
+  private static final TimeSeries TIME_SERIES_1 =
+      TimeSeries.createWithOnePoint(
+          Collections.<LabelValue>singletonList(VALUE_1), POINT_DISTRIBUTION, TIMESTAMP_4);
+  private static final TimeSeries TIME_SERIES_2 =
+      TimeSeries.createWithOnePoint(
+          Arrays.<LabelValue>asList(VALUE_2, VALUE_NULL), POINT_SUMMARY, TIMESTAMP_5);
+  private static final Resource RESOURCE =
+      Resource.create("env", Collections.<String, String>singletonMap("env_key", "env_val"));
+
+  @Test
+  public void toMetricProto_Distribution() {
+    Metric metric = Metric.create(DESCRIPTOR_1, Collections.singletonList(TIME_SERIES_1));
+    io.opencensus.proto.metrics.v1.Metric expected =
+        io.opencensus.proto.metrics.v1.Metric.newBuilder()
+            .setMetricDescriptor(
+                io.opencensus.proto.metrics.v1.MetricDescriptor.newBuilder()
+                    .setName(METRIC_NAME_1)
+                    .setDescription(METRIC_DESCRIPTION)
+                    .setUnit(UNIT)
+                    .setType(
+                        io.opencensus.proto.metrics.v1.MetricDescriptor.Type
+                            .CUMULATIVE_DISTRIBUTION)
+                    .addLabelKeys(
+                        io.opencensus.proto.metrics.v1.LabelKey.newBuilder()
+                            .setKey(KEY_1.getKey())
+                            .setDescription(KEY_1.getDescription())
+                            .build())
+                    .build())
+            .setResource(
+                io.opencensus.proto.resource.v1.Resource.newBuilder()
+                    .setType(RESOURCE.getType())
+                    .putAllLabels(RESOURCE.getLabels())
+                    .build())
+            .addTimeseries(
+                io.opencensus.proto.metrics.v1.TimeSeries.newBuilder()
+                    .setStartTimestamp(MetricsProtoUtils.toTimestampProto(TIMESTAMP_4))
+                    .addLabelValues(
+                        io.opencensus.proto.metrics.v1.LabelValue.newBuilder()
+                            .setHasValue(true)
+                            .setValue(VALUE_1.getValue())
+                            .build())
+                    .addPoints(
+                        io.opencensus.proto.metrics.v1.Point.newBuilder()
+                            .setTimestamp(MetricsProtoUtils.toTimestampProto(TIMESTAMP_2))
+                            .setDistributionValue(
+                                DistributionValue.newBuilder()
+                                    .setCount(5)
+                                    .setSum(24.0)
+                                    .setSumOfSquaredDeviation(321.5)
+                                    .setBucketOptions(
+                                        DistributionValue.BucketOptions.newBuilder()
+                                            .setExplicit(
+                                                Explicit.newBuilder()
+                                                    .addAllBounds(
+                                                        Arrays.<Double>asList(5.0, 10.0, 15.0))
+                                                    .build())
+                                            .build())
+                                    .addBuckets(
+                                        DistributionValue.Bucket.newBuilder().setCount(2).build())
+                                    .addBuckets(
+                                        DistributionValue.Bucket.newBuilder().setCount(1).build())
+                                    .addBuckets(
+                                        DistributionValue.Bucket.newBuilder()
+                                            .setCount(2)
+                                            .setExemplar(
+                                                DistributionValue.Exemplar.newBuilder()
+                                                    .setTimestamp(
+                                                        MetricsProtoUtils.toTimestampProto(
+                                                            TIMESTAMP_1))
+                                                    .setValue(11)
+                                                    .build())
+                                            .build())
+                                    .addBuckets(
+                                        DistributionValue.Bucket.newBuilder().setCount(0).build())
+                                    .build())
+                            .build())
+                    .build())
+            .build();
+    io.opencensus.proto.metrics.v1.Metric actual =
+        MetricsProtoUtils.toMetricProto(metric, RESOURCE);
+    assertThat(actual).isEqualTo(expected);
+  }
+
+  @Test
+  public void toMetricProto_Summary() {
+    Metric metric = Metric.create(DESCRIPTOR_2, Collections.singletonList(TIME_SERIES_2));
+    io.opencensus.proto.metrics.v1.Metric expected =
+        io.opencensus.proto.metrics.v1.Metric.newBuilder()
+            .setMetricDescriptor(
+                io.opencensus.proto.metrics.v1.MetricDescriptor.newBuilder()
+                    .setName(METRIC_NAME_2)
+                    .setDescription(METRIC_DESCRIPTION)
+                    .setUnit(UNIT)
+                    .setType(io.opencensus.proto.metrics.v1.MetricDescriptor.Type.SUMMARY)
+                    .addLabelKeys(
+                        io.opencensus.proto.metrics.v1.LabelKey.newBuilder()
+                            .setKey(KEY_1.getKey())
+                            .setDescription(KEY_1.getDescription())
+                            .build())
+                    .addLabelKeys(
+                        io.opencensus.proto.metrics.v1.LabelKey.newBuilder()
+                            .setKey(KEY_2.getKey())
+                            .setDescription(KEY_2.getDescription())
+                            .build())
+                    .build())
+            .addTimeseries(
+                io.opencensus.proto.metrics.v1.TimeSeries.newBuilder()
+                    .setStartTimestamp(MetricsProtoUtils.toTimestampProto(TIMESTAMP_5))
+                    .addLabelValues(
+                        io.opencensus.proto.metrics.v1.LabelValue.newBuilder()
+                            .setHasValue(true)
+                            .setValue(VALUE_2.getValue())
+                            .build())
+                    .addLabelValues(
+                        io.opencensus.proto.metrics.v1.LabelValue.newBuilder()
+                            .setHasValue(false)
+                            .build())
+                    .addPoints(
+                        io.opencensus.proto.metrics.v1.Point.newBuilder()
+                            .setTimestamp(MetricsProtoUtils.toTimestampProto(TIMESTAMP_3))
+                            .setSummaryValue(
+                                SummaryValue.newBuilder()
+                                    .setCount(Int64Value.of(5))
+                                    .setSum(DoubleValue.of(45.0))
+                                    .setSnapshot(
+                                        SummaryValue.Snapshot.newBuilder()
+                                            .setCount(Int64Value.of(3))
+                                            .setSum(DoubleValue.of(25.0))
+                                            .addPercentileValues(
+                                                SummaryValue.Snapshot.ValueAtPercentile.newBuilder()
+                                                    .setValue(11)
+                                                    .setPercentile(0.4)
+                                                    .build())
+                                            .build())
+                                    .build())
+                            .build())
+                    .build())
+            .build();
+    io.opencensus.proto.metrics.v1.Metric actual = MetricsProtoUtils.toMetricProto(metric, null);
+    assertThat(actual).isEqualTo(expected);
+  }
+}
diff --git a/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporterConfigurationTest.java b/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporterConfigurationTest.java
new file mode 100644
index 0000000..e4c7e8b
--- /dev/null
+++ b/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporterConfigurationTest.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.ocagent;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.netty.handler.ssl.SslContext;
+import io.netty.handler.ssl.SslContextBuilder;
+import io.opencensus.common.Duration;
+import javax.net.ssl.SSLException;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link OcAgentMetricsExporterConfiguration}. */
+@RunWith(JUnit4.class)
+public class OcAgentMetricsExporterConfigurationTest {
+
+  @Test
+  public void defaultConfiguration() {
+    OcAgentMetricsExporterConfiguration configuration =
+        OcAgentMetricsExporterConfiguration.builder().build();
+    assertThat(configuration.getEndPoint())
+        .isEqualTo(OcAgentMetricsExporterConfiguration.DEFAULT_END_POINT);
+    assertThat(configuration.getServiceName())
+        .isEqualTo(OcAgentMetricsExporterConfiguration.DEFAULT_SERVICE_NAME);
+    assertThat(configuration.getUseInsecure()).isTrue();
+    assertThat(configuration.getSslContext()).isNull();
+    assertThat(configuration.getRetryInterval())
+        .isEqualTo(OcAgentMetricsExporterConfiguration.DEFAULT_RETRY_INTERVAL);
+    assertThat(configuration.getExportInterval())
+        .isEqualTo(OcAgentMetricsExporterConfiguration.DEFAULT_EXPORT_INTERVAL);
+  }
+
+  @Test
+  public void setAndGet() throws SSLException {
+    Duration oneMinute = Duration.create(60, 0);
+    Duration fiveMinutes = Duration.create(300, 0);
+    SslContext sslContext = SslContextBuilder.forClient().build();
+    OcAgentMetricsExporterConfiguration configuration =
+        OcAgentMetricsExporterConfiguration.builder()
+            .setEndPoint("192.168.0.1:50051")
+            .setServiceName("service")
+            .setUseInsecure(false)
+            .setSslContext(sslContext)
+            .setRetryInterval(fiveMinutes)
+            .setExportInterval(oneMinute)
+            .build();
+    assertThat(configuration.getEndPoint()).isEqualTo("192.168.0.1:50051");
+    assertThat(configuration.getServiceName()).isEqualTo("service");
+    assertThat(configuration.getUseInsecure()).isFalse();
+    assertThat(configuration.getSslContext()).isEqualTo(sslContext);
+    assertThat(configuration.getRetryInterval()).isEqualTo(fiveMinutes);
+    assertThat(configuration.getExportInterval()).isEqualTo(oneMinute);
+  }
+}
diff --git a/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporterIntegrationTest.java b/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporterIntegrationTest.java
new file mode 100644
index 0000000..870590d
--- /dev/null
+++ b/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsExporterIntegrationTest.java
@@ -0,0 +1,344 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.ocagent;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import com.google.common.util.concurrent.MoreExecutors;
+import io.grpc.BindableService;
+import io.grpc.Server;
+import io.grpc.ServerBuilder;
+import io.grpc.netty.NettyServerBuilder;
+import io.opencensus.common.Duration;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.LongGauge;
+import io.opencensus.metrics.LongGauge.LongPoint;
+import io.opencensus.metrics.MetricRegistry;
+import io.opencensus.metrics.Metrics;
+import io.opencensus.proto.agent.common.v1.Node;
+import io.opencensus.proto.agent.metrics.v1.ExportMetricsServiceRequest;
+import io.opencensus.proto.metrics.v1.Metric;
+import io.opencensus.stats.Aggregation;
+import io.opencensus.stats.Aggregation.Distribution;
+import io.opencensus.stats.BucketBoundaries;
+import io.opencensus.stats.Measure.MeasureDouble;
+import io.opencensus.stats.Measure.MeasureLong;
+import io.opencensus.stats.Stats;
+import io.opencensus.stats.StatsRecorder;
+import io.opencensus.stats.View;
+import io.opencensus.stats.View.Name;
+import io.opencensus.stats.ViewManager;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagValue;
+import io.opencensus.tags.Tagger;
+import io.opencensus.tags.Tags;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.Executor;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** End-to-end integration test for {@link OcAgentMetricsExporter}. */
+@RunWith(JUnit4.class)
+public class OcAgentMetricsExporterIntegrationTest {
+
+  private Server agent;
+  private FakeOcAgentMetricsServiceGrpcImpl fakeOcAgentMetricsServiceGrpc;
+
+  private static final String SERVICE_NAME = "integration-test";
+  private static final Duration RETRY_INTERVAL = Duration.create(2, 0);
+  private static final Duration EXPORT_INTERVAL = Duration.create(2, 0);
+
+  // The latency in milliseconds
+  private static final MeasureDouble M_LATENCY_MS =
+      MeasureDouble.create("repl/latency", "The latency in milliseconds per REPL loop", "ms");
+
+  // Counts the number of lines read.
+  private static final MeasureLong M_LINES_IN =
+      MeasureLong.create("repl/lines_in", "The number of lines read in", "1");
+
+  // Counts the number of non-EOF (end-of-file) errors.
+  private static final MeasureLong M_ERRORS =
+      MeasureLong.create("repl/errors", "The number of errors encountered", "1");
+
+  // Counts/groups the lengths of lines read in.
+  private static final MeasureLong M_LINE_LENGTHS =
+      MeasureLong.create("repl/line_lengths", "The distribution of line lengths", "By");
+
+  // The tag "method"
+  private static final TagKey KEY_METHOD = TagKey.create("method");
+
+  // Define the distribution aggregations.
+  private static final Aggregation LATENCY_DISTRIBUTION =
+      Distribution.create(
+          BucketBoundaries.create(
+              Arrays.asList(
+                  // [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms,
+                  // >=1s, >=2s, >=4s, >=6s]
+                  0.0,
+                  25.0,
+                  50.0,
+                  75.0,
+                  100.0,
+                  200.0,
+                  400.0,
+                  600.0,
+                  800.0,
+                  1000.0,
+                  2000.0,
+                  4000.0,
+                  6000.0)));
+
+  private static final Aggregation LENGTH_DISTRIBUTION =
+      Distribution.create(
+          BucketBoundaries.create(
+              Arrays.asList(
+                  // [>=0B, >=5B, >=10B, >=20B, >=40B, >=60B, >=80B, >=100B, >=200B, >=400B,
+                  // >=600B,
+                  // >=800B, >=1000B]
+                  0.0,
+                  5.0,
+                  10.0,
+                  20.0,
+                  40.0,
+                  60.0,
+                  80.0,
+                  100.0,
+                  200.0,
+                  400.0,
+                  600.0,
+                  800.0,
+                  1000.0)));
+
+  // Define the count aggregation
+  private static final Aggregation COUNT = Aggregation.Count.create();
+
+  // No tag keys (an empty column).
+  private static final List<TagKey> NO_KEYS = Collections.emptyList();
+
+  // Define the views
+  private static final List<View> VIEWS =
+      Arrays.asList(
+          View.create(
+              Name.create("ocjavametrics/latency"),
+              "The distribution of latencies",
+              M_LATENCY_MS,
+              LATENCY_DISTRIBUTION,
+              Collections.singletonList(KEY_METHOD)),
+          View.create(
+              Name.create("ocjavametrics/lines_in"),
+              "The number of lines read in from standard input",
+              M_LINES_IN,
+              COUNT,
+              NO_KEYS),
+          View.create(
+              Name.create("ocjavametrics/errors"),
+              "The number of errors encountered",
+              M_ERRORS,
+              COUNT,
+              Collections.singletonList(KEY_METHOD)),
+          View.create(
+              Name.create("ocjavametrics/line_lengths"),
+              "The distribution of line lengths",
+              M_LINE_LENGTHS,
+              LENGTH_DISTRIBUTION,
+              NO_KEYS));
+
+  private static final Random random = new Random();
+  private static final Tagger tagger = Tags.getTagger();
+  private static final StatsRecorder statsRecorder = Stats.getStatsRecorder();
+  private static final ViewManager viewManager = Stats.getViewManager();
+  private static final MetricRegistry metricRegistry = Metrics.getMetricRegistry();
+
+  @Before
+  public void setUp() {
+    fakeOcAgentMetricsServiceGrpc = new FakeOcAgentMetricsServiceGrpcImpl();
+    agent =
+        getServer(
+            OcAgentMetricsExporterConfiguration.DEFAULT_END_POINT, fakeOcAgentMetricsServiceGrpc);
+  }
+
+  @After
+  public void tearDown() {
+    agent.shutdown();
+  }
+
+  @Test
+  public void testExportMetrics() throws InterruptedException, IOException {
+    // Simulate a real-life production scenario: the Agent is not running at first, then comes back
+    // after an outage. Users should be able to see metrics shortly after the Agent is up.
+
+    registerAllViews();
+    LongGauge gauge = registerGauge();
+
+    // Register the OC-Agent exporter first.
+    // The Agent is not yet up and running, so the exporter will just keep retrying the connection.
+    OcAgentMetricsExporter.createAndRegister(
+        OcAgentMetricsExporterConfiguration.builder()
+            .setServiceName(SERVICE_NAME)
+            .setUseInsecure(true)
+            .setRetryInterval(RETRY_INTERVAL)
+            .setExportInterval(EXPORT_INTERVAL)
+            .build());
+
+    doWork(
+        5, gauge.getOrCreateTimeSeries(Collections.singletonList(LabelValue.create("First work"))));
+
+    // Wait 3s so that all metrics get exported.
+    Thread.sleep(3000);
+
+    // No interaction with Agent so far.
+    assertThat(fakeOcAgentMetricsServiceGrpc.getExportMetricsServiceRequests()).isEmpty();
+
+    // Imagine that an outage happened; now start the Agent. The exporter should be able to connect
+    // to the Agent after the next retry interval.
+    agent.start();
+
+    // Wait 3s for Exporter to start another attempt to connect to Agent.
+    Thread.sleep(3000);
+
+    doWork(
+        8,
+        gauge.getOrCreateTimeSeries(Collections.singletonList(LabelValue.create("Second work"))));
+
+    // Wait 3s so that all metrics get exported.
+    Thread.sleep(3000);
+
+    List<ExportMetricsServiceRequest> exportRequests =
+        fakeOcAgentMetricsServiceGrpc.getExportMetricsServiceRequests();
+    assertThat(exportRequests.size()).isAtLeast(2);
+
+    ExportMetricsServiceRequest firstRequest = exportRequests.get(0);
+    Node expectedNode = OcAgentNodeUtils.getNodeInfo(SERVICE_NAME);
+    Node actualNode = firstRequest.getNode();
+    assertThat(actualNode.getIdentifier().getHostName())
+        .isEqualTo(expectedNode.getIdentifier().getHostName());
+    assertThat(actualNode.getIdentifier().getPid())
+        .isEqualTo(expectedNode.getIdentifier().getPid());
+    assertThat(actualNode.getLibraryInfo()).isEqualTo(expectedNode.getLibraryInfo());
+    assertThat(actualNode.getServiceInfo()).isEqualTo(expectedNode.getServiceInfo());
+
+    List<Metric> metricProtos = new ArrayList<>();
+    for (int i = 1; i < exportRequests.size(); i++) {
+      metricProtos.addAll(exportRequests.get(i).getMetricsList());
+    }
+
+    // There should be at least one metric exported for each view and gauge (4 + 1).
+    assertThat(metricProtos.size()).isAtLeast(5);
+
+    Set<String> expectedMetrics = new HashSet<>();
+    expectedMetrics.add("jobs");
+    for (View view : VIEWS) {
+      expectedMetrics.add(view.getName().asString());
+    }
+    Set<String> actualMetrics = new HashSet<>();
+    for (Metric metricProto : metricProtos) {
+      actualMetrics.add(metricProto.getMetricDescriptor().getName());
+    }
+    assertThat(actualMetrics).containsAtLeastElementsIn(expectedMetrics);
+  }
+
+  private static void registerAllViews() {
+    for (View view : VIEWS) {
+      viewManager.registerView(view);
+    }
+  }
+
+  private static LongGauge registerGauge() {
+    return metricRegistry.addLongGauge(
+        "jobs", "Pending jobs", "1", Collections.singletonList(LabelKey.create("Name", "desc")));
+  }
+
+  private static void doWork(int jobs, LongPoint point) {
+    for (int i = 0; i < jobs; i++) {
+      point.set(jobs - i);
+      String line = generateRandom(random.nextInt(128));
+      processLine(line);
+      recordStat(M_LINES_IN, 1L);
+      recordStat(M_LINE_LENGTHS, (long) line.length());
+    }
+  }
+
+  private static String generateRandom(int size) {
+    byte[] array = new byte[size];
+    random.nextBytes(array);
+    return new String(array, Charset.forName("UTF-8"));
+  }
+
+  private static String processLine(String line) {
+    long startTimeNs = System.nanoTime();
+
+    try {
+      Thread.sleep(10L);
+      return line.toUpperCase(Locale.US);
+    } catch (Exception e) {
+      recordTaggedStat(KEY_METHOD, "processLine", M_ERRORS, 1L);
+      return "";
+    } finally {
+      long totalTimeNs = System.nanoTime() - startTimeNs;
+      double timespentMs = totalTimeNs / 1e6;
+      recordTaggedStat(KEY_METHOD, "processLine", M_LATENCY_MS, timespentMs);
+    }
+  }
+
+  private static void recordStat(MeasureLong ml, Long n) {
+    TagContext empty = tagger.emptyBuilder().build();
+    statsRecorder.newMeasureMap().put(ml, n).record(empty);
+  }
+
+  private static void recordTaggedStat(TagKey key, String value, MeasureLong ml, long n) {
+    TagContext context = tagger.emptyBuilder().put(key, TagValue.create(value)).build();
+    statsRecorder.newMeasureMap().put(ml, n).record(context);
+  }
+
+  private static void recordTaggedStat(TagKey key, String value, MeasureDouble md, double d) {
+    TagContext context = tagger.emptyBuilder().put(key, TagValue.create(value)).build();
+    statsRecorder.newMeasureMap().put(md, d).record(context);
+  }
+
+  private static Server getServer(String endPoint, BindableService service) {
+    ServerBuilder<?> builder = NettyServerBuilder.forAddress(parseEndpoint(endPoint));
+    Executor executor = MoreExecutors.directExecutor();
+    builder.executor(executor);
+    return builder.addService(service).build();
+  }
+
+  private static InetSocketAddress parseEndpoint(String endPoint) {
+    try {
+      int colonIndex = endPoint.indexOf(":");
+      String host = endPoint.substring(0, colonIndex);
+      int port = Integer.parseInt(endPoint.substring(colonIndex + 1));
+      return new InetSocketAddress(host, port);
+    } catch (RuntimeException e) {
+      return new InetSocketAddress("localhost", 55678);
+    }
+  }
+}
diff --git a/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsServiceExportRpcHandlerTest.java b/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsServiceExportRpcHandlerTest.java
new file mode 100644
index 0000000..59aec51
--- /dev/null
+++ b/exporters/metrics/ocagent/src/test/java/io/opencensus/exporter/metrics/ocagent/OcAgentMetricsServiceExportRpcHandlerTest.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.ocagent;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.grpc.ManagedChannel;
+import io.grpc.Server;
+import io.grpc.Status;
+import io.grpc.inprocess.InProcessChannelBuilder;
+import io.grpc.inprocess.InProcessServerBuilder;
+import io.opencensus.proto.agent.common.v1.LibraryInfo;
+import io.opencensus.proto.agent.common.v1.LibraryInfo.Language;
+import io.opencensus.proto.agent.common.v1.Node;
+import io.opencensus.proto.agent.metrics.v1.ExportMetricsServiceRequest;
+import io.opencensus.proto.agent.metrics.v1.MetricsServiceGrpc;
+import io.opencensus.proto.agent.metrics.v1.MetricsServiceGrpc.MetricsServiceStub;
+import java.io.IOException;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link OcAgentMetricsServiceExportRpcHandler}. */
+@RunWith(JUnit4.class)
+public class OcAgentMetricsServiceExportRpcHandlerTest {
+
+  private final FakeOcAgentMetricsServiceGrpcImpl traceServiceGrpc =
+      new FakeOcAgentMetricsServiceGrpcImpl();
+  private final String serverName = InProcessServerBuilder.generateName();
+  private final Server server =
+      InProcessServerBuilder.forName(serverName)
+          .directExecutor() // directExecutor is fine for unit tests
+          .addService(traceServiceGrpc)
+          .build();
+
+  private static final Node NODE =
+      Node.newBuilder()
+          .setLibraryInfo(LibraryInfo.newBuilder().setLanguage(Language.JAVA).build())
+          .build();
+
+  @Before
+  public void setUp() throws IOException {
+    server.start();
+  }
+
+  @After
+  public void tearDown() {
+    if (!server.isTerminated()) {
+      server.shutdown();
+    }
+  }
+
+  @Test
+  public void export_createAndExport() {
+    OcAgentMetricsServiceExportRpcHandler exportRpcHandler =
+        OcAgentMetricsServiceExportRpcHandler.create(getStub(serverName));
+    ExportMetricsServiceRequest request =
+        ExportMetricsServiceRequest.newBuilder().setNode(NODE).build();
+    exportRpcHandler.onExport(request);
+    assertThat(traceServiceGrpc.getExportMetricsServiceRequests()).containsExactly(request);
+  }
+
+  @Test
+  public void export_Create_ConnectionFailed() {
+    String nonExistingServer = "unknown";
+    OcAgentMetricsServiceExportRpcHandler exportRpcHandler =
+        OcAgentMetricsServiceExportRpcHandler.create(getStub(nonExistingServer));
+    assertThat(exportRpcHandler.isCompleted()).isTrue();
+    assertThat(exportRpcHandler.getTerminateStatus().getCode()).isEqualTo(Status.Code.UNAVAILABLE);
+  }
+
+  @Test
+  public void export_Complete_Interrupted() {
+    OcAgentMetricsServiceExportRpcHandler exportRpcHandler =
+        OcAgentMetricsServiceExportRpcHandler.create(getStub(serverName));
+    assertThat(exportRpcHandler.isCompleted()).isFalse();
+    exportRpcHandler.onComplete(new InterruptedException());
+    assertThat(exportRpcHandler.isCompleted()).isTrue();
+    assertThat(exportRpcHandler.getTerminateStatus()).isEqualTo(Status.UNKNOWN);
+  }
+
+  private static MetricsServiceStub getStub(String serverName) {
+    ManagedChannel channel = InProcessChannelBuilder.forName(serverName).directExecutor().build();
+    return MetricsServiceGrpc.newStub(channel);
+  }
+}
diff --git a/exporters/metrics/util/README.md b/exporters/metrics/util/README.md
new file mode 100644
index 0000000..665dc04
--- /dev/null
+++ b/exporters/metrics/util/README.md
@@ -0,0 +1,29 @@
+# OpenCensus Java Metrics Exporter Util
+
+The *OpenCensus Metrics Exporter Util* is the Java helper package for all metrics exporters.
+
+## Quickstart
+
+### Add the dependencies to your project
+
+For Maven add to your `pom.xml`:
+```xml
+<dependencies>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-api</artifactId>
+    <version>0.19.0</version>
+  </dependency>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-exporter-metrics-util</artifactId>
+    <version>0.19.0</version>
+  </dependency>
+</dependencies>
+```
+
+For Gradle add to your dependencies:
+```groovy
+compile 'io.opencensus:opencensus-api:0.19.0'
+compile 'io.opencensus:opencensus-exporter-metrics-util:0.19.0'
+```
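+
+### Wire up an exporter
+
+The sketch below shows how the pieces of this package fit together, assuming you supply your own
+`MetricExporter` implementation (the class, method and span names here are illustrative, not part
+of the library):
+
+```java
+import io.opencensus.common.Duration;
+import io.opencensus.exporter.metrics.util.IntervalMetricReader;
+import io.opencensus.exporter.metrics.util.MetricExporter;
+import io.opencensus.exporter.metrics.util.MetricReader;
+import io.opencensus.metrics.export.Metric;
+import java.util.Collection;
+
+public final class MyMetricExporter extends MetricExporter {
+
+  @Override
+  public void export(Collection<Metric> metrics) {
+    // Send the metrics to your backend here.
+  }
+
+  public static IntervalMetricReader createAndRegister() {
+    // Read all registered metrics every 60 seconds and hand them to MyMetricExporter.
+    return IntervalMetricReader.create(
+        new MyMetricExporter(),
+        MetricReader.create(MetricReader.Options.builder().setSpanName("ExportMyMetrics").build()),
+        IntervalMetricReader.Options.builder()
+            .setExportInterval(Duration.create(60, 0))
+            .build());
+  }
+}
+```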
diff --git a/exporters/metrics/util/build.gradle b/exporters/metrics/util/build.gradle
new file mode 100644
index 0000000..dbff4e8
--- /dev/null
+++ b/exporters/metrics/util/build.gradle
@@ -0,0 +1,15 @@
+description = 'OpenCensus Java Metrics Exporter Utils'
+
+[compileJava, compileTestJava].each() {
+    it.sourceCompatibility = 1.7
+    it.targetCompatibility = 1.7
+}
+
+dependencies {
+    compileOnly libraries.auto_value
+
+    compile project(':opencensus-api'),
+            libraries.guava
+
+    signature "org.codehaus.mojo.signature:java17:1.0@signature"
+}
diff --git a/exporters/metrics/util/src/main/java/io/opencensus/exporter/metrics/util/IntervalMetricReader.java b/exporters/metrics/util/src/main/java/io/opencensus/exporter/metrics/util/IntervalMetricReader.java
new file mode 100644
index 0000000..f795b9c
--- /dev/null
+++ b/exporters/metrics/util/src/main/java/io/opencensus/exporter/metrics/util/IntervalMetricReader.java
@@ -0,0 +1,180 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.util;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import io.opencensus.common.Duration;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * Wrapper of the {@link MetricReader} that automatically reads and exports the metrics at every
+ * export interval.
+ *
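+ * <p>Lifecycle sketch ({@code myExporter} stands for any {@link MetricExporter} implementation;
+ * this is illustrative, not prescriptive):
+ *
+ * <pre>{@code
+ * IntervalMetricReader intervalMetricReader =
+ *     IntervalMetricReader.create(
+ *         myExporter,
+ *         MetricReader.create(MetricReader.Options.builder().build()),
+ *         IntervalMetricReader.Options.builder()
+ *             .setExportInterval(Duration.create(60, 0))
+ *             .build());
+ * // Optionally force an immediate read and export.
+ * intervalMetricReader.readAndExportNow();
+ * // Interrupt the worker thread when shutting down.
+ * intervalMetricReader.stop();
+ * }</pre>
+ *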
+ * @since 0.19
+ */
+public final class IntervalMetricReader {
+  @VisibleForTesting static final Duration DEFAULT_INTERVAL = Duration.create(60, 0);
+  private static final Duration ZERO = Duration.create(0, 0);
+
+  private final Thread workerThread;
+  private final Worker worker;
+
+  private IntervalMetricReader(Worker worker) {
+    this.worker = worker;
+    this.workerThread = new Thread(worker);
+    try {
+      this.workerThread.setName("ExportWorkerThread");
+      this.workerThread.setDaemon(true);
+    } catch (SecurityException e) {
+      // OK if we can't set the name or daemon in this environment.
+    }
+    workerThread.start();
+  }
+
+  /**
+   * Options for {@link IntervalMetricReader}.
+   *
+   * @since 0.19
+   */
+  @AutoValue
+  @Immutable
+  public abstract static class Options {
+
+    Options() {}
+
+    /**
+     * Returns the export interval between pushes to the {@link MetricExporter}.
+     *
+     * @return the export interval.
+     * @since 0.19
+     */
+    public abstract Duration getExportInterval();
+
+    /**
+     * Returns a new {@link Builder}.
+     *
+     * @return a {@code Builder}.
+     * @since 0.19
+     */
+    public static Builder builder() {
+      return new AutoValue_IntervalMetricReader_Options.Builder()
+          .setExportInterval(DEFAULT_INTERVAL);
+    }
+
+    /**
+     * Builder for {@link Options}.
+     *
+     * @since 0.19
+     */
+    @AutoValue.Builder
+    public abstract static class Builder {
+      /**
+       * Sets the export interval.
+       *
+       * @param exportInterval the export interval between pushes to the {@link MetricExporter}.
+       * @return this.
+       * @since 0.19
+       */
+      public abstract Builder setExportInterval(Duration exportInterval);
+
+      /**
+       * Builds a new {@link Options} with current settings.
+       *
+       * @return a {@code Options}.
+       * @since 0.19
+       */
+      public abstract Options build();
+    }
+  }
+
+  /**
+   * Creates a new {@link IntervalMetricReader}.
+   *
+   * @param metricExporter the {@link MetricExporter} that receives the metrics at every interval.
+   * @param metricReader the {@link MetricReader} to be used to read metrics.
+   * @param options the {@link Options} for the new {@link IntervalMetricReader}.
+   * @return a new {@link IntervalMetricReader}.
+   * @since 0.19
+   */
+  public static IntervalMetricReader create(
+      MetricExporter metricExporter, MetricReader metricReader, Options options) {
+    checkNotNull(options, "options");
+    Duration exportInterval = checkNotNull(options.getExportInterval(), "exportInterval");
+    checkArgument(exportInterval.compareTo(ZERO) > 0, "Export interval must be positive");
+
+    return new IntervalMetricReader(
+        new Worker(
+            checkNotNull(metricExporter, "metricExporter"),
+            exportInterval.toMillis(),
+            checkNotNull(metricReader, "metricReader")));
+  }
+
+  /**
+   * Reads and exports data immediately.
+   *
+   * @since 0.19
+   */
+  public void readAndExportNow() {
+    worker.readAndExport();
+  }
+
+  /**
+   * Stops the worker thread by calling {@link Thread#interrupt()}.
+   *
+   * @since 0.19
+   */
+  public void stop() {
+    workerThread.interrupt();
+  }
+
+  private static final class Worker implements Runnable {
+
+    private final MetricExporter metricExporter;
+    private final long exportInterval;
+    private final MetricReader metricReader;
+
+    private Worker(MetricExporter metricExporter, long exportInterval, MetricReader metricReader) {
+      this.metricExporter = metricExporter;
+      this.exportInterval = exportInterval;
+      this.metricReader = metricReader;
+    }
+
+    @Override
+    public void run() {
+      while (true) {
+        try {
+          Thread.sleep(exportInterval);
+        } catch (InterruptedException ie) {
+          // Preserve the interrupt status and stop doing any work.
+          Thread.currentThread().interrupt();
+          break;
+        }
+        readAndExport();
+      }
+      // Do one last readAndExport before stop.
+      readAndExport();
+    }
+
+    private void readAndExport() {
+      metricReader.readAndExport(metricExporter);
+    }
+  }
+}
diff --git a/exporters/metrics/util/src/main/java/io/opencensus/exporter/metrics/util/MetricExporter.java b/exporters/metrics/util/src/main/java/io/opencensus/exporter/metrics/util/MetricExporter.java
new file mode 100644
index 0000000..54f6bad
--- /dev/null
+++ b/exporters/metrics/util/src/main/java/io/opencensus/exporter/metrics/util/MetricExporter.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.util;
+
+import io.opencensus.metrics.export.Metric;
+import java.util.Collection;
+
+/**
+ * Abstract class that represents a metric exporter.
+ *
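+ * <p>A minimal implementation sketch (the class name and output are placeholders, not part of the
+ * library):
+ *
+ * <pre>{@code
+ * public final class LoggingMetricExporter extends MetricExporter {
+ *   @Override
+ *   public void export(Collection<Metric> metrics) {
+ *     System.out.println("Exporting " + metrics.size() + " metrics");
+ *   }
+ * }
+ * }</pre>
+ *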
+ * @since 0.19
+ */
+public abstract class MetricExporter {
+
+  /**
+   * Exports the given list of {@link Metric}s.
+   *
+   * @param metrics the list of {@link Metric}s to be exported.
+   * @since 0.19
+   */
+  public abstract void export(Collection<Metric> metrics);
+}
diff --git a/exporters/metrics/util/src/main/java/io/opencensus/exporter/metrics/util/MetricReader.java b/exporters/metrics/util/src/main/java/io/opencensus/exporter/metrics/util/MetricReader.java
new file mode 100644
index 0000000..73c9cb2
--- /dev/null
+++ b/exporters/metrics/util/src/main/java/io/opencensus/exporter/metrics/util/MetricReader.java
@@ -0,0 +1,181 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.util;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import io.opencensus.common.Scope;
+import io.opencensus.metrics.Metrics;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricProducer;
+import io.opencensus.metrics.export.MetricProducerManager;
+import io.opencensus.trace.Sampler;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Status;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.samplers.Samplers;
+import java.util.ArrayList;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * Helper class that reads all available {@link Metric}s from a {@link MetricProducerManager} and
+ * exports them to a {@link MetricExporter}.
+ *
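+ * <p>One-shot usage sketch ({@code myExporter} stands for any {@link MetricExporter}
+ * implementation):
+ *
+ * <pre>{@code
+ * MetricReader.create(MetricReader.Options.builder().build()).readAndExport(myExporter);
+ * }</pre>
+ *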
+ * @since 0.19
+ */
+public class MetricReader {
+  private static final Tracer tracer = Tracing.getTracer();
+  private static final Logger logger = Logger.getLogger(MetricReader.class.getName());
+  private static final Sampler probabilitySampler = Samplers.probabilitySampler(0.0001);
+  @VisibleForTesting static final String DEFAULT_SPAN_NAME = "ExportMetrics";
+
+  private final MetricProducerManager metricProducerManager;
+  private final String spanName;
+
+  private MetricReader(MetricProducerManager metricProducerManager, String spanName) {
+    this.metricProducerManager = metricProducerManager;
+    this.spanName = spanName;
+  }
+
+  /**
+   * Options for {@link MetricReader}.
+   *
+   * @since 0.19
+   */
+  @AutoValue
+  @Immutable
+  public abstract static class Options {
+
+    Options() {}
+
+    /**
+     * Returns the {@link MetricProducerManager}.
+     *
+     * @return the {@link MetricProducerManager}.
+     * @since 0.19
+     */
+    public abstract MetricProducerManager getMetricProducerManager();
+
+    /**
+     * Returns the span name for the {@link Span} created when data are read and exported.
+     *
+     * @return the span name for the {@link Span} created when data are read and exported.
+     * @since 0.19
+     */
+    public abstract String getSpanName();
+
+    /**
+     * Returns a new {@link Options.Builder}.
+     *
+     * @return a {@code Builder}.
+     * @since 0.19
+     */
+    public static Builder builder() {
+      return new AutoValue_MetricReader_Options.Builder()
+          .setMetricProducerManager(Metrics.getExportComponent().getMetricProducerManager())
+          .setSpanName(DEFAULT_SPAN_NAME);
+    }
+
+    /**
+     * Builder for {@link MetricReader.Options}.
+     *
+     * @since 0.19
+     */
+    @AutoValue.Builder
+    public abstract static class Builder {
+      /**
+       * Sets the {@link MetricProducerManager}.
+       *
+       * @param metricProducerManager the {@link MetricProducerManager}.
+       * @return this.
+       * @since 0.19
+       */
+      public abstract Builder setMetricProducerManager(MetricProducerManager metricProducerManager);
+
+      /**
+       * Sets the span name for the {@link Span} created when data are read and exported.
+       *
+       * @param spanName the span name for the {@link Span} created when data are read and exported.
+       * @return this.
+       * @since 0.19
+       */
+      public abstract Builder setSpanName(String spanName);
+
+      /**
+       * Builds a new {@link Options} with current settings.
+       *
+       * @return a {@code Options}.
+       * @since 0.19
+       */
+      public abstract Options build();
+    }
+  }
+
+  /**
+   * Creates a new {@link MetricReader}.
+   *
+   * @param options the options for {@link MetricReader}.
+   * @return a new {@link MetricReader}.
+   * @since 0.19
+   */
+  public static MetricReader create(Options options) {
+    checkNotNull(options, "options");
+    return new MetricReader(
+        checkNotNull(options.getMetricProducerManager(), "metricProducerManager"),
+        checkNotNull(options.getSpanName(), "spanName"));
+  }
+
+  /**
+   * Reads the metrics from the {@link MetricProducerManager} and exports them to the {@code
+   * metricExporter}.
+   *
+   * @param metricExporter the exporter called to export the metrics read.
+   * @since 0.19
+   */
+  public void readAndExport(MetricExporter metricExporter) {
+    Span span =
+        tracer
+            .spanBuilder(spanName)
+            .setRecordEvents(true)
+            .setSampler(probabilitySampler)
+            .startSpan();
+    Scope scope = tracer.withSpan(span);
+    try {
+      ArrayList<Metric> metricsList = new ArrayList<>();
+      for (MetricProducer metricProducer : metricProducerManager.getAllMetricProducer()) {
+        metricsList.addAll(metricProducer.getMetrics());
+      }
+      metricExporter.export(metricsList);
+    } catch (Throwable e) {
+      logger.log(Level.WARNING, "Exception thrown by the metrics exporter.", e);
+      span.setStatus(
+          Status.UNKNOWN.withDescription("Exception when exporting metrics: " + exceptionMessage(e)));
+    } finally {
+      scope.close();
+      span.end();
+    }
+  }
+
+  private static String exceptionMessage(Throwable e) {
+    return e.getMessage() != null ? e.getMessage() : e.getClass().getName();
+  }
+}
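
A minimal usage sketch of the new reader/exporter pair (not part of the upstream change; the exporter class, span name, and printed output are illustrative): a custom MetricExporter receives whatever the registered producers expose each time readAndExport is invoked.

```java
import io.opencensus.exporter.metrics.util.MetricExporter;
import io.opencensus.exporter.metrics.util.MetricReader;
import io.opencensus.metrics.export.Metric;
import java.util.Collection;

// Hypothetical exporter used only for illustration; a real exporter would forward
// the metrics to its backend instead of printing a count.
final class LoggingMetricExporter extends MetricExporter {
  @Override
  public void export(Collection<Metric> metrics) {
    System.out.println("Read " + metrics.size() + " metrics");
  }
}

final class MetricReaderSketch {
  public static void main(String[] args) {
    // Options.builder() defaults to the global MetricProducerManager; the span name is arbitrary.
    MetricReader reader =
        MetricReader.create(
            MetricReader.Options.builder().setSpanName("ExportMetricsSketch").build());
    reader.readAndExport(new LoggingMetricExporter());
  }
}
```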
diff --git a/exporters/metrics/util/src/main/java/io/opencensus/exporter/metrics/util/QueueMetricProducer.java b/exporters/metrics/util/src/main/java/io/opencensus/exporter/metrics/util/QueueMetricProducer.java
new file mode 100644
index 0000000..998638f
--- /dev/null
+++ b/exporters/metrics/util/src/main/java/io/opencensus/exporter/metrics/util/QueueMetricProducer.java
@@ -0,0 +1,147 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.util;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.auto.value.AutoValue;
+import com.google.common.collect.EvictingQueue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricProducer;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Queue;
+import javax.annotation.concurrent.GuardedBy;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * Wrapper of {@link MetricProducer} which allows metrics to be pushed and buffered.
+ *
+ * @since 0.20
+ */
+public final class QueueMetricProducer extends MetricProducer {
+
+  private static final Object monitor = new Object();
+
+  private static final int DEFAULT_BUFFER_SIZE = 32;
+
+  @GuardedBy("monitor")
+  private final Queue<Metric> bufferedMetrics;
+
+  private QueueMetricProducer(int bufferSize) {
+    synchronized (monitor) {
+      bufferedMetrics = EvictingQueue.<Metric>create(bufferSize);
+    }
+  }
+
+  /**
+   * Creates a new {@link QueueMetricProducer}.
+   *
+   * @param options the options for {@link QueueMetricProducer}.
+   * @return a {@code QueueMetricProducer}.
+   * @since 0.20
+   */
+  public static QueueMetricProducer create(Options options) {
+    checkNotNull(options, "options");
+    checkArgument(options.getBufferSize() > 0, "buffer size should be positive.");
+    return new QueueMetricProducer(options.getBufferSize());
+  }
+
+  /**
+   * Pushes {@link Metric}s to this {@link QueueMetricProducer}.
+   *
+   * <p>When the buffer of this {@link QueueMetricProducer} is full, the oldest {@link Metric}s
+   * will be dropped.
+   *
+   * @param metrics {@code Metrics} to be added to this {@code QueueMetricProducer}.
+   * @since 0.20
+   */
+  public void pushMetrics(Collection<Metric> metrics) {
+    synchronized (monitor) {
+      bufferedMetrics.addAll(metrics);
+    }
+  }
+
+  @Override
+  public Collection<Metric> getMetrics() {
+    List<Metric> metricsToExport;
+    synchronized (monitor) {
+      metricsToExport = new ArrayList<>(bufferedMetrics);
+      bufferedMetrics.clear();
+    }
+    return Collections.unmodifiableList(metricsToExport);
+  }
+
+  /**
+   * Options for {@link QueueMetricProducer}.
+   *
+   * @since 0.20
+   */
+  @AutoValue
+  @Immutable
+  public abstract static class Options {
+
+    Options() {}
+
+    /**
+     * Returns the buffer size for the {@link QueueMetricProducer}.
+     *
+     * @return the buffer size for the {@code QueueMetricProducer}.
+     * @since 0.20
+     */
+    public abstract int getBufferSize();
+
+    /**
+     * Returns a new {@link Builder}.
+     *
+     * @return a {@code Builder}.
+     * @since 0.20
+     */
+    public static Options.Builder builder() {
+      return new AutoValue_QueueMetricProducer_Options.Builder().setBufferSize(DEFAULT_BUFFER_SIZE);
+    }
+
+    /**
+     * Builder for {@link Options}.
+     *
+     * @since 0.20
+     */
+    @AutoValue.Builder
+    public abstract static class Builder {
+
+      /**
+       * Sets the buffer size to be used by the {@link QueueMetricProducer}.
+       *
+       * @param bufferSize the buffer size.
+       * @return this.
+       * @since 0.20
+       */
+      public abstract Builder setBufferSize(int bufferSize);
+
+      /**
+       * Builds a new {@link Options} with current settings.
+       *
+       * @return an {@code Options}.
+       * @since 0.20
+       */
+      public abstract Options build();
+    }
+  }
+}
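
A sketch of how the buffered producer might be wired up (illustrative, not part of this change; it assumes the MetricProducerManager.add registration method from opencensus-api, and the buffer size and class names are placeholders):

```java
import io.opencensus.exporter.metrics.util.QueueMetricProducer;
import io.opencensus.metrics.Metrics;
import io.opencensus.metrics.export.Metric;
import java.util.Collection;

final class QueueMetricProducerSketch {
  // Create once; readers such as MetricReader will then see whatever has been pushed.
  private static final QueueMetricProducer producer =
      QueueMetricProducer.create(
          QueueMetricProducer.Options.builder().setBufferSize(64).build());

  static {
    // Register with the global MetricProducerManager so exporters pick up the buffered metrics.
    Metrics.getExportComponent().getMetricProducerManager().add(producer);
  }

  // Called from application code with metrics built elsewhere; once the buffer holds 64
  // entries, the oldest ones are evicted.
  static void record(Collection<Metric> metrics) {
    producer.pushMetrics(metrics);
  }
}
```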
diff --git a/exporters/metrics/util/src/test/java/io/opencensus/exporter/metrics/util/FakeMetricExporter.java b/exporters/metrics/util/src/test/java/io/opencensus/exporter/metrics/util/FakeMetricExporter.java
new file mode 100644
index 0000000..d860a24
--- /dev/null
+++ b/exporters/metrics/util/src/test/java/io/opencensus/exporter/metrics/util/FakeMetricExporter.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.util;
+
+import io.opencensus.metrics.export.Metric;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.GuardedBy;
+
+class FakeMetricExporter extends MetricExporter {
+
+  private final Object monitor = new Object();
+
+  // TODO: Decide whether to use a different collection type for exportedMetrics.
+  @GuardedBy("monitor")
+  private List<List<Metric>> exportedMetrics = new ArrayList<>();
+
+  @Override
+  public void export(Collection<Metric> metricList) {
+    synchronized (monitor) {
+      this.exportedMetrics.add(new ArrayList<>(metricList));
+      monitor.notifyAll();
+    }
+  }
+
+  /**
+   * Waits until export has been called {@code numberOfExports} times. Returns the list of
+   * exported lists of metrics.
+   */
+  @Nullable
+  List<List<Metric>> waitForNumberOfExports(int numberOfExports) {
+    List<List<Metric>> ret;
+    synchronized (monitor) {
+      while (exportedMetrics.size() < numberOfExports) {
+        try {
+          monitor.wait();
+        } catch (InterruptedException e) {
+          // Preserve the interruption status as per guidance.
+          Thread.currentThread().interrupt();
+          return null;
+        }
+      }
+      ret = exportedMetrics;
+      exportedMetrics = new ArrayList<>();
+    }
+    return ret;
+  }
+}
diff --git a/exporters/metrics/util/src/test/java/io/opencensus/exporter/metrics/util/IntervalMetricReaderTest.java b/exporters/metrics/util/src/test/java/io/opencensus/exporter/metrics/util/IntervalMetricReaderTest.java
new file mode 100644
index 0000000..c531bb4
--- /dev/null
+++ b/exporters/metrics/util/src/test/java/io/opencensus/exporter/metrics/util/IntervalMetricReaderTest.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.util;
+
+import static com.google.common.truth.Truth.assertThat;
+import static java.util.concurrent.TimeUnit.MILLISECONDS;
+import static org.mockito.Mockito.when;
+
+import io.opencensus.common.Duration;
+import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.MetricProducer;
+import io.opencensus.metrics.export.MetricProducerManager;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.Value;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+/** Unit tests for {@link IntervalMetricReader}. */
+@RunWith(JUnit4.class)
+public class IntervalMetricReaderTest {
+  private static final String METRIC_NAME = "my metric";
+  private static final String METRIC_DESCRIPTION = "metric description";
+  private static final String METRIC_UNIT = "us";
+  private static final List<LabelKey> LABEL_KEY =
+      Collections.singletonList(LabelKey.create("KEY", "key description"));
+  private static final List<LabelValue> LABEL_VALUE =
+      Collections.singletonList(LabelValue.create("VALUE"));
+  private static final io.opencensus.metrics.export.MetricDescriptor METRIC_DESCRIPTOR =
+      io.opencensus.metrics.export.MetricDescriptor.create(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.CUMULATIVE_INT64, LABEL_KEY);
+
+  private static final Value VALUE_LONG = Value.longValue(12345678);
+  private static final Timestamp TIMESTAMP = Timestamp.fromMillis(3000);
+  private static final Timestamp TIMESTAMP_2 = Timestamp.fromMillis(1000);
+  private static final Point POINT = Point.create(VALUE_LONG, TIMESTAMP);
+
+  private static final io.opencensus.metrics.export.TimeSeries CUMULATIVE_TIME_SERIES =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(LABEL_VALUE, POINT, TIMESTAMP_2);
+
+  private static final Metric METRIC =
+      Metric.createWithOneTimeSeries(METRIC_DESCRIPTOR, CUMULATIVE_TIME_SERIES);
+
+  @Mock private MetricProducerManager metricProducerManager;
+  @Mock private MetricProducer metricProducer;
+
+  @Before
+  public void setUp() {
+    MockitoAnnotations.initMocks(this);
+    Set<MetricProducer> metricProducerSet = new HashSet<>();
+    metricProducerSet.add(metricProducer);
+    when(metricProducer.getMetrics()).thenReturn(Collections.singletonList(METRIC));
+    when(metricProducerManager.getAllMetricProducer()).thenReturn(metricProducerSet);
+  }
+
+  @Test
+  public void testConstants() {
+    assertThat(IntervalMetricReader.DEFAULT_INTERVAL).isEqualTo(Duration.create(60, 0));
+  }
+
+  @Test
+  public void intervalExport() {
+    FakeMetricExporter fakeMetricExporter = new FakeMetricExporter();
+    IntervalMetricReader intervalMetricReader =
+        IntervalMetricReader.create(
+            fakeMetricExporter,
+            MetricReader.create(
+                MetricReader.Options.builder()
+                    .setMetricProducerManager(metricProducerManager)
+                    .build()),
+            IntervalMetricReader.Options.builder()
+                .setExportInterval(Duration.create(0, (int) MILLISECONDS.toNanos(100)))
+                .build());
+    assertThat(fakeMetricExporter.waitForNumberOfExports(1))
+        .containsExactly(Collections.singletonList(METRIC));
+    assertThat(fakeMetricExporter.waitForNumberOfExports(2))
+        .containsExactly(Collections.singletonList(METRIC), Collections.singletonList(METRIC));
+    intervalMetricReader.stop();
+  }
+
+  @Test
+  public void exportAfterStop() {
+    FakeMetricExporter fakeMetricExporter = new FakeMetricExporter();
+    IntervalMetricReader intervalMetricReader =
+        IntervalMetricReader.create(
+            fakeMetricExporter,
+            MetricReader.create(
+                MetricReader.Options.builder()
+                    .setMetricProducerManager(metricProducerManager)
+                    .build()),
+            IntervalMetricReader.Options.builder()
+                .setExportInterval(Duration.create(10, 0))
+                .build());
+    // Relies on stop() being called in less than 10 seconds, before the periodic export fires.
+    intervalMetricReader.stop();
+    assertThat(fakeMetricExporter.waitForNumberOfExports(1))
+        .containsExactly(Collections.singletonList(METRIC));
+  }
+}
diff --git a/exporters/metrics/util/src/test/java/io/opencensus/exporter/metrics/util/MetricReaderTest.java b/exporters/metrics/util/src/test/java/io/opencensus/exporter/metrics/util/MetricReaderTest.java
new file mode 100644
index 0000000..0a06826
--- /dev/null
+++ b/exporters/metrics/util/src/test/java/io/opencensus/exporter/metrics/util/MetricReaderTest.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.util;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import io.opencensus.exporter.metrics.util.MetricReader.Options;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricProducer;
+import io.opencensus.metrics.export.MetricProducerManager;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+/** Unit tests for {@link MetricReader}. */
+@RunWith(JUnit4.class)
+public class MetricReaderTest {
+  @Mock private MetricProducerManager metricProducerManager;
+  @Mock private MetricProducer metricProducer;
+  @Mock private MetricExporter metricExporter;
+
+  @Before
+  public void setUp() {
+    MockitoAnnotations.initMocks(this);
+  }
+
+  @Test
+  public void testConstants() {
+    assertThat(MetricReader.DEFAULT_SPAN_NAME).isEqualTo("ExportMetrics");
+  }
+
+  @Test
+  public void readAndExport() {
+    Set<MetricProducer> metricProducerSet = new HashSet<>();
+    metricProducerSet.add(metricProducer);
+    when(metricProducer.getMetrics()).thenReturn(Collections.<Metric>emptyList());
+    when(metricProducerManager.getAllMetricProducer()).thenReturn(metricProducerSet);
+    MetricReader metricReader =
+        MetricReader.create(
+            Options.builder().setMetricProducerManager(metricProducerManager).build());
+    metricReader.readAndExport(metricExporter);
+    verify(metricExporter).export(eq(Collections.<Metric>emptyList()));
+  }
+}
diff --git a/exporters/metrics/util/src/test/java/io/opencensus/exporter/metrics/util/QueueMetricProducerTest.java b/exporters/metrics/util/src/test/java/io/opencensus/exporter/metrics/util/QueueMetricProducerTest.java
new file mode 100644
index 0000000..ca931d6
--- /dev/null
+++ b/exporters/metrics/util/src/test/java/io/opencensus/exporter/metrics/util/QueueMetricProducerTest.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.metrics.util;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.common.Timestamp;
+import io.opencensus.exporter.metrics.util.QueueMetricProducer.Options;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.Value;
+import java.util.Collections;
+import java.util.List;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link QueueMetricProducer}. */
+@RunWith(JUnit4.class)
+public class QueueMetricProducerTest {
+
+  private static final String METRIC_NAME = "test_metric";
+  private static final String METRIC_DESCRIPTION = "test_description";
+  private static final String METRIC_UNIT = "us";
+  private static final List<LabelKey> LABEL_KEY =
+      Collections.singletonList(LabelKey.create("test_key", "test_description"));
+  private static final List<LabelValue> LABEL_VALUE =
+      Collections.singletonList(LabelValue.create("test_value"));
+  private static final io.opencensus.metrics.export.MetricDescriptor METRIC_DESCRIPTOR =
+      io.opencensus.metrics.export.MetricDescriptor.create(
+          METRIC_NAME,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          MetricDescriptor.Type.CUMULATIVE_INT64,
+          LABEL_KEY);
+
+  private static final Value VALUE_LONG = Value.longValue(12345678);
+  private static final Value VALUE_LONG_2 = Value.longValue(23456789);
+  private static final Timestamp TIMESTAMP = Timestamp.fromMillis(3000);
+  private static final Timestamp TIMESTAMP_2 = Timestamp.fromMillis(4000);
+  private static final Timestamp TIMESTAMP_3 = Timestamp.fromMillis(4000);
+  private static final Point POINT = Point.create(VALUE_LONG, TIMESTAMP);
+  private static final Point POINT_2 = Point.create(VALUE_LONG_2, TIMESTAMP);
+
+  private static final io.opencensus.metrics.export.TimeSeries CUMULATIVE_TIME_SERIES =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(LABEL_VALUE, POINT, TIMESTAMP_2);
+  private static final io.opencensus.metrics.export.TimeSeries CUMULATIVE_TIME_SERIES_2 =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(LABEL_VALUE, POINT_2, TIMESTAMP_3);
+
+  private static final Metric METRIC_1 =
+      Metric.createWithOneTimeSeries(METRIC_DESCRIPTOR, CUMULATIVE_TIME_SERIES);
+  private static final Metric METRIC_2 =
+      Metric.createWithOneTimeSeries(METRIC_DESCRIPTOR, CUMULATIVE_TIME_SERIES_2);
+
+  @Rule public final ExpectedException thrown = ExpectedException.none();
+
+  @Test
+  public void createWithNegativeBufferSize() {
+    Options options = Options.builder().setBufferSize(-1).build();
+    thrown.expect(IllegalArgumentException.class);
+    QueueMetricProducer.create(options);
+  }
+
+  @Test
+  public void createWithZeroBufferSize() {
+    Options options = Options.builder().setBufferSize(0).build();
+    thrown.expect(IllegalArgumentException.class);
+    QueueMetricProducer.create(options);
+  }
+
+  @Test
+  public void pushMetrics() {
+    Options options = Options.builder().setBufferSize(1).build();
+    QueueMetricProducer producer = QueueMetricProducer.create(options);
+    producer.pushMetrics(Collections.singleton(METRIC_1));
+    assertThat(producer.getMetrics()).containsExactly(METRIC_1);
+    assertThat(producer.getMetrics()).isEmpty(); // Flush after each getMetrics().
+  }
+
+  @Test
+  public void pushMetrics_ExceedBufferSize() {
+    Options options = Options.builder().setBufferSize(1).build();
+    QueueMetricProducer producer = QueueMetricProducer.create(options);
+    producer.pushMetrics(Collections.singleton(METRIC_1));
+    producer.pushMetrics(Collections.singleton(METRIC_2));
+    assertThat(producer.getMetrics()).containsExactly(METRIC_2);
+  }
+}
diff --git a/exporters/stats/prometheus/README.md b/exporters/stats/prometheus/README.md
index fa19efc..7e8566d 100644
--- a/exporters/stats/prometheus/README.md
+++ b/exporters/stats/prometheus/README.md
@@ -21,17 +21,17 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-api</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-exporter-stats-prometheus</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-impl</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
     <scope>runtime</scope>
   </dependency>
 </dependencies>
@@ -39,9 +39,9 @@
 
 For Gradle add to your dependencies:
 ```groovy
-compile 'io.opencensus:opencensus-api:0.16.1'
-compile 'io.opencensus:opencensus-exporter-stats-prometheus:0.16.1'
-runtime 'io.opencensus:opencensus-impl:0.16.1'
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-exporter-stats-prometheus:0.28.3'
+runtime 'io.opencensus:opencensus-impl:0.28.3'
 ```
 
 #### Register the exporter
diff --git a/exporters/stats/prometheus/build.gradle b/exporters/stats/prometheus/build.gradle
index fe8563c..95f129d 100644
--- a/exporters/stats/prometheus/build.gradle
+++ b/exporters/stats/prometheus/build.gradle
@@ -9,11 +9,10 @@
     compileOnly libraries.auto_value
 
     compile project(':opencensus-api'),
+            project(':opencensus-exporter-metrics-util'),
             libraries.guava,
             libraries.prometheus_simpleclient
 
-    testCompile project(':opencensus-api')
-
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
     signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
 }
\ No newline at end of file
diff --git a/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtils.java b/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtils.java
index 288813d..b4588a5 100644
--- a/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtils.java
+++ b/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtils.java
@@ -23,21 +23,16 @@
 import com.google.common.collect.Lists;
 import io.opencensus.common.Function;
 import io.opencensus.common.Functions;
-import io.opencensus.stats.Aggregation;
-import io.opencensus.stats.Aggregation.Count;
-import io.opencensus.stats.Aggregation.Distribution;
-import io.opencensus.stats.Aggregation.Sum;
-import io.opencensus.stats.AggregationData;
-import io.opencensus.stats.AggregationData.CountData;
-import io.opencensus.stats.AggregationData.DistributionData;
-import io.opencensus.stats.AggregationData.LastValueDataDouble;
-import io.opencensus.stats.AggregationData.LastValueDataLong;
-import io.opencensus.stats.AggregationData.SumDataDouble;
-import io.opencensus.stats.AggregationData.SumDataLong;
-import io.opencensus.stats.View;
-import io.opencensus.stats.ViewData;
-import io.opencensus.tags.TagKey;
-import io.opencensus.tags.TagValue;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Distribution;
+import io.opencensus.metrics.export.Distribution.BucketOptions;
+import io.opencensus.metrics.export.Distribution.BucketOptions.ExplicitOptions;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.Summary;
+import io.opencensus.metrics.export.Summary.Snapshot.ValueAtPercentile;
+import io.opencensus.metrics.export.Value;
 import io.prometheus.client.Collector;
 import io.prometheus.client.Collector.MetricFamilySamples;
 import io.prometheus.client.Collector.MetricFamilySamples.Sample;
@@ -45,162 +40,164 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map.Entry;
 
 /*>>>
 import org.checkerframework.checker.nullness.qual.Nullable;
 */
 
 /**
- * Util methods to convert OpenCensus Stats data models to Prometheus data models.
+ * Util methods to convert OpenCensus Metrics data models to Prometheus data models.
  *
- * <p>Each OpenCensus {@link View} will be converted to a Prometheus {@link MetricFamilySamples}
- * with no {@link Sample}s, and is used for registering Prometheus {@code Metric}s. Only {@code
- * Cumulative} views are supported. All views are under namespace "opencensus".
+ * <p>Each OpenCensus {@link MetricDescriptor} will be converted to a Prometheus {@link
+ * MetricFamilySamples} with no {@link Sample}s, and is used for registering Prometheus {@code
+ * Metric}s.
  *
- * <p>{@link Aggregation} will be converted to a corresponding Prometheus {@link Type}. {@link Sum}
- * will be {@link Type#UNTYPED}, {@link Count} will be {@link Type#COUNTER}, {@link
- * Aggregation.Mean} will be {@link Type#SUMMARY}, {@link Aggregation.LastValue} will be {@link
- * Type#GAUGE} and {@link Distribution} will be {@link Type#HISTOGRAM}. Please note we cannot set
- * bucket boundaries for custom {@link Type#HISTOGRAM}.
+ * <p>Each OpenCensus {@link Metric} will be converted to a Prometheus {@link MetricFamilySamples},
+ * and each {@code Point} of the {@link Metric} will be converted to Prometheus {@link Sample}s.
  *
- * <p>Each OpenCensus {@link ViewData} will be converted to a Prometheus {@link
- * MetricFamilySamples}, and each {@code Row} of the {@link ViewData} will be converted to
- * Prometheus {@link Sample}s.
+ * <p>{@code io.opencensus.metrics.export.Value.ValueDouble} and {@code
+ * io.opencensus.metrics.export.Value.ValueLong} will each be converted to a single {@link Sample}.
+ * {@code io.opencensus.metrics.export.Value.ValueSummary} will be converted to two {@code Sample}s,
+ * sum and count. {@code io.opencensus.metrics.export.Value.ValueDistribution} will be converted to
+ * a list of {@link Sample}s that contain the sum, count and histogram buckets.
  *
- * <p>{@link SumDataDouble}, {@link SumDataLong}, {@link LastValueDataDouble}, {@link
- * LastValueDataLong} and {@link CountData} will be converted to a single {@link Sample}. {@link
- * AggregationData.MeanData} will be converted to two {@link Sample}s sum and count. {@link
- * DistributionData} will be converted to a list of {@link Sample}s that have the sum, count and
- * histogram buckets.
- *
- * <p>{@link TagKey} and {@link TagValue} will be converted to Prometheus {@code LabelName} and
- * {@code LabelValue}. {@code Null} {@link TagValue} will be converted to an empty string.
+ * <p>{@link LabelKey} and {@link LabelValue} will be converted to Prometheus {@code LabelName} and
+ * {@code LabelValue}. {@code Null} {@link LabelValue} will be converted to an empty string.
  *
  * <p>Please note that Prometheus Metric and Label name can only have alphanumeric characters and
  * underscore. All other characters will be sanitized by underscores.
  */
-@SuppressWarnings("deprecation")
 final class PrometheusExportUtils {
 
   @VisibleForTesting static final String SAMPLE_SUFFIX_BUCKET = "_bucket";
   @VisibleForTesting static final String SAMPLE_SUFFIX_COUNT = "_count";
   @VisibleForTesting static final String SAMPLE_SUFFIX_SUM = "_sum";
   @VisibleForTesting static final String LABEL_NAME_BUCKET_BOUND = "le";
+  @VisibleForTesting static final String LABEL_NAME_QUANTILE = "quantile";
 
-  private static final Function<Object, Type> TYPE_UNTYPED_FUNCTION =
-      Functions.returnConstant(Type.UNTYPED);
-  private static final Function<Object, Type> TYPE_COUNTER_FUNCTION =
-      Functions.returnConstant(Type.COUNTER);
-  private static final Function<Object, Type> TYPE_HISTOGRAM_FUNCTION =
-      Functions.returnConstant(Type.HISTOGRAM);
-  private static final Function<Object, Type> TYPE_GAUGE_FUNCTION =
-      Functions.returnConstant(Type.GAUGE);
-
-  // Converts a ViewData to a Prometheus MetricFamilySamples.
-  static MetricFamilySamples createMetricFamilySamples(ViewData viewData) {
-    View view = viewData.getView();
-    String name = Collector.sanitizeMetricName(view.getName().asString());
-    Type type = getType(view.getAggregation(), view.getWindow());
-    List<String> labelNames = convertToLabelNames(view.getColumns());
+  // Converts a Metric to a Prometheus MetricFamilySamples.
+  static MetricFamilySamples createMetricFamilySamples(Metric metric, String namespace) {
+    MetricDescriptor metricDescriptor = metric.getMetricDescriptor();
+    String name = getNamespacedName(metricDescriptor.getName(), namespace);
+    Type type = getType(metricDescriptor.getType());
+    List<String> labelNames = convertToLabelNames(metricDescriptor.getLabelKeys());
     List<Sample> samples = Lists.newArrayList();
-    for (Entry<List</*@Nullable*/ TagValue>, AggregationData> entry :
-        viewData.getAggregationMap().entrySet()) {
-      samples.addAll(
-          getSamples(name, labelNames, entry.getKey(), entry.getValue(), view.getAggregation()));
+
+    for (io.opencensus.metrics.export.TimeSeries timeSeries : metric.getTimeSeriesList()) {
+      for (io.opencensus.metrics.export.Point point : timeSeries.getPoints()) {
+        samples.addAll(getSamples(name, labelNames, timeSeries.getLabelValues(), point.getValue()));
+      }
     }
-    return new MetricFamilySamples(name, type, view.getDescription(), samples);
+    return new MetricFamilySamples(name, type, metricDescriptor.getDescription(), samples);
   }
 
-  // Converts a View to a Prometheus MetricFamilySamples.
+  // Converts a MetricDescriptor to a Prometheus MetricFamilySamples.
   // Used only for Prometheus metric registry, should not contain any actual samples.
-  static MetricFamilySamples createDescribableMetricFamilySamples(View view) {
-    String name = Collector.sanitizeMetricName(view.getName().asString());
-    Type type = getType(view.getAggregation(), view.getWindow());
-    List<String> labelNames = convertToLabelNames(view.getColumns());
+  static MetricFamilySamples createDescribableMetricFamilySamples(
+      MetricDescriptor metricDescriptor, String namespace) {
+    String name = getNamespacedName(metricDescriptor.getName(), namespace);
+    Type type = getType(metricDescriptor.getType());
+    List<String> labelNames = convertToLabelNames(metricDescriptor.getLabelKeys());
+
     if (containsDisallowedLeLabelForHistogram(labelNames, type)) {
       throw new IllegalStateException(
           "Prometheus Histogram cannot have a label named 'le', "
               + "because it is a reserved label for bucket boundaries. "
-              + "Please remove this tag key from your view.");
+              + "Please remove this key from your view.");
     }
+
+    if (containsDisallowedQuantileLabelForSummary(labelNames, type)) {
+      throw new IllegalStateException(
+          "Prometheus Summary cannot have a label named 'quantile', "
+              + "because it is a reserved label. Please remove this key from your view.");
+    }
+
     return new MetricFamilySamples(
-        name, type, view.getDescription(), Collections.<Sample>emptyList());
+        name, type, metricDescriptor.getDescription(), Collections.<Sample>emptyList());
+  }
+
+  private static String getNamespacedName(String metricName, String namespace) {
+    if (!namespace.isEmpty()) {
+      if (!namespace.endsWith("/") && !namespace.endsWith("_")) {
+        namespace += '_';
+      }
+      metricName = namespace + metricName;
+    }
+    return Collector.sanitizeMetricName(metricName);
   }
 
   @VisibleForTesting
-  static Type getType(Aggregation aggregation, View.AggregationWindow window) {
-    if (!(window instanceof View.AggregationWindow.Cumulative)) {
-      return Type.UNTYPED;
+  static Type getType(MetricDescriptor.Type type) {
+    if (type == MetricDescriptor.Type.CUMULATIVE_INT64
+        || type == MetricDescriptor.Type.CUMULATIVE_DOUBLE) {
+      return Type.COUNTER;
+    } else if (type == MetricDescriptor.Type.GAUGE_INT64
+        || type == MetricDescriptor.Type.GAUGE_DOUBLE) {
+      return Type.GAUGE;
+    } else if (type == MetricDescriptor.Type.CUMULATIVE_DISTRIBUTION
+        || type == MetricDescriptor.Type.GAUGE_DISTRIBUTION) {
+      return Type.HISTOGRAM;
+    } else if (type == MetricDescriptor.Type.SUMMARY) {
+      return Type.SUMMARY;
     }
-    return aggregation.match(
-        TYPE_UNTYPED_FUNCTION, // SUM
-        TYPE_COUNTER_FUNCTION, // COUNT
-        TYPE_HISTOGRAM_FUNCTION, // DISTRIBUTION
-        TYPE_GAUGE_FUNCTION, // LAST VALUE
-        new Function<Aggregation, Type>() {
-          @Override
-          public Type apply(Aggregation arg) {
-            if (arg instanceof Aggregation.Mean) {
-              return Type.SUMMARY;
-            }
-            return Type.UNTYPED;
-          }
-        });
+    return Type.UNTYPED;
   }
 
-  // Converts a row in ViewData (a.k.a Entry<List<TagValue>, AggregationData>) to a list of
-  // Prometheus Samples.
+  // Converts a point value in Metric to a list of Prometheus Samples.
   @VisibleForTesting
   static List<Sample> getSamples(
       final String name,
       final List<String> labelNames,
-      List</*@Nullable*/ TagValue> tagValues,
-      AggregationData aggregationData,
-      final Aggregation aggregation) {
+      List<LabelValue> labelValuesList,
+      Value value) {
     Preconditions.checkArgument(
-        labelNames.size() == tagValues.size(), "Label names and tag values have different sizes.");
+        labelNames.size() == labelValuesList.size(), "Label names and values don't have the same size.");
     final List<Sample> samples = Lists.newArrayList();
-    final List<String> labelValues = new ArrayList<String>(tagValues.size());
-    for (TagValue tagValue : tagValues) {
-      String labelValue = tagValue == null ? "" : tagValue.asString();
-      labelValues.add(labelValue);
+
+    final List<String> labelValues = new ArrayList<String>(labelValuesList.size());
+    for (LabelValue labelValue : labelValuesList) {
+      String val = labelValue == null ? "" : labelValue.getValue();
+      labelValues.add(val == null ? "" : val);
     }
 
-    aggregationData.match(
-        new Function<SumDataDouble, Void>() {
+    return value.match(
+        new Function<Double, List<Sample>>() {
           @Override
-          public Void apply(SumDataDouble arg) {
-            samples.add(new Sample(name, labelNames, labelValues, arg.getSum()));
-            return null;
+          public List<Sample> apply(Double arg) {
+            samples.add(new Sample(name, labelNames, labelValues, arg));
+            return samples;
           }
         },
-        new Function<SumDataLong, Void>() {
+        new Function<Long, List<Sample>>() {
           @Override
-          public Void apply(SumDataLong arg) {
-            samples.add(new Sample(name, labelNames, labelValues, arg.getSum()));
-            return null;
+          public List<Sample> apply(Long arg) {
+            samples.add(new Sample(name, labelNames, labelValues, arg));
+            return samples;
           }
         },
-        new Function<CountData, Void>() {
+        new Function<Distribution, List<Sample>>() {
           @Override
-          public Void apply(CountData arg) {
-            samples.add(new Sample(name, labelNames, labelValues, arg.getCount()));
-            return null;
-          }
-        },
-        new Function<DistributionData, Void>() {
-          @Override
-          public Void apply(DistributionData arg) {
-            // For histogram buckets, manually add the bucket boundaries as "le" labels. See
-            // https://github.com/prometheus/client_java/commit/ed184d8e50c82e98bb2706723fff764424840c3a#diff-c505abbde72dd6bf36e89917b3469404R241
-            @SuppressWarnings("unchecked")
-            Distribution distribution = (Distribution) aggregation;
-            List<Double> boundaries = distribution.getBucketBoundaries().getBoundaries();
+          public List<Sample> apply(final Distribution arg) {
+            BucketOptions bucketOptions = arg.getBucketOptions();
+            List<Double> boundaries = new ArrayList<>();
+
+            if (bucketOptions != null) {
+              boundaries =
+                  bucketOptions.match(
+                      new Function<ExplicitOptions, List<Double>>() {
+                        @Override
+                        public List<Double> apply(ExplicitOptions arg) {
+                          return arg.getBucketBoundaries();
+                        }
+                      },
+                      Functions.<List<Double>>throwIllegalArgumentException());
+            }
+
             List<String> labelNamesWithLe = new ArrayList<String>(labelNames);
             labelNamesWithLe.add(LABEL_NAME_BUCKET_BOUND);
             long cumulativeCount = 0;
-            for (int i = 0; i < arg.getBucketCounts().size(); i++) {
+
+            for (int i = 0; i < arg.getBuckets().size(); i++) {
               List<String> labelValuesWithLe = new ArrayList<String>(labelValues);
               // The label value of "le" is the upper inclusive bound.
               // For the last bucket, it should be "+Inf".
@@ -208,7 +205,7 @@
                   doubleToGoString(
                       i < boundaries.size() ? boundaries.get(i) : Double.POSITIVE_INFINITY);
               labelValuesWithLe.add(bucketBoundary);
-              cumulativeCount += arg.getBucketCounts().get(i);
+              cumulativeCount += arg.getBuckets().get(i).getCount();
               samples.add(
                   new MetricFamilySamples.Sample(
                       name + SAMPLE_SUFFIX_BUCKET,
@@ -222,66 +219,59 @@
                     name + SAMPLE_SUFFIX_COUNT, labelNames, labelValues, arg.getCount()));
             samples.add(
                 new MetricFamilySamples.Sample(
-                    name + SAMPLE_SUFFIX_SUM,
-                    labelNames,
-                    labelValues,
-                    arg.getCount() * arg.getMean()));
-            return null;
+                    name + SAMPLE_SUFFIX_SUM, labelNames, labelValues, arg.getSum()));
+            return samples;
           }
         },
-        new Function<LastValueDataDouble, Void>() {
+        new Function<Summary, List<Sample>>() {
           @Override
-          public Void apply(LastValueDataDouble arg) {
-            samples.add(new Sample(name, labelNames, labelValues, arg.getLastValue()));
-            return null;
-          }
-        },
-        new Function<LastValueDataLong, Void>() {
-          @Override
-          public Void apply(LastValueDataLong arg) {
-            samples.add(new Sample(name, labelNames, labelValues, arg.getLastValue()));
-            return null;
-          }
-        },
-        new Function<AggregationData, Void>() {
-          @Override
-          public Void apply(AggregationData arg) {
-            // TODO(songya): remove this once Mean aggregation is completely removed. Before that
-            // we need to continue supporting Mean, since it could still be used by users and some
-            // deprecated RPC views.
-            if (arg instanceof AggregationData.MeanData) {
-              AggregationData.MeanData meanData = (AggregationData.MeanData) arg;
+          public List<Sample> apply(Summary arg) {
+            Long count = arg.getCount();
+            if (count != null) {
               samples.add(
                   new MetricFamilySamples.Sample(
-                      name + SAMPLE_SUFFIX_COUNT, labelNames, labelValues, meanData.getCount()));
-              samples.add(
-                  new MetricFamilySamples.Sample(
-                      name + SAMPLE_SUFFIX_SUM,
-                      labelNames,
-                      labelValues,
-                      meanData.getCount() * meanData.getMean()));
-              return null;
+                      name + SAMPLE_SUFFIX_COUNT, labelNames, labelValues, count));
             }
-            throw new IllegalArgumentException("Unknown Aggregation.");
-          }
-        });
+            Double sum = arg.getSum();
+            if (sum != null) {
+              samples.add(
+                  new MetricFamilySamples.Sample(
+                      name + SAMPLE_SUFFIX_SUM, labelNames, labelValues, sum));
+            }
 
-    return samples;
+            List<ValueAtPercentile> valueAtPercentiles = arg.getSnapshot().getValueAtPercentiles();
+            List<String> labelNamesWithQuantile = new ArrayList<String>(labelNames);
+            labelNamesWithQuantile.add(LABEL_NAME_QUANTILE);
+            for (ValueAtPercentile valueAtPercentile : valueAtPercentiles) {
+              List<String> labelValuesWithQuantile = new ArrayList<String>(labelValues);
+              labelValuesWithQuantile.add(
+                  doubleToGoString(valueAtPercentile.getPercentile() / 100));
+              samples.add(
+                  new MetricFamilySamples.Sample(
+                      name,
+                      labelNamesWithQuantile,
+                      labelValuesWithQuantile,
+                      valueAtPercentile.getValue()));
+            }
+            return samples;
+          }
+        },
+        Functions.<List<Sample>>throwIllegalArgumentException());
   }
 
-  // Converts the list of tag keys to a list of string label names. Also sanitizes the tag keys.
+  // Converts the list of label keys to a list of string label names. Also sanitizes the label keys.
   @VisibleForTesting
-  static List<String> convertToLabelNames(List<TagKey> tagKeys) {
-    final List<String> labelNames = new ArrayList<String>(tagKeys.size());
-    for (TagKey tagKey : tagKeys) {
-      labelNames.add(Collector.sanitizeMetricName(tagKey.getName()));
+  static List<String> convertToLabelNames(List<LabelKey> labelKeys) {
+    final List<String> labelNames = new ArrayList<String>(labelKeys.size());
+    for (LabelKey labelKey : labelKeys) {
+      labelNames.add(Collector.sanitizeMetricName(labelKey.getKey()));
     }
     return labelNames;
   }
 
   // Returns true if there is an "le" label name in histogram label names, returns false otherwise.
   // Similar check to
-  // https://github.com/prometheus/client_java/commit/ed184d8e50c82e98bb2706723fff764424840c3a#diff-c505abbde72dd6bf36e89917b3469404R78
+  // https://github.com/prometheus/client_java/blob/af39ca948ca446757f14d8da618a72d18a46ef3d/simpleclient/src/main/java/io/prometheus/client/Histogram.java#L88
   static boolean containsDisallowedLeLabelForHistogram(List<String> labelNames, Type type) {
     if (!Type.HISTOGRAM.equals(type)) {
       return false;
@@ -294,5 +284,20 @@
     return false;
   }
 
+  // Returns true if there is a "quantile" label name in summary label names, returns false
+  // otherwise. Similar check to
+  // https://github.com/prometheus/client_java/blob/af39ca948ca446757f14d8da618a72d18a46ef3d/simpleclient/src/main/java/io/prometheus/client/Summary.java#L132
+  static boolean containsDisallowedQuantileLabelForSummary(List<String> labelNames, Type type) {
+    if (!Type.SUMMARY.equals(type)) {
+      return false;
+    }
+    for (String label : labelNames) {
+      if (LABEL_NAME_QUANTILE.equals(label)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
   private PrometheusExportUtils() {}
 }
diff --git a/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollector.java b/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollector.java
index d555c92..3d2b680 100644
--- a/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollector.java
+++ b/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollector.java
@@ -17,42 +17,42 @@
 package io.opencensus.exporter.stats.prometheus;
 
 import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.containsDisallowedLeLabelForHistogram;
+import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.containsDisallowedQuantileLabelForSummary;
 import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.convertToLabelNames;
 import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.getType;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
-import io.opencensus.common.Scope;
-import io.opencensus.stats.Stats;
-import io.opencensus.stats.View;
-import io.opencensus.stats.ViewData;
-import io.opencensus.stats.ViewManager;
-import io.opencensus.trace.Sampler;
-import io.opencensus.trace.Span;
+import io.opencensus.exporter.metrics.util.MetricExporter;
+import io.opencensus.exporter.metrics.util.MetricReader;
+import io.opencensus.metrics.Metrics;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricProducerManager;
 import io.opencensus.trace.Status;
 import io.opencensus.trace.Tracer;
 import io.opencensus.trace.Tracing;
-import io.opencensus.trace.samplers.Samplers;
 import io.prometheus.client.Collector;
 import io.prometheus.client.CollectorRegistry;
+import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
 /**
- * OpenCensus Stats {@link Collector} for Prometheus.
+ * OpenCensus Metrics {@link Collector} for Prometheus.
  *
  * @since 0.12
  */
-@SuppressWarnings("deprecation")
 public final class PrometheusStatsCollector extends Collector implements Collector.Describable {
 
   private static final Logger logger = Logger.getLogger(PrometheusStatsCollector.class.getName());
   private static final Tracer tracer = Tracing.getTracer();
-  private static final Sampler probabilitySampler = Samplers.probabilitySampler(0.0001);
-
-  private final ViewManager viewManager;
+  private static final String DESCRIBE_METRICS_FOR_PROMETHEUS = "DescribeMetricsToPrometheus";
+  private static final String EXPORT_METRICS_TO_PROMETHEUS = "ExportMetricsToPrometheus";
+  private final MetricReader collectMetricReader;
+  private final MetricReader describeMetricReader;
+  private final String namespace;
 
   /**
    * Creates a {@link PrometheusStatsCollector} and registers it to Prometheus {@link
@@ -69,7 +69,8 @@
    * @since 0.12
    */
   public static void createAndRegister() {
-    new PrometheusStatsCollector(Stats.getViewManager()).register();
+    new PrometheusStatsCollector(Metrics.getExportComponent().getMetricProducerManager(), "")
+        .register();
   }
 
   /**
@@ -88,87 +89,107 @@
     if (registry == null) {
       registry = CollectorRegistry.defaultRegistry;
     }
-    new PrometheusStatsCollector(Stats.getViewManager()).register(registry);
+    new PrometheusStatsCollector(
+            Metrics.getExportComponent().getMetricProducerManager(), configuration.getNamespace())
+        .register(registry);
+  }
+
+  private static final class ExportMetricExporter extends MetricExporter {
+    private final ArrayList<MetricFamilySamples> samples = new ArrayList<>();
+    private final String namespace;
+
+    private ExportMetricExporter(String namespace) {
+      this.namespace = namespace;
+    }
+
+    @Override
+    public void export(Collection<Metric> metrics) {
+      samples.ensureCapacity(metrics.size());
+      for (Metric metric : metrics) {
+        MetricDescriptor metricDescriptor = metric.getMetricDescriptor();
+        if (containsDisallowedLeLabelForHistogram(
+                convertToLabelNames(metricDescriptor.getLabelKeys()),
+                getType(metricDescriptor.getType()))
+            || containsDisallowedQuantileLabelForSummary(
+                convertToLabelNames(metricDescriptor.getLabelKeys()),
+                getType(metricDescriptor.getType()))) {
+          // Silently skip Distribution metrics whose descriptor has an "le" label key and
+          // Summary metrics whose descriptor has a "quantile" label key.
+          continue;
+        }
+        try {
+          samples.add(PrometheusExportUtils.createMetricFamilySamples(metric, namespace));
+        } catch (Throwable e) {
+          logger.log(Level.WARNING, "Exception thrown when collecting metric samples.", e);
+          tracer
+              .getCurrentSpan()
+              .setStatus(
+                  Status.UNKNOWN.withDescription(
+                      "Exception thrown when collecting Prometheus Metric Samples: "
+                          + exceptionMessage(e)));
+        }
+      }
+    }
   }
 
   @Override
   public List<MetricFamilySamples> collect() {
-    List<MetricFamilySamples> samples = Lists.newArrayList();
-    Span span =
-        tracer
-            .spanBuilder("ExportStatsToPrometheus")
-            .setSampler(probabilitySampler)
-            .setRecordEvents(true)
-            .startSpan();
-    span.addAnnotation("Collect Prometheus Metric Samples.");
-    Scope scope = tracer.withSpan(span);
-    try {
-      for (View view : viewManager.getAllExportedViews()) {
-        if (containsDisallowedLeLabelForHistogram(
-            convertToLabelNames(view.getColumns()),
-            getType(view.getAggregation(), view.getWindow()))) {
-          continue; // silently skip Distribution views with "le" tag key
-        }
+    ExportMetricExporter exportMetricExporter = new ExportMetricExporter(namespace);
+    collectMetricReader.readAndExport(exportMetricExporter);
+    return exportMetricExporter.samples;
+  }
+
+  private static final class DescribeMetricExporter extends MetricExporter {
+    private final ArrayList<MetricFamilySamples> samples = new ArrayList<>();
+    private final String namespace;
+
+    private DescribeMetricExporter(String namespace) {
+      this.namespace = namespace;
+    }
+
+    @Override
+    public void export(Collection<Metric> metrics) {
+      samples.ensureCapacity(metrics.size());
+      for (Metric metric : metrics) {
         try {
-          ViewData viewData = viewManager.getView(view.getName());
-          if (viewData == null) {
-            continue;
-          } else {
-            samples.add(PrometheusExportUtils.createMetricFamilySamples(viewData));
-          }
+          samples.add(
+              PrometheusExportUtils.createDescribableMetricFamilySamples(
+                  metric.getMetricDescriptor(), namespace));
         } catch (Throwable e) {
-          logger.log(Level.WARNING, "Exception thrown when collecting metric samples.", e);
-          span.setStatus(
-              Status.UNKNOWN.withDescription(
-                  "Exception thrown when collecting Prometheus Metric Samples: "
-                      + exceptionMessage(e)));
+          logger.log(Level.WARNING, "Exception thrown when describing metrics.", e);
+          tracer
+              .getCurrentSpan()
+              .setStatus(
+                  Status.UNKNOWN.withDescription(
+                      "Exception thrown when describing Prometheus Metrics: "
+                          + exceptionMessage(e)));
         }
       }
-      span.addAnnotation("Finish collecting Prometheus Metric Samples.");
-    } finally {
-      scope.close();
-      span.end();
     }
-    return samples;
   }
 
   @Override
   public List<MetricFamilySamples> describe() {
-    List<MetricFamilySamples> samples = Lists.newArrayList();
-    Span span =
-        tracer
-            .spanBuilder("DescribeMetricsForPrometheus")
-            .setSampler(probabilitySampler)
-            .setRecordEvents(true)
-            .startSpan();
-    span.addAnnotation("Describe Prometheus Metrics.");
-    Scope scope = tracer.withSpan(span);
-    try {
-      for (View view : viewManager.getAllExportedViews()) {
-        try {
-          samples.add(PrometheusExportUtils.createDescribableMetricFamilySamples(view));
-        } catch (Throwable e) {
-          logger.log(Level.WARNING, "Exception thrown when describing metrics.", e);
-          span.setStatus(
-              Status.UNKNOWN.withDescription(
-                  "Exception thrown when describing Prometheus Metrics: " + exceptionMessage(e)));
-        }
-      }
-      span.addAnnotation("Finish describing Prometheus Metrics.");
-    } finally {
-      scope.close();
-      span.end();
-    }
-    return samples;
+    DescribeMetricExporter describeMetricExporter = new DescribeMetricExporter(namespace);
+    describeMetricReader.readAndExport(describeMetricExporter);
+    return describeMetricExporter.samples;
   }
 
   @VisibleForTesting
-  PrometheusStatsCollector(ViewManager viewManager) {
-    this.viewManager = viewManager;
-    Tracing.getExportComponent()
-        .getSampledSpanStore()
-        .registerSpanNamesForCollection(
-            ImmutableList.of("DescribeMetricsForPrometheus", "ExportStatsToPrometheus"));
+  PrometheusStatsCollector(MetricProducerManager metricProducerManager, String namespace) {
+    this.collectMetricReader =
+        MetricReader.create(
+            MetricReader.Options.builder()
+                .setMetricProducerManager(metricProducerManager)
+                .setSpanName(EXPORT_METRICS_TO_PROMETHEUS)
+                .build());
+    this.describeMetricReader =
+        MetricReader.create(
+            MetricReader.Options.builder()
+                .setMetricProducerManager(metricProducerManager)
+                .setSpanName(DESCRIBE_METRICS_FOR_PROMETHEUS)
+                .build());
+    this.namespace = namespace;
   }
 
   private static String exceptionMessage(Throwable e) {
diff --git a/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsConfiguration.java b/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsConfiguration.java
index 3e8b95e..b609a88 100644
--- a/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsConfiguration.java
+++ b/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsConfiguration.java
@@ -18,7 +18,6 @@
 
 import com.google.auto.value.AutoValue;
 import io.prometheus.client.CollectorRegistry;
-import javax.annotation.Nullable;
 import javax.annotation.concurrent.Immutable;
 
 /**
@@ -38,17 +37,26 @@
    * @return the Prometheus {@code CollectorRegistry}.
    * @since 0.13
    */
-  @Nullable
   public abstract CollectorRegistry getRegistry();
 
   /**
+   * Returns the namespace used for Prometheus metrics.
+   *
+   * @return the namespace.
+   * @since 0.21
+   */
+  public abstract String getNamespace();
+
+  /**
    * Returns a new {@link Builder}.
    *
    * @return a {@code Builder}.
    * @since 0.13
    */
   public static Builder builder() {
-    return new AutoValue_PrometheusStatsConfiguration.Builder();
+    return new AutoValue_PrometheusStatsConfiguration.Builder()
+        .setRegistry(CollectorRegistry.defaultRegistry)
+        .setNamespace("");
   }
 
   /**
@@ -71,6 +79,15 @@
     public abstract Builder setRegistry(CollectorRegistry registry);
 
     /**
+     * Sets the namespace used for Prometheus metrics.
+     *
+     * @param namespace the namespace.
+     * @return this.
+     * @since 0.21
+     */
+    public abstract Builder setNamespace(String namespace);
+
+    /**
      * Builds a new {@link PrometheusStatsConfiguration} with current settings.
      *
      * @return a {@code PrometheusStatsConfiguration}.
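A minimal sketch of using the new namespace option, assuming the existing `PrometheusStatsCollector.createAndRegister(PrometheusStatsConfiguration)` entry point (not part of this hunk):

```java
import io.opencensus.exporter.stats.prometheus.PrometheusStatsCollector;
import io.opencensus.exporter.stats.prometheus.PrometheusStatsConfiguration;
import io.prometheus.client.CollectorRegistry;

public final class PrometheusNamespaceSetup {
  public static void main(String[] args) {
    // Registry and namespace now have non-null defaults (defaultRegistry and ""),
    // so only the values being overridden need to be set.
    PrometheusStatsCollector.createAndRegister(
        PrometheusStatsConfiguration.builder()
            .setRegistry(CollectorRegistry.defaultRegistry)
            .setNamespace("myorg") // metrics are exported as "myorg_<metric name>"
            .build());
  }
}
```

Per the tests in the next file, the namespace is joined to the metric name with `_`, and a trailing `/` in the namespace is sanitized to `_`.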
diff --git a/exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtilsTest.java b/exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtilsTest.java
index ca8315b..646d14b 100644
--- a/exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtilsTest.java
+++ b/exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtilsTest.java
@@ -18,41 +18,30 @@
 
 import static com.google.common.truth.Truth.assertThat;
 import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.LABEL_NAME_BUCKET_BOUND;
+import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.LABEL_NAME_QUANTILE;
 import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.SAMPLE_SUFFIX_BUCKET;
 import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.SAMPLE_SUFFIX_COUNT;
 import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.SAMPLE_SUFFIX_SUM;
 import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.convertToLabelNames;
 
-import com.google.common.collect.ImmutableMap;
-import io.opencensus.common.Duration;
 import io.opencensus.common.Timestamp;
-import io.opencensus.stats.Aggregation.Count;
-import io.opencensus.stats.Aggregation.Distribution;
-import io.opencensus.stats.Aggregation.LastValue;
-import io.opencensus.stats.Aggregation.Mean;
-import io.opencensus.stats.Aggregation.Sum;
-import io.opencensus.stats.AggregationData.CountData;
-import io.opencensus.stats.AggregationData.DistributionData;
-import io.opencensus.stats.AggregationData.LastValueDataDouble;
-import io.opencensus.stats.AggregationData.LastValueDataLong;
-import io.opencensus.stats.AggregationData.MeanData;
-import io.opencensus.stats.AggregationData.SumDataDouble;
-import io.opencensus.stats.AggregationData.SumDataLong;
-import io.opencensus.stats.BucketBoundaries;
-import io.opencensus.stats.Measure.MeasureDouble;
-import io.opencensus.stats.View;
-import io.opencensus.stats.View.AggregationWindow.Cumulative;
-import io.opencensus.stats.View.AggregationWindow.Interval;
-import io.opencensus.stats.ViewData;
-import io.opencensus.stats.ViewData.AggregationWindowData.CumulativeData;
-import io.opencensus.stats.ViewData.AggregationWindowData.IntervalData;
-import io.opencensus.tags.TagKey;
-import io.opencensus.tags.TagValue;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Distribution.Bucket;
+import io.opencensus.metrics.export.Distribution.BucketOptions;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.Summary;
+import io.opencensus.metrics.export.Summary.Snapshot;
+import io.opencensus.metrics.export.Summary.Snapshot.ValueAtPercentile;
+import io.opencensus.metrics.export.Value;
 import io.prometheus.client.Collector.MetricFamilySamples;
 import io.prometheus.client.Collector.MetricFamilySamples.Sample;
 import io.prometheus.client.Collector.Type;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.List;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -62,63 +51,112 @@
 /** Unit tests for {@link PrometheusExportUtils}. */
 @RunWith(JUnit4.class)
 public class PrometheusExportUtilsTest {
-
   @Rule public final ExpectedException thrown = ExpectedException.none();
 
-  private static final Duration ONE_SECOND = Duration.create(1, 0);
-  private static final Cumulative CUMULATIVE = Cumulative.create();
-  private static final Interval INTERVAL = Interval.create(ONE_SECOND);
-  private static final Sum SUM = Sum.create();
-  private static final Count COUNT = Count.create();
-  private static final Mean MEAN = Mean.create();
-  private static final BucketBoundaries BUCKET_BOUNDARIES =
-      BucketBoundaries.create(Arrays.asList(-5.0, 0.0, 5.0));
-  private static final Distribution DISTRIBUTION = Distribution.create(BUCKET_BOUNDARIES);
-  private static final LastValue LAST_VALUE = LastValue.create();
-  private static final View.Name VIEW_NAME_1 = View.Name.create("view1");
-  private static final View.Name VIEW_NAME_2 = View.Name.create("view2");
-  private static final View.Name VIEW_NAME_3 = View.Name.create("view-3");
-  private static final View.Name VIEW_NAME_4 = View.Name.create("-view4");
-  private static final String DESCRIPTION = "View description";
-  private static final MeasureDouble MEASURE_DOUBLE =
-      MeasureDouble.create("measure", "description", "1");
-  private static final TagKey K1 = TagKey.create("k1");
-  private static final TagKey K2 = TagKey.create("k2");
-  private static final TagKey K3 = TagKey.create("k-3");
-  private static final TagKey TAG_KEY_LE = TagKey.create(LABEL_NAME_BUCKET_BOUND);
-  private static final TagValue V1 = TagValue.create("v1");
-  private static final TagValue V2 = TagValue.create("v2");
-  private static final TagValue V3 = TagValue.create("v-3");
-  private static final SumDataDouble SUM_DATA_DOUBLE = SumDataDouble.create(-5.5);
-  private static final SumDataLong SUM_DATA_LONG = SumDataLong.create(123456789);
-  private static final CountData COUNT_DATA = CountData.create(12345);
-  private static final MeanData MEAN_DATA = MeanData.create(3.4, 22);
-  private static final DistributionData DISTRIBUTION_DATA =
-      DistributionData.create(4.4, 5, -3.2, 15.7, 135.22, Arrays.asList(0L, 2L, 2L, 1L));
-  private static final LastValueDataDouble LAST_VALUE_DATA_DOUBLE = LastValueDataDouble.create(7.9);
-  private static final LastValueDataLong LAST_VALUE_DATA_LONG = LastValueDataLong.create(66666666);
-  private static final View VIEW1 =
-      View.create(
-          VIEW_NAME_1, DESCRIPTION, MEASURE_DOUBLE, COUNT, Arrays.asList(K1, K2), CUMULATIVE);
-  private static final View VIEW2 =
-      View.create(VIEW_NAME_2, DESCRIPTION, MEASURE_DOUBLE, MEAN, Arrays.asList(K3), CUMULATIVE);
-  private static final View VIEW3 =
-      View.create(
-          VIEW_NAME_3, DESCRIPTION, MEASURE_DOUBLE, DISTRIBUTION, Arrays.asList(K1), CUMULATIVE);
-  private static final View VIEW4 =
-      View.create(VIEW_NAME_4, DESCRIPTION, MEASURE_DOUBLE, COUNT, Arrays.asList(K1), INTERVAL);
-  private static final View DISTRIBUTION_VIEW_WITH_LE_KEY =
-      View.create(
-          VIEW_NAME_1,
-          DESCRIPTION,
-          MEASURE_DOUBLE,
-          DISTRIBUTION,
-          Arrays.asList(K1, TAG_KEY_LE),
-          CUMULATIVE);
-  private static final CumulativeData CUMULATIVE_DATA =
-      CumulativeData.create(Timestamp.fromMillis(1000), Timestamp.fromMillis(2000));
-  private static final IntervalData INTERVAL_DATA = IntervalData.create(Timestamp.fromMillis(1000));
-  private static final String SAMPLE_NAME = "view";
+  private static final String METRIC_NAME = "my_metric";
+  private static final String METRIC_NAME2 = "my_metric2";
+  private static final String METRIC_NAME3 = "my_metric3";
+  private static final String METRIC_DESCRIPTION = "metric description";
+  private static final String METRIC_UNIT = "us";
+  private static final String KEY_DESCRIPTION = "key description";
+
+  private static final LabelKey K1_LABEL_KEY = LabelKey.create("k1", KEY_DESCRIPTION);
+  private static final LabelKey K2_LABEL_KEY = LabelKey.create("k2", KEY_DESCRIPTION);
+  private static final LabelKey K3_LABEL_KEY = LabelKey.create("k-3", KEY_DESCRIPTION);
+  private static final LabelValue V1_LABEL_VALUE = LabelValue.create("v1");
+  private static final LabelValue V2_LABEL_VALUE = LabelValue.create("v2");
+  private static final LabelValue V3_LABEL_VALUE = LabelValue.create("v-3");
+  private static final List<LabelKey> LABEL_KEY = Arrays.asList(K1_LABEL_KEY, K2_LABEL_KEY);
+  private static final List<LabelValue> LABEL_VALUE = Arrays.asList(V1_LABEL_VALUE, V2_LABEL_VALUE);
+  private static final List<LabelKey> LE_LABEL_KEY =
+      Arrays.asList(K1_LABEL_KEY, LabelKey.create(LABEL_NAME_BUCKET_BOUND, KEY_DESCRIPTION));
+  private static final List<LabelKey> QUANTILE_LABEL_KEY =
+      Arrays.asList(K1_LABEL_KEY, LabelKey.create(LABEL_NAME_QUANTILE, KEY_DESCRIPTION));
+
+  private static final io.opencensus.metrics.export.Distribution DISTRIBUTION =
+      io.opencensus.metrics.export.Distribution.create(
+          5,
+          22,
+          135.22,
+          BucketOptions.explicitOptions(Arrays.asList(1.0, 2.0, 5.0)),
+          Arrays.asList(Bucket.create(0), Bucket.create(2), Bucket.create(2), Bucket.create(1)));
+  private static final Summary SUMMARY =
+      Summary.create(
+          22L,
+          74.8,
+          Snapshot.create(
+              10L, 87.07, Collections.singletonList(ValueAtPercentile.create(99, 10.2))));
+  private static final Summary SUMMARY_2 =
+      Summary.create(
+          22L,
+          74.8,
+          Snapshot.create(
+              10L,
+              87.07,
+              Arrays.asList(
+                  ValueAtPercentile.create(99.5, 8.2), ValueAtPercentile.create(99, 10.2))));
+  private static final Value DOUBLE_VALUE = Value.doubleValue(-5.5);
+  private static final Value LONG_VALUE = Value.longValue(123456789);
+  private static final Value DISTRIBUTION_VALUE = Value.distributionValue(DISTRIBUTION);
+  private static final Value SUMMARY_VALUE = Value.summaryValue(SUMMARY);
+  private static final Value SUMMARY_VALUE_2 = Value.summaryValue(SUMMARY_2);
+
+  private static final MetricDescriptor CUMULATIVE_METRIC_DESCRIPTOR =
+      MetricDescriptor.create(
+          METRIC_NAME,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          MetricDescriptor.Type.CUMULATIVE_INT64,
+          LABEL_KEY);
+  private static final MetricDescriptor SUMMARY_METRIC_DESCRIPTOR =
+      MetricDescriptor.create(
+          METRIC_NAME2,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          MetricDescriptor.Type.SUMMARY,
+          Collections.singletonList(K3_LABEL_KEY));
+  private static final MetricDescriptor HISTOGRAM_METRIC_DESCRIPTOR =
+      MetricDescriptor.create(
+          METRIC_NAME3,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          MetricDescriptor.Type.CUMULATIVE_DISTRIBUTION,
+          Collections.singletonList(K1_LABEL_KEY));
+  private static final MetricDescriptor LE_LABEL_METRIC_DESCRIPTOR =
+      MetricDescriptor.create(
+          METRIC_NAME,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          MetricDescriptor.Type.CUMULATIVE_DISTRIBUTION,
+          LE_LABEL_KEY);
+  private static final MetricDescriptor QUANTILE_LABEL_METRIC_DESCRIPTOR =
+      MetricDescriptor.create(
+          METRIC_NAME,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          MetricDescriptor.Type.SUMMARY,
+          QUANTILE_LABEL_KEY);
+
+  private static final Timestamp TIMESTAMP = Timestamp.fromMillis(3000);
+  private static final Point LONG_POINT = Point.create(LONG_VALUE, TIMESTAMP);
+  private static final Point DISTRIBUTION_POINT = Point.create(DISTRIBUTION_VALUE, TIMESTAMP);
+  private static final Point SUMMARY_POINT = Point.create(SUMMARY_VALUE, TIMESTAMP);
+
+  private static final io.opencensus.metrics.export.TimeSeries LONG_TIME_SERIES =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(LABEL_VALUE, LONG_POINT, null);
+  private static final io.opencensus.metrics.export.TimeSeries DISTRIBUTION_TIME_SERIES =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+          Collections.singletonList(V3_LABEL_VALUE), DISTRIBUTION_POINT, null);
+  private static final io.opencensus.metrics.export.TimeSeries SUMMARY_TIME_SERIES =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+          Collections.singletonList(V1_LABEL_VALUE), SUMMARY_POINT, null);
+
+  private static final Metric LONG_METRIC =
+      Metric.createWithOneTimeSeries(CUMULATIVE_METRIC_DESCRIPTOR, LONG_TIME_SERIES);
+  private static final Metric DISTRIBUTION_METRIC =
+      Metric.createWithOneTimeSeries(HISTOGRAM_METRIC_DESCRIPTOR, DISTRIBUTION_TIME_SERIES);
+  private static final Metric SUMMARY_METRIC =
+      Metric.createWithOneTimeSeries(SUMMARY_METRIC_DESCRIPTOR, SUMMARY_TIME_SERIES);
 
   @Test
   public void testConstants() {
@@ -130,123 +168,162 @@
 
   @Test
   public void getType() {
-    assertThat(PrometheusExportUtils.getType(COUNT, INTERVAL)).isEqualTo(Type.UNTYPED);
-    assertThat(PrometheusExportUtils.getType(COUNT, CUMULATIVE)).isEqualTo(Type.COUNTER);
-    assertThat(PrometheusExportUtils.getType(DISTRIBUTION, CUMULATIVE)).isEqualTo(Type.HISTOGRAM);
-    assertThat(PrometheusExportUtils.getType(SUM, CUMULATIVE)).isEqualTo(Type.UNTYPED);
-    assertThat(PrometheusExportUtils.getType(MEAN, CUMULATIVE)).isEqualTo(Type.SUMMARY);
-    assertThat(PrometheusExportUtils.getType(LAST_VALUE, CUMULATIVE)).isEqualTo(Type.GAUGE);
+    assertThat(PrometheusExportUtils.getType(MetricDescriptor.Type.CUMULATIVE_INT64))
+        .isEqualTo(Type.COUNTER);
+    assertThat(PrometheusExportUtils.getType(MetricDescriptor.Type.CUMULATIVE_DOUBLE))
+        .isEqualTo(Type.COUNTER);
+    assertThat(PrometheusExportUtils.getType(MetricDescriptor.Type.CUMULATIVE_DISTRIBUTION))
+        .isEqualTo(Type.HISTOGRAM);
+    assertThat(PrometheusExportUtils.getType(MetricDescriptor.Type.SUMMARY))
+        .isEqualTo(Type.SUMMARY);
+    assertThat(PrometheusExportUtils.getType(MetricDescriptor.Type.GAUGE_INT64))
+        .isEqualTo(Type.GAUGE);
+    assertThat(PrometheusExportUtils.getType(MetricDescriptor.Type.GAUGE_DOUBLE))
+        .isEqualTo(Type.GAUGE);
+    assertThat(PrometheusExportUtils.getType(MetricDescriptor.Type.GAUGE_DISTRIBUTION))
+        .isEqualTo(Type.HISTOGRAM);
   }
 
   @Test
   public void createDescribableMetricFamilySamples() {
-    assertThat(PrometheusExportUtils.createDescribableMetricFamilySamples(VIEW1))
+    assertThat(
+            PrometheusExportUtils.createDescribableMetricFamilySamples(
+                CUMULATIVE_METRIC_DESCRIPTOR, ""))
         .isEqualTo(
             new MetricFamilySamples(
-                "view1", Type.COUNTER, DESCRIPTION, Collections.<Sample>emptyList()));
-    assertThat(PrometheusExportUtils.createDescribableMetricFamilySamples(VIEW2))
+                METRIC_NAME, Type.COUNTER, METRIC_DESCRIPTION, Collections.<Sample>emptyList()));
+    assertThat(
+            PrometheusExportUtils.createDescribableMetricFamilySamples(
+                SUMMARY_METRIC_DESCRIPTOR, ""))
         .isEqualTo(
             new MetricFamilySamples(
-                "view2", Type.SUMMARY, DESCRIPTION, Collections.<Sample>emptyList()));
-    assertThat(PrometheusExportUtils.createDescribableMetricFamilySamples(VIEW3))
+                METRIC_NAME2, Type.SUMMARY, METRIC_DESCRIPTION, Collections.<Sample>emptyList()));
+    assertThat(
+            PrometheusExportUtils.createDescribableMetricFamilySamples(
+                HISTOGRAM_METRIC_DESCRIPTOR, ""))
         .isEqualTo(
             new MetricFamilySamples(
-                "view_3", Type.HISTOGRAM, DESCRIPTION, Collections.<Sample>emptyList()));
-    assertThat(PrometheusExportUtils.createDescribableMetricFamilySamples(VIEW4))
+                METRIC_NAME3, Type.HISTOGRAM, METRIC_DESCRIPTION, Collections.<Sample>emptyList()));
+  }
+
+  @Test
+  public void createDescribableMetricFamilySamples_WithNamespace() {
+    String namespace1 = "myorg";
+    assertThat(
+            PrometheusExportUtils.createDescribableMetricFamilySamples(
+                CUMULATIVE_METRIC_DESCRIPTOR, namespace1))
         .isEqualTo(
             new MetricFamilySamples(
-                "_view4", Type.UNTYPED, DESCRIPTION, Collections.<Sample>emptyList()));
+                namespace1 + '_' + METRIC_NAME,
+                Type.COUNTER,
+                METRIC_DESCRIPTION,
+                Collections.<Sample>emptyList()));
+
+    String namespace2 = "opencensus/";
+    assertThat(
+            PrometheusExportUtils.createDescribableMetricFamilySamples(
+                CUMULATIVE_METRIC_DESCRIPTOR, namespace2))
+        .isEqualTo(
+            new MetricFamilySamples(
+                "opencensus_" + METRIC_NAME,
+                Type.COUNTER,
+                METRIC_DESCRIPTION,
+                Collections.<Sample>emptyList()));
   }
 
   @Test
   public void getSamples() {
     assertThat(
             PrometheusExportUtils.getSamples(
-                SAMPLE_NAME,
-                convertToLabelNames(Arrays.asList(K1, K2)),
-                Arrays.asList(V1, V2),
-                SUM_DATA_DOUBLE,
-                SUM))
+                METRIC_NAME, convertToLabelNames(LABEL_KEY), LABEL_VALUE, DOUBLE_VALUE))
         .containsExactly(
-            new Sample(SAMPLE_NAME, Arrays.asList("k1", "k2"), Arrays.asList("v1", "v2"), -5.5));
+            new Sample(METRIC_NAME, Arrays.asList("k1", "k2"), Arrays.asList("v1", "v2"), -5.5));
     assertThat(
             PrometheusExportUtils.getSamples(
-                SAMPLE_NAME,
-                convertToLabelNames(Arrays.asList(K3)),
-                Arrays.asList(V3),
-                SUM_DATA_LONG,
-                SUM))
+                METRIC_NAME,
+                convertToLabelNames(Collections.singletonList(K3_LABEL_KEY)),
+                Collections.singletonList(V3_LABEL_VALUE),
+                LONG_VALUE))
         .containsExactly(
-            new Sample(SAMPLE_NAME, Arrays.asList("k_3"), Arrays.asList("v-3"), 123456789));
+            new Sample(
+                METRIC_NAME,
+                Collections.singletonList("k_3"),
+                Collections.singletonList("v-3"),
+                123456789));
     assertThat(
             PrometheusExportUtils.getSamples(
-                SAMPLE_NAME,
-                convertToLabelNames(Arrays.asList(K1, K3)),
-                Arrays.asList(V1, null),
-                COUNT_DATA,
-                COUNT))
+                METRIC_NAME,
+                convertToLabelNames(Arrays.asList(K1_LABEL_KEY, K3_LABEL_KEY)),
+                Arrays.asList(V1_LABEL_VALUE, null),
+                LONG_VALUE))
         .containsExactly(
-            new Sample(SAMPLE_NAME, Arrays.asList("k1", "k_3"), Arrays.asList("v1", ""), 12345));
+            new Sample(
+                METRIC_NAME, Arrays.asList("k1", "k_3"), Arrays.asList("v1", ""), 123456789));
     assertThat(
             PrometheusExportUtils.getSamples(
-                SAMPLE_NAME,
-                convertToLabelNames(Arrays.asList(K3)),
-                Arrays.asList(V3),
-                MEAN_DATA,
-                MEAN))
+                METRIC_NAME,
+                convertToLabelNames(Collections.singletonList(K3_LABEL_KEY)),
+                Collections.singletonList(V3_LABEL_VALUE),
+                SUMMARY_VALUE_2))
         .containsExactly(
-            new Sample(SAMPLE_NAME + "_count", Arrays.asList("k_3"), Arrays.asList("v-3"), 22),
-            new Sample(SAMPLE_NAME + "_sum", Arrays.asList("k_3"), Arrays.asList("v-3"), 74.8))
+            new Sample(
+                METRIC_NAME + "_count",
+                Collections.singletonList("k_3"),
+                Collections.singletonList("v-3"),
+                22),
+            new Sample(
+                METRIC_NAME + "_sum",
+                Collections.singletonList("k_3"),
+                Collections.singletonList("v-3"),
+                74.8),
+            new Sample(
+                METRIC_NAME,
+                Arrays.asList("k_3", LABEL_NAME_QUANTILE),
+                Arrays.asList("v-3", "0.995"),
+                8.2),
+            new Sample(
+                METRIC_NAME,
+                Arrays.asList("k_3", LABEL_NAME_QUANTILE),
+                Arrays.asList("v-3", "0.99"),
+                10.2))
         .inOrder();
     assertThat(
             PrometheusExportUtils.getSamples(
-                SAMPLE_NAME,
-                convertToLabelNames(Arrays.asList(K1)),
-                Arrays.asList(V1),
-                DISTRIBUTION_DATA,
-                DISTRIBUTION))
+                METRIC_NAME,
+                convertToLabelNames(Collections.singletonList(K1_LABEL_KEY)),
+                Collections.singletonList(V1_LABEL_VALUE),
+                DISTRIBUTION_VALUE))
         .containsExactly(
             new Sample(
-                SAMPLE_NAME + "_bucket", Arrays.asList("k1", "le"), Arrays.asList("v1", "-5.0"), 0),
+                METRIC_NAME + "_bucket", Arrays.asList("k1", "le"), Arrays.asList("v1", "1.0"), 0),
             new Sample(
-                SAMPLE_NAME + "_bucket", Arrays.asList("k1", "le"), Arrays.asList("v1", "0.0"), 2),
+                METRIC_NAME + "_bucket", Arrays.asList("k1", "le"), Arrays.asList("v1", "2.0"), 2),
             new Sample(
-                SAMPLE_NAME + "_bucket", Arrays.asList("k1", "le"), Arrays.asList("v1", "5.0"), 4),
+                METRIC_NAME + "_bucket", Arrays.asList("k1", "le"), Arrays.asList("v1", "5.0"), 4),
             new Sample(
-                SAMPLE_NAME + "_bucket", Arrays.asList("k1", "le"), Arrays.asList("v1", "+Inf"), 5),
-            new Sample(SAMPLE_NAME + "_count", Arrays.asList("k1"), Arrays.asList("v1"), 5),
-            new Sample(SAMPLE_NAME + "_sum", Arrays.asList("k1"), Arrays.asList("v1"), 22.0))
+                METRIC_NAME + "_bucket", Arrays.asList("k1", "le"), Arrays.asList("v1", "+Inf"), 5),
+            new Sample(
+                METRIC_NAME + "_count",
+                Collections.singletonList("k1"),
+                Collections.singletonList("v1"),
+                5),
+            new Sample(
+                METRIC_NAME + "_sum",
+                Collections.singletonList("k1"),
+                Collections.singletonList("v1"),
+                22.0))
         .inOrder();
-    assertThat(
-            PrometheusExportUtils.getSamples(
-                SAMPLE_NAME,
-                convertToLabelNames(Arrays.asList(K1, K2)),
-                Arrays.asList(V1, V2),
-                LAST_VALUE_DATA_DOUBLE,
-                LAST_VALUE))
-        .containsExactly(
-            new Sample(SAMPLE_NAME, Arrays.asList("k1", "k2"), Arrays.asList("v1", "v2"), 7.9));
-    assertThat(
-            PrometheusExportUtils.getSamples(
-                SAMPLE_NAME,
-                convertToLabelNames(Arrays.asList(K3)),
-                Arrays.asList(V3),
-                LAST_VALUE_DATA_LONG,
-                LAST_VALUE))
-        .containsExactly(
-            new Sample(SAMPLE_NAME, Arrays.asList("k_3"), Arrays.asList("v-3"), 66666666));
   }
 
   @Test
   public void getSamples_KeysAndValuesHaveDifferentSizes() {
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Label names and tag values have different sizes.");
+    thrown.expectMessage("Keys and Values don't have same size.");
     PrometheusExportUtils.getSamples(
-        SAMPLE_NAME,
-        convertToLabelNames(Arrays.asList(K1, K2, K3)),
-        Arrays.asList(V1, V2),
-        DISTRIBUTION_DATA,
-        DISTRIBUTION);
+        METRIC_NAME,
+        convertToLabelNames(Arrays.asList(K1_LABEL_KEY, K3_LABEL_KEY, K3_LABEL_KEY)),
+        Arrays.asList(V1_LABEL_VALUE, V2_LABEL_VALUE),
+        DISTRIBUTION_VALUE);
   }
 
   @Test
@@ -255,72 +332,109 @@
     thrown.expectMessage(
         "Prometheus Histogram cannot have a label named 'le', "
             + "because it is a reserved label for bucket boundaries. "
-            + "Please remove this tag key from your view.");
-    PrometheusExportUtils.createDescribableMetricFamilySamples(DISTRIBUTION_VIEW_WITH_LE_KEY);
+            + "Please remove this key from your view.");
+    PrometheusExportUtils.createDescribableMetricFamilySamples(LE_LABEL_METRIC_DESCRIPTOR, "");
+  }
+
+  @Test
+  public void createDescribableMetricFamilySamples_Summary_DisallowQuantileLabelName() {
+    thrown.expect(IllegalStateException.class);
+    thrown.expectMessage(
+        "Prometheus Summary cannot have a label named 'quantile', "
+            + "because it is a reserved label. Please remove this key from your view.");
+    PrometheusExportUtils.createDescribableMetricFamilySamples(
+        QUANTILE_LABEL_METRIC_DESCRIPTOR, "");
   }
 
   @Test
   public void createMetricFamilySamples() {
-    assertThat(
-            PrometheusExportUtils.createMetricFamilySamples(
-                ViewData.create(
-                    VIEW1, ImmutableMap.of(Arrays.asList(V1, V2), COUNT_DATA), CUMULATIVE_DATA)))
+    assertThat(PrometheusExportUtils.createMetricFamilySamples(LONG_METRIC, ""))
         .isEqualTo(
             new MetricFamilySamples(
-                "view1",
+                METRIC_NAME,
                 Type.COUNTER,
-                DESCRIPTION,
-                Arrays.asList(
+                METRIC_DESCRIPTION,
+                Collections.singletonList(
                     new Sample(
-                        "view1", Arrays.asList("k1", "k2"), Arrays.asList("v1", "v2"), 12345))));
-    assertThat(
-            PrometheusExportUtils.createMetricFamilySamples(
-                ViewData.create(
-                    VIEW2, ImmutableMap.of(Arrays.asList(V1), MEAN_DATA), CUMULATIVE_DATA)))
+                        METRIC_NAME,
+                        Arrays.asList("k1", "k2"),
+                        Arrays.asList("v1", "v2"),
+                        123456789))));
+    assertThat(PrometheusExportUtils.createMetricFamilySamples(SUMMARY_METRIC, ""))
         .isEqualTo(
             new MetricFamilySamples(
-                "view2",
+                METRIC_NAME2,
                 Type.SUMMARY,
-                DESCRIPTION,
-                Arrays.asList(
-                    new Sample("view2_count", Arrays.asList("k_3"), Arrays.asList("v1"), 22),
-                    new Sample("view2_sum", Arrays.asList("k_3"), Arrays.asList("v1"), 74.8))));
-    assertThat(
-            PrometheusExportUtils.createMetricFamilySamples(
-                ViewData.create(
-                    VIEW3, ImmutableMap.of(Arrays.asList(V3), DISTRIBUTION_DATA), CUMULATIVE_DATA)))
-        .isEqualTo(
-            new MetricFamilySamples(
-                "view_3",
-                Type.HISTOGRAM,
-                DESCRIPTION,
+                METRIC_DESCRIPTION,
                 Arrays.asList(
                     new Sample(
-                        "view_3_bucket",
+                        METRIC_NAME2 + "_count",
+                        Collections.singletonList("k_3"),
+                        Collections.singletonList("v1"),
+                        22),
+                    new Sample(
+                        METRIC_NAME2 + "_sum",
+                        Collections.singletonList("k_3"),
+                        Collections.singletonList("v1"),
+                        74.8),
+                    new Sample(
+                        METRIC_NAME2,
+                        Arrays.asList("k_3", LABEL_NAME_QUANTILE),
+                        Arrays.asList("v1", "0.99"),
+                        10.2))));
+    assertThat(PrometheusExportUtils.createMetricFamilySamples(DISTRIBUTION_METRIC, ""))
+        .isEqualTo(
+            new MetricFamilySamples(
+                METRIC_NAME3,
+                Type.HISTOGRAM,
+                METRIC_DESCRIPTION,
+                Arrays.asList(
+                    new Sample(
+                        METRIC_NAME3 + "_bucket",
                         Arrays.asList("k1", "le"),
-                        Arrays.asList("v-3", "-5.0"),
+                        Arrays.asList("v-3", "1.0"),
                         0),
                     new Sample(
-                        "view_3_bucket", Arrays.asList("k1", "le"), Arrays.asList("v-3", "0.0"), 2),
+                        METRIC_NAME3 + "_bucket",
+                        Arrays.asList("k1", "le"),
+                        Arrays.asList("v-3", "2.0"),
+                        2),
                     new Sample(
-                        "view_3_bucket", Arrays.asList("k1", "le"), Arrays.asList("v-3", "5.0"), 4),
+                        METRIC_NAME3 + "_bucket",
+                        Arrays.asList("k1", "le"),
+                        Arrays.asList("v-3", "5.0"),
+                        4),
                     new Sample(
-                        "view_3_bucket",
+                        METRIC_NAME3 + "_bucket",
                         Arrays.asList("k1", "le"),
                         Arrays.asList("v-3", "+Inf"),
                         5),
-                    new Sample("view_3_count", Arrays.asList("k1"), Arrays.asList("v-3"), 5),
-                    new Sample("view_3_sum", Arrays.asList("k1"), Arrays.asList("v-3"), 22.0))));
-    assertThat(
-            PrometheusExportUtils.createMetricFamilySamples(
-                ViewData.create(
-                    VIEW4, ImmutableMap.of(Arrays.asList(V1), COUNT_DATA), INTERVAL_DATA)))
+                    new Sample(
+                        METRIC_NAME3 + "_count",
+                        Collections.singletonList("k1"),
+                        Collections.singletonList("v-3"),
+                        5),
+                    new Sample(
+                        METRIC_NAME3 + "_sum",
+                        Collections.singletonList("k1"),
+                        Collections.singletonList("v-3"),
+                        22.0))));
+  }
+
+  @Test
+  public void createMetricFamilySamples_WithNamespace() {
+    String namespace = "opencensus_";
+    assertThat(PrometheusExportUtils.createMetricFamilySamples(LONG_METRIC, namespace))
         .isEqualTo(
             new MetricFamilySamples(
-                "_view4",
-                Type.UNTYPED,
-                DESCRIPTION,
-                Arrays.asList(
-                    new Sample("_view4", Arrays.asList("k1"), Arrays.asList("v1"), 12345))));
+                namespace + METRIC_NAME,
+                Type.COUNTER,
+                METRIC_DESCRIPTION,
+                Collections.singletonList(
+                    new Sample(
+                        namespace + METRIC_NAME,
+                        Arrays.asList("k1", "k2"),
+                        Arrays.asList("v1", "v2"),
+                        123456789))));
   }
 }
diff --git a/exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollectorTest.java b/exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollectorTest.java
index 3bd9845..1c1e131 100644
--- a/exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollectorTest.java
+++ b/exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollectorTest.java
@@ -20,26 +20,26 @@
 import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.LABEL_NAME_BUCKET_BOUND;
 import static org.mockito.Mockito.doReturn;
 
-import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import io.opencensus.common.Timestamp;
-import io.opencensus.stats.Aggregation.Distribution;
-import io.opencensus.stats.AggregationData.DistributionData;
-import io.opencensus.stats.BucketBoundaries;
-import io.opencensus.stats.Measure.MeasureDouble;
-import io.opencensus.stats.Stats;
-import io.opencensus.stats.View;
-import io.opencensus.stats.View.AggregationWindow.Cumulative;
-import io.opencensus.stats.ViewData;
-import io.opencensus.stats.ViewData.AggregationWindowData.CumulativeData;
-import io.opencensus.stats.ViewManager;
-import io.opencensus.tags.TagKey;
-import io.opencensus.tags.TagValue;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Distribution;
+import io.opencensus.metrics.export.Distribution.Bucket;
+import io.opencensus.metrics.export.Distribution.BucketOptions;
+import io.opencensus.metrics.export.ExportComponent;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricProducer;
+import io.opencensus.metrics.export.MetricProducerManager;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.Value;
 import io.prometheus.client.Collector.MetricFamilySamples;
 import io.prometheus.client.Collector.MetricFamilySamples.Sample;
 import io.prometheus.client.Collector.Type;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.List;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -50,119 +50,150 @@
 /** Unit tests for {@link PrometheusStatsCollector}. */
 @RunWith(JUnit4.class)
 public class PrometheusStatsCollectorTest {
+  private static final String METRIC_NAME = "my_metric";
+  private static final String METRIC_DESCRIPTION = "metric description";
+  private static final String METRIC_UNIT = "us";
+  private static final String KEY_DESCRIPTION = "key description";
+  private static final LabelKey K1_LABEL_KEY = LabelKey.create("k1", KEY_DESCRIPTION);
+  private static final LabelKey K2_LABEL_KEY = LabelKey.create("k2", KEY_DESCRIPTION);
+  private static final LabelValue V1_LABEL_VALUE = LabelValue.create("v1");
+  private static final LabelValue V2_LABEL_VALUE = LabelValue.create("v2");
+  private static final List<LabelKey> LABEL_KEY = Arrays.asList(K1_LABEL_KEY, K2_LABEL_KEY);
+  private static final List<LabelValue> LABEL_VALUE = Arrays.asList(V1_LABEL_VALUE, V2_LABEL_VALUE);
+  private static final List<LabelKey> LE_LABEL_KEY =
+      Arrays.asList(K1_LABEL_KEY, LabelKey.create(LABEL_NAME_BUCKET_BOUND, KEY_DESCRIPTION));
+  private static final MetricDescriptor METRIC_DESCRIPTOR =
+      MetricDescriptor.create(
+          METRIC_NAME,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          MetricDescriptor.Type.CUMULATIVE_DISTRIBUTION,
+          LABEL_KEY);
+  private static final MetricDescriptor LE_LABEL_METRIC_DESCRIPTOR =
+      MetricDescriptor.create(
+          METRIC_NAME,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          MetricDescriptor.Type.CUMULATIVE_DISTRIBUTION,
+          LE_LABEL_KEY);
+  private static final Distribution DISTRIBUTION =
+      Distribution.create(
+          5,
+          22,
+          135.22,
+          BucketOptions.explicitOptions(Arrays.asList(1.0, 2.0, 5.0)),
+          Arrays.asList(Bucket.create(0), Bucket.create(2), Bucket.create(2), Bucket.create(1)));
+  private static final Value DISTRIBUTION_VALUE = Value.distributionValue(DISTRIBUTION);
+  private static final Timestamp TIMESTAMP = Timestamp.fromMillis(3000);
+  private static final Point DISTRIBUTION_POINT = Point.create(DISTRIBUTION_VALUE, TIMESTAMP);
+  private static final io.opencensus.metrics.export.TimeSeries DISTRIBUTION_TIME_SERIES =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+          LABEL_VALUE, DISTRIBUTION_POINT, null);
+  private static final Metric METRIC =
+      Metric.createWithOneTimeSeries(METRIC_DESCRIPTOR, DISTRIBUTION_TIME_SERIES);
+  private static final Metric LE_LABEL_METRIC =
+      Metric.createWithOneTimeSeries(LE_LABEL_METRIC_DESCRIPTOR, DISTRIBUTION_TIME_SERIES);
 
-  private static final Cumulative CUMULATIVE = Cumulative.create();
-  private static final BucketBoundaries BUCKET_BOUNDARIES =
-      BucketBoundaries.create(Arrays.asList(-5.0, 0.0, 5.0));
-  private static final Distribution DISTRIBUTION = Distribution.create(BUCKET_BOUNDARIES);
-  private static final View.Name VIEW_NAME = View.Name.create("view1");
-  private static final String DESCRIPTION = "View description";
-  private static final MeasureDouble MEASURE_DOUBLE =
-      MeasureDouble.create("measure", "description", "1");
-  private static final TagKey K1 = TagKey.create("k1");
-  private static final TagKey K2 = TagKey.create("k2");
-  private static final TagKey LE_TAG_KEY = TagKey.create(LABEL_NAME_BUCKET_BOUND);
-  private static final TagValue V1 = TagValue.create("v1");
-  private static final TagValue V2 = TagValue.create("v2");
-  private static final DistributionData DISTRIBUTION_DATA =
-      DistributionData.create(4.4, 5, -3.2, 15.7, 135.22, Arrays.asList(0L, 2L, 2L, 1L));
-  private static final View VIEW =
-      View.create(
-          VIEW_NAME, DESCRIPTION, MEASURE_DOUBLE, DISTRIBUTION, Arrays.asList(K1, K2), CUMULATIVE);
-  private static final View VIEW_WITH_LE_TAG_KEY =
-      View.create(
-          VIEW_NAME,
-          DESCRIPTION,
-          MEASURE_DOUBLE,
-          DISTRIBUTION,
-          Arrays.asList(K1, LE_TAG_KEY),
-          CUMULATIVE);
-  private static final CumulativeData CUMULATIVE_DATA =
-      CumulativeData.create(Timestamp.fromMillis(1000), Timestamp.fromMillis(2000));
-  private static final ViewData VIEW_DATA =
-      ViewData.create(
-          VIEW, ImmutableMap.of(Arrays.asList(V1, V2), DISTRIBUTION_DATA), CUMULATIVE_DATA);
-  private static final ViewData VIEW_DATA_WITH_LE_TAG_KEY =
-      ViewData.create(
-          VIEW_WITH_LE_TAG_KEY,
-          ImmutableMap.of(Arrays.asList(V1, V2), DISTRIBUTION_DATA),
-          CUMULATIVE_DATA);
-
-  @Mock private ViewManager mockViewManager;
+  @Mock private MetricProducerManager mockMetricProducerManager;
+  @Mock private MetricProducer mockMetricProducer;
 
   @Before
   public void setUp() {
     MockitoAnnotations.initMocks(this);
-    doReturn(ImmutableSet.of(VIEW)).when(mockViewManager).getAllExportedViews();
-    doReturn(VIEW_DATA).when(mockViewManager).getView(VIEW_NAME);
+    doReturn(ImmutableSet.of(mockMetricProducer))
+        .when(mockMetricProducerManager)
+        .getAllMetricProducer();
+    doReturn(Collections.singletonList(METRIC)).when(mockMetricProducer).getMetrics();
   }
 
   @Test
   public void testCollect() {
-    PrometheusStatsCollector collector = new PrometheusStatsCollector(mockViewManager);
-    String name = "view1";
+    PrometheusStatsCollector collector =
+        new PrometheusStatsCollector(mockMetricProducerManager, "");
     assertThat(collector.collect())
         .containsExactly(
             new MetricFamilySamples(
-                "view1",
+                METRIC_NAME,
                 Type.HISTOGRAM,
-                "View description",
+                METRIC_DESCRIPTION,
                 Arrays.asList(
                     new Sample(
-                        name + "_bucket",
+                        METRIC_NAME + "_bucket",
                         Arrays.asList("k1", "k2", "le"),
-                        Arrays.asList("v1", "v2", "-5.0"),
+                        Arrays.asList("v1", "v2", "1.0"),
                         0),
                     new Sample(
-                        name + "_bucket",
+                        METRIC_NAME + "_bucket",
                         Arrays.asList("k1", "k2", "le"),
-                        Arrays.asList("v1", "v2", "0.0"),
+                        Arrays.asList("v1", "v2", "2.0"),
                         2),
                     new Sample(
-                        name + "_bucket",
+                        METRIC_NAME + "_bucket",
                         Arrays.asList("k1", "k2", "le"),
                         Arrays.asList("v1", "v2", "5.0"),
                         4),
                     new Sample(
-                        name + "_bucket",
+                        METRIC_NAME + "_bucket",
                         Arrays.asList("k1", "k2", "le"),
                         Arrays.asList("v1", "v2", "+Inf"),
                         5),
                     new Sample(
-                        name + "_count", Arrays.asList("k1", "k2"), Arrays.asList("v1", "v2"), 5),
+                        METRIC_NAME + "_count",
+                        Arrays.asList("k1", "k2"),
+                        Arrays.asList("v1", "v2"),
+                        5),
                     new Sample(
-                        name + "_sum",
+                        METRIC_NAME + "_sum",
                         Arrays.asList("k1", "k2"),
                         Arrays.asList("v1", "v2"),
                         22.0))));
   }
 
   @Test
-  public void testCollect_SkipDistributionViewWithLeTagKey() {
-    doReturn(ImmutableSet.of(VIEW_WITH_LE_TAG_KEY)).when(mockViewManager).getAllExportedViews();
-    doReturn(VIEW_DATA_WITH_LE_TAG_KEY).when(mockViewManager).getView(VIEW_NAME);
-    PrometheusStatsCollector collector = new PrometheusStatsCollector(mockViewManager);
+  public void testCollect_SkipDistributionMetricWithLeLabelKey() {
+    doReturn(Collections.singletonList(LE_LABEL_METRIC)).when(mockMetricProducer).getMetrics();
+    PrometheusStatsCollector collector =
+        new PrometheusStatsCollector(mockMetricProducerManager, "");
     assertThat(collector.collect()).isEmpty();
   }
 
   @Test
   public void testDescribe() {
-    PrometheusStatsCollector collector = new PrometheusStatsCollector(mockViewManager);
+    PrometheusStatsCollector collector =
+        new PrometheusStatsCollector(mockMetricProducerManager, "");
     assertThat(collector.describe())
         .containsExactly(
             new MetricFamilySamples(
-                "view1", Type.HISTOGRAM, "View description", Collections.<Sample>emptyList()));
+                METRIC_NAME, Type.HISTOGRAM, METRIC_DESCRIPTION, Collections.<Sample>emptyList()));
+  }
+
+  @Test
+  public void testDescribe_WithNamespace() {
+    String namespace = "myorg";
+    PrometheusStatsCollector collector =
+        new PrometheusStatsCollector(mockMetricProducerManager, namespace);
+    assertThat(collector.describe())
+        .containsExactly(
+            new MetricFamilySamples(
+                namespace + '_' + METRIC_NAME,
+                Type.HISTOGRAM,
+                METRIC_DESCRIPTION,
+                Collections.<Sample>emptyList()));
   }
 
   @Test
   public void testCollect_WithNoopViewManager() {
-    PrometheusStatsCollector collector = new PrometheusStatsCollector(Stats.getViewManager());
+    PrometheusStatsCollector collector =
+        new PrometheusStatsCollector(
+            ExportComponent.newNoopExportComponent().getMetricProducerManager(), "");
     assertThat(collector.collect()).isEmpty();
   }
 
   @Test
   public void testDescribe_WithNoopViewManager() {
-    PrometheusStatsCollector collector = new PrometheusStatsCollector(Stats.getViewManager());
+    PrometheusStatsCollector collector =
+        new PrometheusStatsCollector(
+            ExportComponent.newNoopExportComponent().getMetricProducerManager(), "");
     assertThat(collector.describe()).isEmpty();
   }
 }
diff --git a/exporters/stats/signalfx/README.md b/exporters/stats/signalfx/README.md
index 7c61f89..d2acb80 100644
--- a/exporters/stats/signalfx/README.md
+++ b/exporters/stats/signalfx/README.md
@@ -27,17 +27,17 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-api</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-exporter-stats-signalfx</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-impl</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
     <scope>runtime</scope>
   </dependency>
 </dependencies>
@@ -45,10 +45,10 @@
 
 For Gradle add to your dependencies:
 
-```
-compile 'io.opencensus:opencensus-api:0.16.1'
-compile 'io.opencensus:opencensus-exporter-stats-signalfx:0.16.1'
-runtime 'io.opencensus:opencensus-impl:0.16.1'
+```groovy
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-exporter-stats-signalfx:0.28.3'
+runtime 'io.opencensus:opencensus-impl:0.28.3'
 ```
 
 ### Register the exporter
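The "Register the exporter" section itself is unchanged by this hunk; for context, a minimal registration sketch, assuming the existing `SignalFxStatsExporter.create` and `SignalFxStatsConfiguration` APIs:

```java
import io.opencensus.exporter.stats.signalfx.SignalFxStatsConfiguration;
import io.opencensus.exporter.stats.signalfx.SignalFxStatsExporter;

public final class SignalFxSetup {
  public static void main(String[] args) {
    // Only the ingest token is set here; the endpoint and export interval are
    // assumed to fall back to the library defaults.
    SignalFxStatsExporter.create(
        SignalFxStatsConfiguration.builder()
            .setToken("your-signalfx-ingest-token")
            .build());
  }
}
```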
diff --git a/exporters/stats/signalfx/build.gradle b/exporters/stats/signalfx/build.gradle
index d496b1e..01453b5 100644
--- a/exporters/stats/signalfx/build.gradle
+++ b/exporters/stats/signalfx/build.gradle
@@ -9,6 +9,7 @@
     compileOnly libraries.auto_value
 
     compile project(':opencensus-api'),
+            project(':opencensus-exporter-metrics-util'),
             libraries.guava
 
     compile (libraries.signalfx_java) {
@@ -16,8 +17,6 @@
         exclude group: 'com.google.guava', module: 'guava'
     }
 
-    testCompile project(':opencensus-api')
-
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
     signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
 }
diff --git a/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxMetricExporter.java b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxMetricExporter.java
new file mode 100644
index 0000000..064d1f8
--- /dev/null
+++ b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxMetricExporter.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.signalfx;
+
+import com.signalfx.metrics.errorhandler.MetricError;
+import com.signalfx.metrics.errorhandler.OnSendErrorHandler;
+import com.signalfx.metrics.flush.AggregateMetricSender;
+import com.signalfx.metrics.flush.AggregateMetricSender.Session;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.DataPoint;
+import io.opencensus.exporter.metrics.util.MetricExporter;
+import io.opencensus.metrics.export.Metric;
+import java.io.IOException;
+import java.net.URI;
+import java.util.Collection;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/**
+ * {@code MetricExporter} that converts OpenCensus {@code Metric}s into SignalFx datapoints.
+ *
+ * <p>Each {@code export} call sends its datapoints in one {@code AggregateMetricSender} session.
+ */
+final class SignalFxMetricExporter extends MetricExporter {
+
+  private static final Logger logger = Logger.getLogger(SignalFxMetricExporter.class.getName());
+
+  private static final OnSendErrorHandler ERROR_HANDLER =
+      new OnSendErrorHandler() {
+        @Override
+        public void handleError(MetricError error) {
+          logger.log(Level.WARNING, "Unable to send metrics to SignalFx: {0}", error.getMessage());
+        }
+      };
+
+  private final AggregateMetricSender sender;
+
+  SignalFxMetricExporter(SignalFxMetricsSenderFactory factory, URI endpoint, String token) {
+    this.sender = factory.create(endpoint, token, ERROR_HANDLER);
+
+    logger.log(Level.FINE, "Initialized SignalFx exporter to {0}.", endpoint);
+  }
+
+  @Override
+  public void export(Collection<Metric> metrics) {
+    Session session = sender.createSession();
+
+    try {
+      for (Metric metric : metrics) {
+        for (DataPoint datapoint : SignalFxSessionAdaptor.adapt(metric)) {
+          session.setDatapoint(datapoint);
+        }
+      }
+    } finally {
+      try {
+        session.close();
+      } catch (IOException e) {
+        logger.log(Level.FINE, "Unable to close the session", e);
+      }
+    }
+  }
+}
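The exporter above only implements `export(Collection<Metric>)`; something has to read the registered metrics and call it periodically. A sketch of that wiring, assuming the `IntervalMetricReader` and `MetricReader` APIs from the `opencensus-exporter-metrics-util` module added to build.gradle above (the span name is hypothetical, and this likely mirrors what `SignalFxStatsExporter` does internally):

```java
import io.opencensus.common.Duration;
import io.opencensus.exporter.metrics.util.IntervalMetricReader;
import io.opencensus.exporter.metrics.util.MetricReader;
import io.opencensus.metrics.Metrics;

final class SignalFxExporterWiring {
  static IntervalMetricReader start(SignalFxMetricExporter exporter) {
    // Periodically reads every registered MetricProducer and hands the
    // collected metrics to the exporter's export() method.
    return IntervalMetricReader.create(
        exporter,
        MetricReader.create(
            MetricReader.Options.builder()
                .setMetricProducerManager(
                    Metrics.getExportComponent().getMetricProducerManager())
                .setSpanName("ExportMetricsToSignalFx") // hypothetical span name
                .build()),
        IntervalMetricReader.Options.builder()
            .setExportInterval(Duration.create(1, 0))
            .build());
  }
}
```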
diff --git a/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptor.java b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptor.java
index 2eb75c4..a5259ff 100644
--- a/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptor.java
+++ b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptor.java
@@ -17,172 +17,152 @@
 package io.opencensus.exporter.stats.signalfx;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
 import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.DataPoint;
 import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.Datum;
 import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.Dimension;
 import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.MetricType;
 import io.opencensus.common.Function;
-import io.opencensus.stats.Aggregation;
-import io.opencensus.stats.AggregationData;
-import io.opencensus.stats.AggregationData.CountData;
-import io.opencensus.stats.AggregationData.DistributionData;
-import io.opencensus.stats.AggregationData.LastValueDataDouble;
-import io.opencensus.stats.AggregationData.LastValueDataLong;
-import io.opencensus.stats.AggregationData.SumDataDouble;
-import io.opencensus.stats.AggregationData.SumDataLong;
-import io.opencensus.stats.View;
-import io.opencensus.stats.ViewData;
-import io.opencensus.tags.TagKey;
-import io.opencensus.tags.TagValue;
+import io.opencensus.common.Functions;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Distribution;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.Summary;
+import io.opencensus.metrics.export.TimeSeries;
+import io.opencensus.metrics.export.Value;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.ListIterator;
-import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 
-/*>>>
-import org.checkerframework.checker.nullness.qual.Nullable;
-*/
-
-/** Adapter for a {@code ViewData}'s contents into SignalFx datapoints. */
-@SuppressWarnings("deprecation")
+/** Adapter for a {@code Metric}'s contents into SignalFx datapoints. */
 final class SignalFxSessionAdaptor {
+  private static final Logger logger = Logger.getLogger(SignalFxSessionAdaptor.class.getName());
+
+  // Constant functions for Datum.
+  private static final Function<Double, Datum> datumDoubleFunction =
+      new Function<Double, Datum>() {
+        @Override
+        public Datum apply(Double arg) {
+          Datum.Builder builder = Datum.newBuilder();
+          builder.setDoubleValue(arg);
+          return builder.build();
+        }
+      };
+  private static final Function<Long, Datum> datumLongFunction =
+      new Function<Long, Datum>() {
+        @Override
+        public Datum apply(Long arg) {
+          Datum.Builder builder = Datum.newBuilder();
+          builder.setIntValue(arg);
+          return builder.build();
+        }
+      };
+  private static final Function<Distribution, Datum> datumDistributionFunction =
+      new Function<Distribution, Datum>() {
+        @Override
+        public Datum apply(Distribution arg) {
+          // SignalFx doesn't handle Distribution values.
+          // TODO(mayurkale): decide what to do with Distribution value.
+          logger.log(Level.INFO, "Distribution type is not supported.");
+          Datum.Builder builder = Datum.newBuilder();
+          return builder.build();
+        }
+      };
+  private static final Function<Summary, Datum> datumSummaryFunction =
+      new Function<Summary, Datum>() {
+        @Override
+        public Datum apply(Summary arg) {
+          // SignalFx doesn't handle Summary values.
+          // TODO(mayurkale): decide what to do with Summary value.
+          logger.log(Level.INFO, "Summary type is not supported.");
+          Datum.Builder builder = Datum.newBuilder();
+          return builder.build();
+        }
+      };
 
   private SignalFxSessionAdaptor() {}
 
   /**
-   * Converts the given view data into datapoints that can be sent to SignalFx.
+   * Converts the given Metric into datapoints that can be sent to SignalFx.
    *
-   * <p>The view name is used as the metric name, and the aggregation type and aggregation window
-   * type determine the metric type.
-   *
-   * @param data The {@link ViewData} containing the aggregation data of each combination of tag
-   *     values.
-   * @return A list of datapoints for the corresponding metric timeseries of this view's metric.
+   * @param metric The {@link Metric} containing a time series for each combination of label values.
+   * @return A list of datapoints, one for each point in each of the metric's time series.
    */
-  static List<DataPoint> adapt(ViewData data) {
-    View view = data.getView();
-    List<TagKey> keys = view.getColumns();
-
-    MetricType metricType = getMetricTypeForAggregation(view.getAggregation(), view.getWindow());
+  static List<DataPoint> adapt(Metric metric) {
+    MetricDescriptor metricDescriptor = metric.getMetricDescriptor();
+    MetricType metricType = getType(metricDescriptor.getType());
     if (metricType == null) {
       return Collections.emptyList();
     }
 
-    List<DataPoint> datapoints = new ArrayList<>(data.getAggregationMap().size());
-    for (Map.Entry<List</*@Nullable*/ TagValue>, AggregationData> entry :
-        data.getAggregationMap().entrySet()) {
-      datapoints.add(
-          DataPoint.newBuilder()
-              .setMetric(view.getName().asString())
-              .setMetricType(metricType)
-              .addAllDimensions(createDimensions(keys, entry.getKey()))
-              .setValue(createDatum(entry.getValue()))
-              .build());
+    DataPoint.Builder shared = DataPoint.newBuilder();
+    shared.setMetric(metricDescriptor.getName());
+    shared.setMetricType(metricType);
+
+    ArrayList<DataPoint> datapoints = Lists.newArrayList();
+    for (TimeSeries timeSeries : metric.getTimeSeriesList()) {
+      DataPoint.Builder builder = shared.clone();
+      builder.addAllDimensions(
+          createDimensions(metricDescriptor.getLabelKeys(), timeSeries.getLabelValues()));
+
+      List<Point> points = timeSeries.getPoints();
+      datapoints.ensureCapacity(datapoints.size() + points.size());
+      for (Point point : points) {
+        datapoints.add(builder.setValue(createDatum(point.getValue())).build());
+      }
     }
     return datapoints;
   }
 
   @VisibleForTesting
   @javax.annotation.Nullable
-  static MetricType getMetricTypeForAggregation(
-      Aggregation aggregation, View.AggregationWindow window) {
-    if (aggregation instanceof Aggregation.Mean || aggregation instanceof Aggregation.LastValue) {
+  static MetricType getType(Type type) {
+    if (type == Type.GAUGE_INT64 || type == Type.GAUGE_DOUBLE) {
       return MetricType.GAUGE;
-    } else if (aggregation instanceof Aggregation.Count || aggregation instanceof Aggregation.Sum) {
-      if (window instanceof View.AggregationWindow.Cumulative) {
-        return MetricType.CUMULATIVE_COUNTER;
-      }
-      // TODO(mpetazzoni): support incremental counters when AggregationWindow.Interval is ready
+    } else if (type == Type.CUMULATIVE_INT64 || type == Type.CUMULATIVE_DOUBLE) {
+      return MetricType.CUMULATIVE_COUNTER;
     }
-
-    // TODO(mpetazzoni): add support for histograms (Aggregation.Distribution).
+    // TODO(mayurkale): decide what to do with Distribution and Summary types.
     return null;
   }
 
   @VisibleForTesting
-  static Iterable<Dimension> createDimensions(
-      List<TagKey> keys, List</*@Nullable*/ TagValue> values) {
-    Preconditions.checkArgument(
-        keys.size() == values.size(), "TagKeys and TagValues don't have the same size.");
+  static Iterable<Dimension> createDimensions(List<LabelKey> keys, List<LabelValue> values) {
     List<Dimension> dimensions = new ArrayList<>(keys.size());
-    for (ListIterator<TagKey> it = keys.listIterator(); it.hasNext(); ) {
-      TagKey key = it.next();
-      TagValue value = values.get(it.previousIndex());
-      if (value == null || Strings.isNullOrEmpty(value.asString())) {
+    for (int i = 0; i < values.size(); i++) {
+      LabelValue value = values.get(i);
+      if (Strings.isNullOrEmpty(value.getValue())) {
         continue;
       }
-      dimensions.add(createDimension(key, value));
+      dimensions.add(createDimension(keys.get(i), value));
     }
     return dimensions;
   }
 
   @VisibleForTesting
-  static Dimension createDimension(TagKey key, TagValue value) {
-    return Dimension.newBuilder().setKey(key.getName()).setValue(value.asString()).build();
+  static Dimension createDimension(LabelKey labelKey, LabelValue labelValue) {
+    Dimension.Builder builder = Dimension.newBuilder();
+    String value = labelValue.getValue();
+    if (!Strings.isNullOrEmpty(value)) {
+      builder.setKey(labelKey.getKey()).setValue(value);
+    }
+    return builder.build();
   }
 
   @VisibleForTesting
-  static Datum createDatum(AggregationData data) {
-    final Datum.Builder builder = Datum.newBuilder();
-    data.match(
-        new Function<SumDataDouble, Void>() {
-          @Override
-          public Void apply(SumDataDouble arg) {
-            builder.setDoubleValue(arg.getSum());
-            return null;
-          }
-        },
-        new Function<SumDataLong, Void>() {
-          @Override
-          public Void apply(SumDataLong arg) {
-            builder.setIntValue(arg.getSum());
-            return null;
-          }
-        },
-        new Function<CountData, Void>() {
-          @Override
-          public Void apply(CountData arg) {
-            builder.setIntValue(arg.getCount());
-            return null;
-          }
-        },
-        new Function<DistributionData, Void>() {
-          @Override
-          public Void apply(DistributionData arg) {
-            // TODO(mpetazzoni): add histogram support.
-            throw new IllegalArgumentException("Distribution aggregations are not supported");
-          }
-        },
-        new Function<LastValueDataDouble, Void>() {
-          @Override
-          public Void apply(LastValueDataDouble arg) {
-            builder.setDoubleValue(arg.getLastValue());
-            return null;
-          }
-        },
-        new Function<LastValueDataLong, Void>() {
-          @Override
-          public Void apply(LastValueDataLong arg) {
-            builder.setIntValue(arg.getLastValue());
-            return null;
-          }
-        },
-        new Function<AggregationData, Void>() {
-          @Override
-          public Void apply(AggregationData arg) {
-            // TODO(songya): remove this once Mean aggregation is completely removed. Before that
-            // we need to continue supporting Mean, since it could still be used by users and some
-            // deprecated RPC views.
-            if (arg instanceof AggregationData.MeanData) {
-              builder.setDoubleValue(((AggregationData.MeanData) arg).getMean());
-              return null;
-            }
-            throw new IllegalArgumentException("Unknown Aggregation.");
-          }
-        });
-    return builder.build();
+  static Datum createDatum(Value value) {
+    return value.match(
+        datumDoubleFunction,
+        datumLongFunction,
+        datumDistributionFunction,
+        datumSummaryFunction,
+        Functions.<Datum>throwIllegalArgumentException());
   }
 }
diff --git a/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporter.java b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporter.java
index f7915b7..aad6824 100644
--- a/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporter.java
+++ b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporter.java
@@ -18,8 +18,10 @@
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import io.opencensus.stats.Stats;
-import io.opencensus.stats.ViewManager;
+import io.opencensus.exporter.metrics.util.IntervalMetricReader;
+import io.opencensus.exporter.metrics.util.MetricReader;
+import io.opencensus.metrics.Metrics;
+import io.opencensus.metrics.export.MetricProducerManager;
 import javax.annotation.Nullable;
 import javax.annotation.concurrent.GuardedBy;
 
@@ -42,22 +44,31 @@
   private static final Object monitor = new Object();
 
   private final SignalFxStatsConfiguration configuration;
-  private final SignalFxStatsExporterWorkerThread workerThread;
+  private final IntervalMetricReader intervalMetricReader;
+
+  private static final String EXPORTER_SPAN_NAME = "ExportMetricsToSignalFX";
 
   @GuardedBy("monitor")
   @Nullable
   private static SignalFxStatsExporter exporter = null;
 
-  private SignalFxStatsExporter(SignalFxStatsConfiguration configuration, ViewManager viewManager) {
-    Preconditions.checkNotNull(configuration, "SignalFx stats exporter configuration");
-    this.configuration = configuration;
-    this.workerThread =
-        new SignalFxStatsExporterWorkerThread(
-            SignalFxMetricsSenderFactory.DEFAULT,
-            configuration.getIngestEndpoint(),
-            configuration.getToken(),
-            configuration.getExportInterval(),
-            viewManager);
+  private SignalFxStatsExporter(
+      SignalFxStatsConfiguration configuration, MetricProducerManager metricProducerManager) {
+    this.configuration = Preconditions.checkNotNull(configuration, "configuration");
+    this.intervalMetricReader =
+        IntervalMetricReader.create(
+            new SignalFxMetricExporter(
+                SignalFxMetricsSenderFactory.DEFAULT,
+                configuration.getIngestEndpoint(),
+                configuration.getToken()),
+            MetricReader.create(
+                MetricReader.Options.builder()
+                    .setMetricProducerManager(metricProducerManager)
+                    .setSpanName(EXPORTER_SPAN_NAME)
+                    .build()),
+            IntervalMetricReader.Options.builder()
+                .setExportInterval(configuration.getExportInterval())
+                .build());
   }
 
   /**
@@ -76,8 +87,9 @@
   public static void create(SignalFxStatsConfiguration configuration) {
     synchronized (monitor) {
       Preconditions.checkState(exporter == null, "SignalFx stats exporter is already created.");
-      exporter = new SignalFxStatsExporter(configuration, Stats.getViewManager());
-      exporter.workerThread.start();
+      exporter =
+          new SignalFxStatsExporter(
+              configuration, Metrics.getExportComponent().getMetricProducerManager());
     }
   }
 
@@ -85,14 +97,8 @@
   static void unsafeResetExporter() {
     synchronized (monitor) {
       if (exporter != null) {
-        SignalFxStatsExporterWorkerThread workerThread = exporter.workerThread;
-        if (workerThread != null && workerThread.isAlive()) {
-          try {
-            workerThread.interrupt();
-            workerThread.join();
-          } catch (InterruptedException e) {
-            Thread.currentThread().interrupt();
-          }
+        if (exporter.intervalMetricReader != null) {
+          exporter.intervalMetricReader.stop();
         }
         exporter = null;
       }
diff --git a/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterWorkerThread.java b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterWorkerThread.java
deleted file mode 100644
index 348778e..0000000
--- a/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterWorkerThread.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright 2017, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.exporter.stats.signalfx;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.signalfx.metrics.errorhandler.MetricError;
-import com.signalfx.metrics.errorhandler.OnSendErrorHandler;
-import com.signalfx.metrics.flush.AggregateMetricSender;
-import com.signalfx.metrics.flush.AggregateMetricSender.Session;
-import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.DataPoint;
-import io.opencensus.common.Duration;
-import io.opencensus.stats.View;
-import io.opencensus.stats.ViewData;
-import io.opencensus.stats.ViewManager;
-import java.io.IOException;
-import java.net.URI;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-/**
- * Worker {@code Thread} that polls ViewData from the Stats's ViewManager and exports to SignalFx.
- *
- * <p>{@code SignalFxStatsExporterWorkerThread} is a daemon {@code Thread}
- */
-final class SignalFxStatsExporterWorkerThread extends Thread {
-
-  private static final Logger logger =
-      Logger.getLogger(SignalFxStatsExporterWorkerThread.class.getName());
-
-  private static final OnSendErrorHandler ERROR_HANDLER =
-      new OnSendErrorHandler() {
-        @Override
-        public void handleError(MetricError error) {
-          logger.log(Level.WARNING, "Unable to send metrics to SignalFx: {0}", error.getMessage());
-        }
-      };
-
-  private final long intervalMs;
-  private final ViewManager views;
-  private final AggregateMetricSender sender;
-
-  SignalFxStatsExporterWorkerThread(
-      SignalFxMetricsSenderFactory factory,
-      URI endpoint,
-      String token,
-      Duration interval,
-      ViewManager views) {
-    this.intervalMs = interval.toMillis();
-    this.views = views;
-    this.sender = factory.create(endpoint, token, ERROR_HANDLER);
-
-    setDaemon(true);
-    setName(getClass().getSimpleName());
-    logger.log(Level.FINE, "Initialized SignalFx exporter to {0}.", endpoint);
-  }
-
-  @VisibleForTesting
-  void export() throws IOException {
-    Session session = sender.createSession();
-    try {
-      for (View view : views.getAllExportedViews()) {
-        ViewData data = views.getView(view.getName());
-        if (data == null) {
-          continue;
-        }
-
-        for (DataPoint datapoint : SignalFxSessionAdaptor.adapt(data)) {
-          session.setDatapoint(datapoint);
-        }
-      }
-    } finally {
-      session.close();
-    }
-  }
-
-  @Override
-  public void run() {
-    while (true) {
-      try {
-        export();
-        Thread.sleep(intervalMs);
-      } catch (InterruptedException ie) {
-        Thread.currentThread().interrupt();
-        break;
-      } catch (Throwable e) {
-        logger.log(Level.WARNING, "Exception thrown by the SignalFx stats exporter", e);
-      }
-    }
-    logger.log(Level.INFO, "SignalFx stats exporter stopped.");
-  }
-}
diff --git a/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxMetricExporterTest.java b/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxMetricExporterTest.java
new file mode 100644
index 0000000..6d7d7fb
--- /dev/null
+++ b/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxMetricExporterTest.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.signalfx;
+
+import static org.mockito.Mockito.doReturn;
+
+import com.signalfx.metrics.errorhandler.OnSendErrorHandler;
+import com.signalfx.metrics.flush.AggregateMetricSender;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.DataPoint;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.Datum;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.Dimension;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.MetricType;
+import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.TimeSeries;
+import io.opencensus.metrics.export.Value;
+import java.io.IOException;
+import java.net.URI;
+import java.util.Collections;
+import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.stubbing.Answer;
+
+/** Unit tests for {@link SignalFxMetricExporter}. */
+@RunWith(MockitoJUnitRunner.class)
+public class SignalFxMetricExporterTest {
+
+  private static final String TEST_TOKEN = "token";
+  private static final String METRIC_NAME = "metric-name";
+  private static final String METRIC_DESCRIPTION = "description";
+  private static final String METRIC_UNIT = "1";
+  private static final List<LabelKey> LABEL_KEY =
+      Collections.singletonList(LabelKey.create("key1", "description"));
+  private static final List<LabelValue> LABEL_VALUE =
+      Collections.singletonList(LabelValue.create("value1"));
+  private static final MetricDescriptor METRIC_DESCRIPTOR =
+      MetricDescriptor.create(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.GAUGE_DOUBLE, LABEL_KEY);
+  private static final Value VALUE_DOUBLE = Value.doubleValue(3.15d);
+  private static final Timestamp TIMESTAMP = Timestamp.fromMillis(3000);
+  private static final Point POINT = Point.create(VALUE_DOUBLE, TIMESTAMP);
+  private static final TimeSeries TIME_SERIES =
+      TimeSeries.createWithOnePoint(LABEL_VALUE, POINT, null);
+  private static final Metric METRIC =
+      Metric.createWithOneTimeSeries(METRIC_DESCRIPTOR, TIME_SERIES);
+
+  @Mock private AggregateMetricSender.Session session;
+  @Mock private SignalFxMetricsSenderFactory factory;
+
+  private URI endpoint;
+
+  @Before
+  public void setUp() throws Exception {
+    endpoint = new URI("http://example.com");
+
+    Mockito.when(
+            factory.create(
+                Mockito.any(URI.class), Mockito.anyString(), Mockito.any(OnSendErrorHandler.class)))
+        .thenAnswer(
+            new Answer<AggregateMetricSender>() {
+              @Override
+              public AggregateMetricSender answer(InvocationOnMock invocation) {
+                Object[] args = invocation.getArguments();
+                AggregateMetricSender sender =
+                    SignalFxMetricsSenderFactory.DEFAULT.create(
+                        (URI) args[0], (String) args[1], (OnSendErrorHandler) args[2]);
+                AggregateMetricSender spy = Mockito.spy(sender);
+                doReturn(session).when(spy).createSession();
+                return spy;
+              }
+            });
+  }
+
+  @Test
+  public void setsDatapointsFromMetricOnSession() throws IOException {
+    SignalFxMetricExporter exporter = new SignalFxMetricExporter(factory, endpoint, TEST_TOKEN);
+    exporter.export(Collections.singletonList(METRIC));
+
+    DataPoint datapoint =
+        DataPoint.newBuilder()
+            .setMetric("metric-name")
+            .setMetricType(MetricType.GAUGE)
+            .addDimensions(Dimension.newBuilder().setKey("key1").setValue("value1").build())
+            .setValue(Datum.newBuilder().setDoubleValue(3.15d).build())
+            .build();
+    Mockito.verify(session).setDatapoint(Mockito.eq(datapoint));
+    Mockito.verify(session).close();
+  }
+}
diff --git a/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptorTest.java b/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptorTest.java
index 34f4dfa..ddace19 100644
--- a/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptorTest.java
+++ b/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptorTest.java
@@ -16,6 +16,7 @@
 
 package io.opencensus.exporter.stats.signalfx;
 
+import static com.google.common.truth.Truth.assertThat;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
@@ -23,110 +24,104 @@
 import static org.junit.Assert.fail;
 
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.DataPoint;
 import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.Datum;
 import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.Dimension;
 import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.MetricType;
-import io.opencensus.common.Duration;
-import io.opencensus.stats.Aggregation;
-import io.opencensus.stats.AggregationData;
-import io.opencensus.stats.AggregationData.CountData;
-import io.opencensus.stats.AggregationData.DistributionData;
-import io.opencensus.stats.AggregationData.LastValueDataDouble;
-import io.opencensus.stats.AggregationData.LastValueDataLong;
-import io.opencensus.stats.AggregationData.MeanData;
-import io.opencensus.stats.AggregationData.SumDataDouble;
-import io.opencensus.stats.AggregationData.SumDataLong;
-import io.opencensus.stats.BucketBoundaries;
-import io.opencensus.stats.View;
-import io.opencensus.stats.View.AggregationWindow;
-import io.opencensus.stats.View.Name;
-import io.opencensus.stats.ViewData;
-import io.opencensus.tags.TagKey;
-import io.opencensus.tags.TagValue;
+import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Distribution;
+import io.opencensus.metrics.export.Distribution.Bucket;
+import io.opencensus.metrics.export.Distribution.BucketOptions;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.Summary;
+import io.opencensus.metrics.export.Summary.Snapshot;
+import io.opencensus.metrics.export.Summary.Snapshot.ValueAtPercentile;
+import io.opencensus.metrics.export.TimeSeries;
+import io.opencensus.metrics.export.Value;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
-import java.util.Map;
-import org.junit.Before;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.junit.runners.JUnit4;
 
-@RunWith(MockitoJUnitRunner.class)
+/** Unit tests for {@link SignalFxSessionAdaptor}. */
+@RunWith(JUnit4.class)
 public class SignalFxSessionAdaptorTest {
-
-  private static final Duration ONE_SECOND = Duration.create(1, 0);
-
-  @Rule public final ExpectedException thrown = ExpectedException.none();
-
-  @Mock private View view;
-
-  @Mock private ViewData viewData;
-
-  @Before
-  public void setUp() {
-    Mockito.when(view.getName()).thenReturn(Name.create("view-name"));
-    Mockito.when(view.getColumns()).thenReturn(ImmutableList.of(TagKey.create("animal")));
-    Mockito.when(viewData.getView()).thenReturn(view);
-  }
+  private static final String METRIC_NAME = "metric-name";
+  private static final String METRIC_DESCRIPTION = "metric-description";
+  private static final String METRIC_UNIT = "1";
+  private static final LabelKey LABEL_KEY_1 = LabelKey.create("key1", "description");
+  private static final LabelKey LABEL_KEY_2 = LabelKey.create("key2", "description");
+  private static final LabelValue LABEL_VALUE_1 = LabelValue.create("value1");
+  private static final LabelValue LABEL_VALUE_2 = LabelValue.create("value2");
+  private static final LabelValue EMPTY_LABEL_VALUE = LabelValue.create("");
+  private static final MetricDescriptor METRIC_DESCRIPTOR =
+      MetricDescriptor.create(
+          METRIC_NAME,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          Type.CUMULATIVE_INT64,
+          Collections.singletonList(LABEL_KEY_1));
+  private static final MetricDescriptor DISTRIBUTION_METRIC_DESCRIPTOR =
+      MetricDescriptor.create(
+          METRIC_NAME,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          Type.CUMULATIVE_DISTRIBUTION,
+          Collections.singletonList(LABEL_KEY_1));
+  private static final List<Double> BUCKET_BOUNDARIES = Arrays.asList(1.0, 3.0, 5.0);
+  private static final Distribution DISTRIBUTION =
+      Distribution.create(
+          3,
+          2,
+          14,
+          BucketOptions.explicitOptions(BUCKET_BOUNDARIES),
+          Arrays.asList(Bucket.create(3), Bucket.create(1), Bucket.create(2), Bucket.create(4)));
+  private static final Summary SUMMARY =
+      Summary.create(
+          10L,
+          10.0,
+          Snapshot.create(
+              10L, 87.07, Collections.singletonList(ValueAtPercentile.create(0.98, 10.2))));
+  private static final Value VALUE_LONG = Value.longValue(42L);
+  private static final Value VALUE_DOUBLE = Value.doubleValue(12.2);
+  private static final Value VALUE_DISTRIBUTION = Value.distributionValue(DISTRIBUTION);
+  private static final Value VALUE_SUMMARY = Value.summaryValue(SUMMARY);
+  private static final Timestamp TIMESTAMP = Timestamp.fromMillis(3000);
+  private static final Point POINT_1 = Point.create(Value.longValue(2L), TIMESTAMP);
+  private static final Point POINT_2 = Point.create(Value.longValue(3L), TIMESTAMP);
+  private static final TimeSeries TIME_SERIES_1 =
+      TimeSeries.createWithOnePoint(Collections.singletonList(LABEL_VALUE_1), POINT_1, null);
+  private static final TimeSeries TIME_SERIES_2 =
+      TimeSeries.createWithOnePoint(Collections.singletonList(LABEL_VALUE_2), POINT_2, null);
+  private static final TimeSeries TIME_SERIES_3 =
+      TimeSeries.createWithOnePoint(Collections.singletonList(EMPTY_LABEL_VALUE), POINT_2, null);
+  private static final Metric METRIC =
+      Metric.create(METRIC_DESCRIPTOR, Arrays.asList(TIME_SERIES_1, TIME_SERIES_2));
+  private static final Metric METRIC_1 =
+      Metric.create(METRIC_DESCRIPTOR, Arrays.asList(TIME_SERIES_1, TIME_SERIES_3));
+  private static final Metric DISTRIBUTION_METRIC =
+      Metric.create(DISTRIBUTION_METRIC_DESCRIPTOR, Collections.<TimeSeries>emptyList());
 
   @Test
-  public void checkMetricTypeFromAggregation() {
-    assertNull(SignalFxSessionAdaptor.getMetricTypeForAggregation(null, null));
-    assertNull(
-        SignalFxSessionAdaptor.getMetricTypeForAggregation(
-            null, AggregationWindow.Cumulative.create()));
+  public void checkMetricType() {
+    assertNull(SignalFxSessionAdaptor.getType(null));
+    assertEquals(MetricType.GAUGE, SignalFxSessionAdaptor.getType(Type.GAUGE_INT64));
+    assertEquals(MetricType.GAUGE, SignalFxSessionAdaptor.getType(Type.GAUGE_DOUBLE));
     assertEquals(
-        MetricType.GAUGE,
-        SignalFxSessionAdaptor.getMetricTypeForAggregation(
-            Aggregation.Mean.create(), AggregationWindow.Cumulative.create()));
+        MetricType.CUMULATIVE_COUNTER, SignalFxSessionAdaptor.getType(Type.CUMULATIVE_INT64));
     assertEquals(
-        MetricType.GAUGE,
-        SignalFxSessionAdaptor.getMetricTypeForAggregation(
-            Aggregation.Mean.create(), AggregationWindow.Interval.create(ONE_SECOND)));
-    assertEquals(
-        MetricType.CUMULATIVE_COUNTER,
-        SignalFxSessionAdaptor.getMetricTypeForAggregation(
-            Aggregation.Count.create(), AggregationWindow.Cumulative.create()));
-    assertEquals(
-        MetricType.CUMULATIVE_COUNTER,
-        SignalFxSessionAdaptor.getMetricTypeForAggregation(
-            Aggregation.Sum.create(), AggregationWindow.Cumulative.create()));
-    assertNull(
-        SignalFxSessionAdaptor.getMetricTypeForAggregation(Aggregation.Count.create(), null));
-    assertNull(SignalFxSessionAdaptor.getMetricTypeForAggregation(Aggregation.Sum.create(), null));
-    assertNull(
-        SignalFxSessionAdaptor.getMetricTypeForAggregation(
-            Aggregation.Count.create(), AggregationWindow.Interval.create(ONE_SECOND)));
-    assertNull(
-        SignalFxSessionAdaptor.getMetricTypeForAggregation(
-            Aggregation.Sum.create(), AggregationWindow.Interval.create(ONE_SECOND)));
-    assertNull(
-        SignalFxSessionAdaptor.getMetricTypeForAggregation(
-            Aggregation.Distribution.create(BucketBoundaries.create(ImmutableList.of(3.15d))),
-            AggregationWindow.Cumulative.create()));
-    assertEquals(
-        MetricType.GAUGE,
-        SignalFxSessionAdaptor.getMetricTypeForAggregation(
-            Aggregation.LastValue.create(), AggregationWindow.Cumulative.create()));
-    assertEquals(
-        MetricType.GAUGE,
-        SignalFxSessionAdaptor.getMetricTypeForAggregation(
-            Aggregation.LastValue.create(), AggregationWindow.Interval.create(ONE_SECOND)));
-  }
-
-  @Test
-  public void createDimensionsWithNonMatchingListSizes() {
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("don't have the same size");
-    SignalFxSessionAdaptor.createDimensions(
-        ImmutableList.of(TagKey.create("animal"), TagKey.create("color")),
-        ImmutableList.of(TagValue.create("dog")));
+        MetricType.CUMULATIVE_COUNTER, SignalFxSessionAdaptor.getType(Type.CUMULATIVE_DOUBLE));
+    assertNull(SignalFxSessionAdaptor.getType(Type.SUMMARY));
+    assertNull(SignalFxSessionAdaptor.getType(Type.GAUGE_DISTRIBUTION));
+    assertNull(SignalFxSessionAdaptor.getType(Type.CUMULATIVE_DISTRIBUTION));
   }
 
   @Test
@@ -134,53 +129,29 @@
     List<Dimension> dimensions =
         Lists.newArrayList(
             SignalFxSessionAdaptor.createDimensions(
-                ImmutableList.of(TagKey.create("animal"), TagKey.create("color")),
-                ImmutableList.of(TagValue.create("dog"), TagValue.create(""))));
+                ImmutableList.of(LABEL_KEY_1, LABEL_KEY_2),
+                ImmutableList.of(LABEL_VALUE_1, EMPTY_LABEL_VALUE)));
     assertEquals(1, dimensions.size());
-    assertEquals("animal", dimensions.get(0).getKey());
-    assertEquals("dog", dimensions.get(0).getValue());
+    assertEquals(LABEL_KEY_1.getKey(), dimensions.get(0).getKey());
+    assertEquals(LABEL_VALUE_1.getValue(), dimensions.get(0).getValue());
   }
 
   @Test
   public void createDimension() {
-    Dimension dimension =
-        SignalFxSessionAdaptor.createDimension(TagKey.create("animal"), TagValue.create("dog"));
-    assertEquals("animal", dimension.getKey());
-    assertEquals("dog", dimension.getValue());
+    Dimension dimension = SignalFxSessionAdaptor.createDimension(LABEL_KEY_1, LABEL_VALUE_1);
+    assertEquals(LABEL_KEY_1.getKey(), dimension.getKey());
+    assertEquals(LABEL_VALUE_1.getValue(), dimension.getValue());
   }
 
   @Test
-  public void unsupportedAggregationYieldsNoDatapoints() {
-    Mockito.when(view.getAggregation())
-        .thenReturn(
-            Aggregation.Distribution.create(BucketBoundaries.create(ImmutableList.of(3.15d))));
-    Mockito.when(view.getWindow()).thenReturn(AggregationWindow.Cumulative.create());
-    List<DataPoint> datapoints = SignalFxSessionAdaptor.adapt(viewData);
+  public void adaptUnsupportedMetricYieldsNoDatapoints() {
+    List<DataPoint> datapoints = SignalFxSessionAdaptor.adapt(DISTRIBUTION_METRIC);
     assertEquals(0, datapoints.size());
   }
 
   @Test
-  public void noAggregationDataYieldsNoDatapoints() {
-    Mockito.when(view.getAggregation()).thenReturn(Aggregation.Count.create());
-    Mockito.when(view.getWindow()).thenReturn(AggregationWindow.Cumulative.create());
-    List<DataPoint> datapoints = SignalFxSessionAdaptor.adapt(viewData);
-    assertEquals(0, datapoints.size());
-  }
-
-  @Test
-  public void createDatumFromDoubleSum() {
-    SumDataDouble data = SumDataDouble.create(3.15d);
-    Datum datum = SignalFxSessionAdaptor.createDatum(data);
-    assertTrue(datum.hasDoubleValue());
-    assertFalse(datum.hasIntValue());
-    assertFalse(datum.hasStrValue());
-    assertEquals(3.15d, datum.getDoubleValue(), 0d);
-  }
-
-  @Test
-  public void createDatumFromLongSum() {
-    SumDataLong data = SumDataLong.create(42L);
-    Datum datum = SignalFxSessionAdaptor.createDatum(data);
+  public void createDatumFromValueLong() {
+    Datum datum = SignalFxSessionAdaptor.createDatum(VALUE_LONG);
     assertFalse(datum.hasDoubleValue());
     assertTrue(datum.hasIntValue());
     assertFalse(datum.hasStrValue());
@@ -188,37 +159,20 @@
   }
 
   @Test
-  public void createDatumFromCount() {
-    CountData data = CountData.create(42L);
-    Datum datum = SignalFxSessionAdaptor.createDatum(data);
-    assertFalse(datum.hasDoubleValue());
-    assertTrue(datum.hasIntValue());
-    assertFalse(datum.hasStrValue());
-    assertEquals(42L, datum.getIntValue());
+  public void createDatumFromValueDistribution() {
+    Datum datum = SignalFxSessionAdaptor.createDatum(VALUE_DISTRIBUTION);
+    assertThat(datum).isEqualTo(Datum.newBuilder().build());
   }
 
   @Test
-  public void createDatumFromMean() {
-    MeanData data = MeanData.create(3.15d, 2L);
-    Datum datum = SignalFxSessionAdaptor.createDatum(data);
-    assertTrue(datum.hasDoubleValue());
-    assertFalse(datum.hasIntValue());
-    assertFalse(datum.hasStrValue());
-    assertEquals(3.15d, datum.getDoubleValue(), 0d);
+  public void createDatumFromValueSummary() {
+    Datum datum = SignalFxSessionAdaptor.createDatum(VALUE_SUMMARY);
+    assertThat(datum).isEqualTo(Datum.newBuilder().build());
   }
 
   @Test
-  public void createDatumFromDistributionThrows() {
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Distribution aggregations are not supported");
-    SignalFxSessionAdaptor.createDatum(
-        DistributionData.create(5, 2, 0, 10, 40, ImmutableList.of(1L)));
-  }
-
-  @Test
-  public void createDatumFromLastValueDouble() {
-    LastValueDataDouble data = LastValueDataDouble.create(12.2);
-    Datum datum = SignalFxSessionAdaptor.createDatum(data);
+  public void createDatumFromValueDouble() {
+    Datum datum = SignalFxSessionAdaptor.createDatum(VALUE_DOUBLE);
     assertTrue(datum.hasDoubleValue());
     assertFalse(datum.hasIntValue());
     assertFalse(datum.hasStrValue());
@@ -226,31 +180,11 @@
   }
 
   @Test
-  public void createDatumFromLastValueLong() {
-    LastValueDataLong data = LastValueDataLong.create(100000);
-    Datum datum = SignalFxSessionAdaptor.createDatum(data);
-    assertFalse(datum.hasDoubleValue());
-    assertTrue(datum.hasIntValue());
-    assertFalse(datum.hasStrValue());
-    assertEquals(100000, datum.getIntValue());
-  }
-
-  @Test
-  public void adaptViewIntoDatapoints() {
-    Map<List<TagValue>, AggregationData> map =
-        ImmutableMap.<List<TagValue>, AggregationData>of(
-            ImmutableList.of(TagValue.create("dog")),
-            SumDataLong.create(2L),
-            ImmutableList.of(TagValue.create("cat")),
-            SumDataLong.create(3L));
-    Mockito.when(viewData.getAggregationMap()).thenReturn(map);
-    Mockito.when(view.getAggregation()).thenReturn(Aggregation.Count.create());
-    Mockito.when(view.getWindow()).thenReturn(AggregationWindow.Cumulative.create());
-
-    List<DataPoint> datapoints = SignalFxSessionAdaptor.adapt(viewData);
+  public void adaptMetricIntoDatapoints() {
+    List<DataPoint> datapoints = SignalFxSessionAdaptor.adapt(METRIC);
     assertEquals(2, datapoints.size());
     for (DataPoint dp : datapoints) {
-      assertEquals("view-name", dp.getMetric());
+      assertEquals(METRIC_NAME, dp.getMetric());
       assertEquals(MetricType.CUMULATIVE_COUNTER, dp.getMetricType());
       assertEquals(1, dp.getDimensionsCount());
       assertTrue(dp.hasValue());
@@ -262,12 +196,12 @@
       assertFalse(datum.hasStrValue());
 
       Dimension dimension = dp.getDimensions(0);
-      assertEquals("animal", dimension.getKey());
+      assertEquals(LABEL_KEY_1.getKey(), dimension.getKey());
       switch (dimension.getValue()) {
-        case "dog":
+        case "value1":
           assertEquals(2L, datum.getIntValue());
           break;
-        case "cat":
+        case "value2":
           assertEquals(3L, datum.getIntValue());
           break;
         default:
@@ -277,21 +211,11 @@
   }
 
   @Test
-  public void adaptViewWithEmptyTagValueIntoDatapoints() {
-    Map<List<TagValue>, AggregationData> map =
-        ImmutableMap.<List<TagValue>, AggregationData>of(
-            ImmutableList.of(TagValue.create("dog")),
-            SumDataLong.create(2L),
-            ImmutableList.of(TagValue.create("")),
-            SumDataLong.create(3L));
-    Mockito.when(viewData.getAggregationMap()).thenReturn(map);
-    Mockito.when(view.getAggregation()).thenReturn(Aggregation.Count.create());
-    Mockito.when(view.getWindow()).thenReturn(AggregationWindow.Cumulative.create());
-
-    List<DataPoint> datapoints = SignalFxSessionAdaptor.adapt(viewData);
+  public void adaptMetricWithEmptyLabelValueIntoDatapoints() {
+    List<DataPoint> datapoints = SignalFxSessionAdaptor.adapt(METRIC_1);
     assertEquals(2, datapoints.size());
     for (DataPoint dp : datapoints) {
-      assertEquals("view-name", dp.getMetric());
+      assertEquals(METRIC_NAME, dp.getMetric());
       assertEquals(MetricType.CUMULATIVE_COUNTER, dp.getMetricType());
       assertTrue(dp.hasValue());
       assertFalse(dp.hasSource());
@@ -307,8 +231,8 @@
           break;
         case 1:
           Dimension dimension = dp.getDimensions(0);
-          assertEquals("animal", dimension.getKey());
-          assertEquals("dog", dimension.getValue());
+          assertEquals(LABEL_KEY_1.getKey(), dimension.getKey());
+          assertEquals(LABEL_VALUE_1.getValue(), dimension.getValue());
           assertEquals(2L, datum.getIntValue());
           break;
         default:
diff --git a/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterWorkerThreadTest.java b/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterWorkerThreadTest.java
deleted file mode 100644
index d8852d5..0000000
--- a/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterWorkerThreadTest.java
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- * Copyright 2017, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.exporter.stats.signalfx;
-
-import static org.hamcrest.CoreMatchers.startsWith;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
-import com.signalfx.metrics.errorhandler.OnSendErrorHandler;
-import com.signalfx.metrics.flush.AggregateMetricSender;
-import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.DataPoint;
-import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.Datum;
-import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.Dimension;
-import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.MetricType;
-import io.opencensus.common.Duration;
-import io.opencensus.stats.Aggregation;
-import io.opencensus.stats.AggregationData;
-import io.opencensus.stats.AggregationData.MeanData;
-import io.opencensus.stats.View;
-import io.opencensus.stats.View.AggregationWindow;
-import io.opencensus.stats.View.Name;
-import io.opencensus.stats.ViewData;
-import io.opencensus.stats.ViewManager;
-import io.opencensus.tags.TagKey;
-import io.opencensus.tags.TagValue;
-import java.io.IOException;
-import java.net.URI;
-import java.util.List;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.runners.MockitoJUnitRunner;
-import org.mockito.stubbing.Answer;
-
-@RunWith(MockitoJUnitRunner.class)
-public class SignalFxStatsExporterWorkerThreadTest {
-
-  private static final String TEST_TOKEN = "token";
-  private static final Duration ONE_SECOND = Duration.create(1, 0);
-
-  @Mock private AggregateMetricSender.Session session;
-
-  @Mock private ViewManager viewManager;
-
-  @Mock private SignalFxMetricsSenderFactory factory;
-
-  private URI endpoint;
-
-  @Before
-  public void setUp() throws Exception {
-    endpoint = new URI("http://example.com");
-
-    Mockito.when(
-            factory.create(
-                Mockito.any(URI.class), Mockito.anyString(), Mockito.any(OnSendErrorHandler.class)))
-        .thenAnswer(
-            new Answer<AggregateMetricSender>() {
-              @Override
-              public AggregateMetricSender answer(InvocationOnMock invocation) {
-                Object[] args = invocation.getArguments();
-                AggregateMetricSender sender =
-                    SignalFxMetricsSenderFactory.DEFAULT.create(
-                        (URI) args[0], (String) args[1], (OnSendErrorHandler) args[2]);
-                AggregateMetricSender spy = Mockito.spy(sender);
-                Mockito.doReturn(session).when(spy).createSession();
-                return spy;
-              }
-            });
-  }
-
-  @Test
-  public void createThread() {
-    SignalFxStatsExporterWorkerThread thread =
-        new SignalFxStatsExporterWorkerThread(
-            factory, endpoint, TEST_TOKEN, ONE_SECOND, viewManager);
-    assertTrue(thread.isDaemon());
-    assertThat(thread.getName(), startsWith("SignalFx"));
-  }
-
-  @Test
-  public void senderThreadInterruptStopsLoop() throws InterruptedException {
-    Mockito.when(session.setDatapoint(Mockito.any(DataPoint.class))).thenReturn(session);
-    Mockito.when(viewManager.getAllExportedViews()).thenReturn(ImmutableSet.<View>of());
-
-    SignalFxStatsExporterWorkerThread thread =
-        new SignalFxStatsExporterWorkerThread(
-            factory, endpoint, TEST_TOKEN, ONE_SECOND, viewManager);
-    thread.start();
-    thread.interrupt();
-    thread.join(5000, 0);
-    assertFalse("Worker thread should have stopped", thread.isAlive());
-  }
-
-  @Test
-  public void setsDatapointsFromViewOnSession() throws IOException {
-    View view = Mockito.mock(View.class);
-    Name viewName = Name.create("test");
-    Mockito.when(view.getName()).thenReturn(viewName);
-    Mockito.when(view.getAggregation()).thenReturn(Aggregation.Mean.create());
-    Mockito.when(view.getWindow()).thenReturn(AggregationWindow.Cumulative.create());
-    Mockito.when(view.getColumns()).thenReturn(ImmutableList.of(TagKey.create("animal")));
-
-    ViewData viewData = Mockito.mock(ViewData.class);
-    Mockito.when(viewData.getView()).thenReturn(view);
-    Mockito.when(viewData.getAggregationMap())
-        .thenReturn(
-            ImmutableMap.<List<TagValue>, AggregationData>of(
-                ImmutableList.of(TagValue.create("cat")), MeanData.create(3.15d, 1)));
-
-    Mockito.when(viewManager.getAllExportedViews()).thenReturn(ImmutableSet.of(view));
-    Mockito.when(viewManager.getView(Mockito.eq(viewName))).thenReturn(viewData);
-
-    SignalFxStatsExporterWorkerThread thread =
-        new SignalFxStatsExporterWorkerThread(
-            factory, endpoint, TEST_TOKEN, ONE_SECOND, viewManager);
-    thread.export();
-
-    DataPoint datapoint =
-        DataPoint.newBuilder()
-            .setMetric("test")
-            .setMetricType(MetricType.GAUGE)
-            .addDimensions(Dimension.newBuilder().setKey("animal").setValue("cat").build())
-            .setValue(Datum.newBuilder().setDoubleValue(3.15d).build())
-            .build();
-    Mockito.verify(session).setDatapoint(Mockito.eq(datapoint));
-    Mockito.verify(session).close();
-  }
-}
diff --git a/exporters/stats/stackdriver/README.md b/exporters/stats/stackdriver/README.md
index 1b35c63..9458531 100644
--- a/exporters/stats/stackdriver/README.md
+++ b/exporters/stats/stackdriver/README.md
@@ -30,17 +30,17 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-api</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-exporter-stats-stackdriver</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-impl</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
     <scope>runtime</scope>
   </dependency>
 </dependencies>
@@ -48,9 +48,9 @@
 
 For Gradle add to your dependencies:
 ```groovy
-compile 'io.opencensus:opencensus-api:0.16.1'
-compile 'io.opencensus:opencensus-exporter-stats-stackdriver:0.16.1'
-runtime 'io.opencensus:opencensus-impl:0.16.1'
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-exporter-stats-stackdriver:0.28.3'
+runtime 'io.opencensus:opencensus-impl:0.28.3'
 ```
 
 #### Register the exporter
@@ -122,7 +122,7 @@
     StackdriverStatsConfiguration.builder()
         .setCredentials(new GoogleCredentials(new AccessToken(accessToken, expirationTime)))
         .setProjectId("MyStackdriverProjectId")
-        .setExportInterval(Duration.create(10, 0))
+        .setExportInterval(Duration.create(60, 0))
         .build());
 ```
 
@@ -156,6 +156,15 @@
 of the `Timeseries`. The format of the label is: `{LANGUAGE}-{PID}@{HOSTNAME}`, if `{PID}` is not
 available a random number will be used.
 
+You have the option to override the "opencensus_task" metric label with custom constant labels using
+`StackdriverStatsConfiguration.Builder.setConstantLabels()`. If you do so, make sure that the 
+monitored resource together with these labels is unique to the current process. This is to ensure 
+that there is only a single writer to each time series in Stackdriver.
+
+You can also set `StackdriverStatsConfiguration.Builder.setConstantLabels()` to an empty map to 
+avoid getting the default "opencensus_task" label. You should only do this if you know that the 
+monitored resource uniquely identifies this process.
+
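+For example, a minimal sketch of replacing the default label with a single custom constant label
+(the `instance_id` key and its value below are illustrative, not part of this change):
+
+```java
+// LabelKey and LabelValue come from the io.opencensus.metrics package.
+Map<LabelKey, LabelValue> constantLabels =
+    Collections.singletonMap(
+        LabelKey.create("instance_id", "Unique id of this process"), // illustrative key
+        LabelValue.create("my-service-instance-1")); // illustrative value
+
+StackdriverStatsExporter.createAndRegister(
+    StackdriverStatsConfiguration.builder()
+        .setProjectId("MyStackdriverProjectId")
+        .setConstantLabels(constantLabels)
+        .build());
+```
+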
 ### Why did I get an error "java.lang.NoSuchMethodError: com.google.common...", like "java.lang.NoSuchMethodError:com.google.common.base.Throwables.throwIfInstanceOf"?
 This is probably because there is a version conflict on Guava in the dependency tree.
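+
+One common workaround is to force a single Guava version in Gradle; this is a sketch only, with an
+illustrative version that you should replace with whatever your other dependencies require:
+
+```groovy
+configurations.all {
+  resolutionStrategy {
+    // Pin one Guava version across the whole dependency tree.
+    force 'com.google.guava:guava:26.0-android'
+  }
+}
+```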
 
diff --git a/exporters/stats/stackdriver/build.gradle b/exporters/stats/stackdriver/build.gradle
index 0bc302a..12fc355 100644
--- a/exporters/stats/stackdriver/build.gradle
+++ b/exporters/stats/stackdriver/build.gradle
@@ -9,8 +9,14 @@
     compileOnly libraries.auto_value
 
     compile project(':opencensus-api'),
-            project(':opencensus-contrib-monitored-resource-util'),
+            project(':opencensus-contrib-exemplar-util'),
+            project(':opencensus-contrib-resource-util'),
+            project(':opencensus-exporter-metrics-util'),
             libraries.google_auth,
+            libraries.grpc_auth,
+            libraries.grpc_core,
+            libraries.grpc_netty_shaded,
+            libraries.grpc_stub,
             libraries.guava
 
     compile (libraries.google_cloud_monitoring) {
@@ -20,11 +26,15 @@
         // Prefer library version.
         exclude group: 'com.google.code.findbugs', module: 'jsr305'
 
+        // Prefer library version.
+        exclude group: 'io.grpc', module: 'grpc-auth'
+        exclude group: 'io.grpc', module: 'grpc-core'
+        exclude group: 'io.grpc', module: 'grpc-netty-shaded'
+        exclude group: 'io.grpc', module: 'grpc-stub'
+
         // We will always be more up to date.
         exclude group: 'io.opencensus', module: 'opencensus-api'
     }
 
-    testCompile project(':opencensus-api')
-
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
 }
\ No newline at end of file
diff --git a/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/CreateMetricDescriptorExporter.java b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/CreateMetricDescriptorExporter.java
new file mode 100644
index 0000000..fc39591
--- /dev/null
+++ b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/CreateMetricDescriptorExporter.java
@@ -0,0 +1,174 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.stackdriver;
+
+import com.google.api.MetricDescriptor;
+import com.google.api.gax.rpc.ApiException;
+import com.google.cloud.monitoring.v3.MetricServiceClient;
+import com.google.common.collect.ImmutableSet;
+import com.google.monitoring.v3.CreateMetricDescriptorRequest;
+import com.google.monitoring.v3.ProjectName;
+import io.opencensus.exporter.metrics.util.MetricExporter;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Status;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.Tracing;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+final class CreateMetricDescriptorExporter extends MetricExporter {
+  private static final Tracer tracer = Tracing.getTracer();
+  private static final Logger logger =
+      Logger.getLogger(CreateMetricDescriptorExporter.class.getName());
+  private static final ImmutableSet<String> SUPPORTED_EXTERNAL_DOMAINS =
+      ImmutableSet.<String>of(
+          "custom.googleapis.com", "external.googleapis.com", "workload.googleapis.com");
+  private static final String GOOGLE_APIS_DOMAIN_SUFFIX = "googleapis.com";
+
+  private final String projectId;
+  private final ProjectName projectName;
+  private final MetricServiceClient metricServiceClient;
+  private final String domain;
+  private final String displayNamePrefix;
+  private final Map<String, io.opencensus.metrics.export.MetricDescriptor>
+      registeredMetricDescriptors = new LinkedHashMap<>();
+  private final Map<LabelKey, LabelValue> constantLabels;
+  private final MetricExporter nextExporter;
+
+  CreateMetricDescriptorExporter(
+      String projectId,
+      MetricServiceClient metricServiceClient,
+      @javax.annotation.Nullable String metricNamePrefix,
+      @javax.annotation.Nullable String displayNamePrefix,
+      Map<LabelKey, LabelValue> constantLabels,
+      MetricExporter nextExporter) {
+    this.projectId = projectId;
+    projectName = ProjectName.newBuilder().setProject(projectId).build();
+    this.metricServiceClient = metricServiceClient;
+    this.domain = StackdriverExportUtils.getDomain(metricNamePrefix);
+    this.displayNamePrefix =
+        StackdriverExportUtils.getDisplayNamePrefix(
+            displayNamePrefix == null ? metricNamePrefix : displayNamePrefix);
+    this.constantLabels = constantLabels;
+    this.nextExporter = nextExporter;
+  }
+
+  // Returns true if the given metricDescriptor is successfully registered to Stackdriver
+  // Monitoring, or the exact same metric descriptor has already been registered.
+  // Returns false otherwise.
+  private boolean registerMetricDescriptor(
+      io.opencensus.metrics.export.MetricDescriptor metricDescriptor) {
+    String metricName = metricDescriptor.getName();
+    io.opencensus.metrics.export.MetricDescriptor existingMetricDescriptor =
+        registeredMetricDescriptors.get(metricName);
+    if (existingMetricDescriptor != null) {
+      if (existingMetricDescriptor.equals(metricDescriptor)) {
+        // Ignore a metricDescriptor that is already registered.
+        return true;
+      } else {
+        logger.log(
+            Level.WARNING,
+            "A different metric with the same name is already registered: "
+                + existingMetricDescriptor);
+        return false;
+      }
+    }
+    registeredMetricDescriptors.put(metricName, metricDescriptor);
+    if (isBuiltInMetric(metricName)) {
+      return true; // Skip creating a metric descriptor for Stackdriver built-in metrics.
+    }
+
+    Span span = tracer.getCurrentSpan();
+    span.addAnnotation("Create Stackdriver Metric.");
+    MetricDescriptor stackDriverMetricDescriptor =
+        StackdriverExportUtils.createMetricDescriptor(
+            metricDescriptor, projectId, domain, displayNamePrefix, constantLabels);
+
+    CreateMetricDescriptorRequest request =
+        CreateMetricDescriptorRequest.newBuilder()
+            .setName(projectName.toString())
+            .setMetricDescriptor(stackDriverMetricDescriptor)
+            .build();
+    try {
+      metricServiceClient.createMetricDescriptor(request);
+      span.addAnnotation("Finish creating MetricDescriptor.");
+      return true;
+    } catch (ApiException e) {
+      logger.log(Level.WARNING, "ApiException thrown when creating MetricDescriptor.", e);
+      span.setStatus(
+          Status.CanonicalCode.valueOf(e.getStatusCode().getCode().name())
+              .toStatus()
+              .withDescription(
+                  "ApiException thrown when creating MetricDescriptor: "
+                      + StackdriverExportUtils.exceptionMessage(e)));
+      return false;
+    } catch (Throwable e) {
+      logger.log(Level.WARNING, "Exception thrown when creating MetricDescriptor.", e);
+      span.setStatus(
+          Status.UNKNOWN.withDescription(
+              "Exception thrown when creating MetricDescriptor: "
+                  + StackdriverExportUtils.exceptionMessage(e)));
+      return false;
+    }
+  }
+
+  @Override
+  public void export(Collection<Metric> metrics) {
+    ArrayList<Metric> registeredMetrics = new ArrayList<>(metrics.size());
+    for (Metric metric : metrics) {
+      final io.opencensus.metrics.export.MetricDescriptor metricDescriptor =
+          metric.getMetricDescriptor();
+      if (metricDescriptor.getType() == Type.SUMMARY) {
+        List<Metric> convertedMetrics = StackdriverExportUtils.convertSummaryMetric(metric);
+        registeredMetrics.ensureCapacity(registeredMetrics.size() + convertedMetrics.size());
+        for (Metric convertedMetric : convertedMetrics) {
+          if (registerMetricDescriptor(convertedMetric.getMetricDescriptor())) {
+            registeredMetrics.add(convertedMetric);
+          }
+        }
+      } else {
+        if (registerMetricDescriptor(metricDescriptor)) {
+          registeredMetrics.add(metric);
+        }
+      }
+    }
+    nextExporter.export(registeredMetrics);
+  }
+
+  private static boolean isBuiltInMetric(String metricName) {
+    int domainIndex = metricName.indexOf('/');
+    if (domainIndex < 0) {
+      return false;
+    }
+    String metricDomain = metricName.substring(0, domainIndex);
+    if (!metricDomain.endsWith(GOOGLE_APIS_DOMAIN_SUFFIX)) {
+      return false; // domains like "my.org" are not Stackdriver built-in metrics.
+    }
+    // All googleapis.com domains except "custom.googleapis.com", "external.googleapis.com",
+    // or "workload.googleapis.com" are built-in metrics.
+    return !SUPPORTED_EXTERNAL_DOMAINS.contains(metricDomain);
+  }
+}
diff --git a/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/CreateTimeSeriesExporter.java b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/CreateTimeSeriesExporter.java
new file mode 100644
index 0000000..4b4c24d
--- /dev/null
+++ b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/CreateTimeSeriesExporter.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.stackdriver;
+
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.MAX_BATCH_EXPORT_SIZE;
+
+import com.google.api.MonitoredResource;
+import com.google.api.gax.rpc.ApiException;
+import com.google.cloud.monitoring.v3.MetricServiceClient;
+import com.google.common.collect.Lists;
+import com.google.monitoring.v3.CreateTimeSeriesRequest;
+import com.google.monitoring.v3.ProjectName;
+import com.google.monitoring.v3.TimeSeries;
+import io.opencensus.exporter.metrics.util.MetricExporter;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Status;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.Tracing;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+final class CreateTimeSeriesExporter extends MetricExporter {
+  private static final Tracer tracer = Tracing.getTracer();
+  private static final Logger logger = Logger.getLogger(CreateTimeSeriesExporter.class.getName());
+
+  private final ProjectName projectName;
+  private final MetricServiceClient metricServiceClient;
+  private final MonitoredResource monitoredResource;
+  private final String domain;
+  private final Map<LabelKey, LabelValue> constantLabels;
+
+  CreateTimeSeriesExporter(
+      String projectId,
+      MetricServiceClient metricServiceClient,
+      MonitoredResource monitoredResource,
+      @javax.annotation.Nullable String metricNamePrefix,
+      Map<LabelKey, LabelValue> constantLabels) {
+    projectName = ProjectName.newBuilder().setProject(projectId).build();
+    this.metricServiceClient = metricServiceClient;
+    this.monitoredResource = monitoredResource;
+    this.domain = StackdriverExportUtils.getDomain(metricNamePrefix);
+    this.constantLabels = constantLabels;
+  }
+
+  @Override
+  public void export(Collection<Metric> metrics) {
+    List<TimeSeries> timeSeriesList = new ArrayList<>(metrics.size());
+    for (Metric metric : metrics) {
+      timeSeriesList.addAll(
+          StackdriverExportUtils.createTimeSeriesList(
+              metric, monitoredResource, domain, projectName.getProject(), constantLabels));
+    }
+
+    Span span = tracer.getCurrentSpan();
+    for (List<TimeSeries> batchedTimeSeries :
+        Lists.partition(timeSeriesList, MAX_BATCH_EXPORT_SIZE)) {
+      span.addAnnotation("Export Stackdriver TimeSeries.");
+      try {
+        CreateTimeSeriesRequest request =
+            CreateTimeSeriesRequest.newBuilder()
+                .setName(projectName.toString())
+                .addAllTimeSeries(batchedTimeSeries)
+                .build();
+        metricServiceClient.createTimeSeries(request);
+        span.addAnnotation("Finish exporting TimeSeries.");
+      } catch (ApiException e) {
+        logger.log(Level.WARNING, "ApiException thrown when exporting TimeSeries.", e);
+        span.setStatus(
+            Status.CanonicalCode.valueOf(e.getStatusCode().getCode().name())
+                .toStatus()
+                .withDescription(
+                    "ApiException thrown when exporting TimeSeries: "
+                        + StackdriverExportUtils.exceptionMessage(e)));
+      } catch (Throwable e) {
+        logger.log(Level.WARNING, "Exception thrown when exporting TimeSeries.", e);
+        span.setStatus(
+            Status.UNKNOWN.withDescription(
+                "Exception thrown when exporting TimeSeries: "
+                    + StackdriverExportUtils.exceptionMessage(e)));
+      }
+    }
+  }
+}
diff --git a/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtils.java b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtils.java
index 4f8715b..8a7b6aa 100644
--- a/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtils.java
+++ b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtils.java
@@ -16,11 +16,10 @@
 
 package io.opencensus.exporter.stats.stackdriver;
 
-import static com.google.common.base.Preconditions.checkArgument;
-
 import com.google.api.Distribution;
 import com.google.api.Distribution.BucketOptions;
 import com.google.api.Distribution.BucketOptions.Explicit;
+import com.google.api.Distribution.Exemplar;
 import com.google.api.LabelDescriptor;
 import com.google.api.LabelDescriptor.ValueType;
 import com.google.api.Metric;
@@ -29,43 +28,46 @@
 import com.google.api.MonitoredResource;
 import com.google.cloud.MetadataConfig;
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.monitoring.v3.Point;
+import com.google.monitoring.v3.SpanContext;
 import com.google.monitoring.v3.TimeInterval;
 import com.google.monitoring.v3.TimeSeries;
 import com.google.monitoring.v3.TypedValue;
-import com.google.monitoring.v3.TypedValue.Builder;
+import com.google.protobuf.Any;
+import com.google.protobuf.ByteString;
 import com.google.protobuf.Timestamp;
 import io.opencensus.common.Function;
 import io.opencensus.common.Functions;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.AwsEc2InstanceMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGceInstanceMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGkeContainerMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResourceUtils;
-import io.opencensus.contrib.monitoredresource.util.ResourceType;
-import io.opencensus.stats.Aggregation;
-import io.opencensus.stats.Aggregation.LastValue;
-import io.opencensus.stats.AggregationData;
-import io.opencensus.stats.AggregationData.CountData;
-import io.opencensus.stats.AggregationData.DistributionData;
-import io.opencensus.stats.AggregationData.LastValueDataDouble;
-import io.opencensus.stats.AggregationData.LastValueDataLong;
-import io.opencensus.stats.AggregationData.SumDataDouble;
-import io.opencensus.stats.AggregationData.SumDataLong;
-import io.opencensus.stats.BucketBoundaries;
-import io.opencensus.stats.Measure;
-import io.opencensus.stats.View;
-import io.opencensus.stats.ViewData;
-import io.opencensus.tags.TagKey;
-import io.opencensus.tags.TagValue;
+import io.opencensus.contrib.exemplar.util.AttachmentValueSpanContext;
+import io.opencensus.contrib.exemplar.util.ExemplarUtils;
+import io.opencensus.contrib.resource.util.CloudResource;
+import io.opencensus.contrib.resource.util.ContainerResource;
+import io.opencensus.contrib.resource.util.HostResource;
+import io.opencensus.contrib.resource.util.K8sResource;
+import io.opencensus.contrib.resource.util.ResourceUtils;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.data.AttachmentValue;
+import io.opencensus.metrics.export.Distribution.Bucket;
+import io.opencensus.metrics.export.Distribution.BucketOptions.ExplicitOptions;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Summary;
+import io.opencensus.metrics.export.Summary.Snapshot;
+import io.opencensus.metrics.export.Summary.Snapshot.ValueAtPercentile;
+import io.opencensus.metrics.export.Value;
+import io.opencensus.resource.Resource;
 import java.lang.management.ManagementFactory;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.security.SecureRandom;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
@@ -73,115 +75,121 @@
 import org.checkerframework.checker.nullness.qual.Nullable;
 */
 
-/** Util methods to convert OpenCensus Stats data models to StackDriver monitoring data models. */
+/** Util methods to convert OpenCensus Metrics data models to StackDriver monitoring data models. */
 @SuppressWarnings("deprecation")
 final class StackdriverExportUtils {
 
-  // TODO(songya): do we want these constants to be customizable?
-  @VisibleForTesting static final String LABEL_DESCRIPTION = "OpenCensus TagKey";
-  @VisibleForTesting static final String OPENCENSUS_TASK = "opencensus_task";
-  @VisibleForTesting static final String OPENCENSUS_TASK_DESCRIPTION = "Opencensus task identifier";
-  private static final String GCP_GKE_CONTAINER = "k8s_container";
+  @VisibleForTesting
+  static final LabelKey OPENCENSUS_TASK_KEY =
+      LabelKey.create("opencensus_task", "Opencensus task identifier");
+
+  @VisibleForTesting
+  static final LabelValue OPENCENSUS_TASK_VALUE_DEFAULT =
+      LabelValue.create(generateDefaultTaskValue());
+
+  static final Map<LabelKey, LabelValue> DEFAULT_CONSTANT_LABELS =
+      Collections.singletonMap(OPENCENSUS_TASK_KEY, OPENCENSUS_TASK_VALUE_DEFAULT);
+
+  @VisibleForTesting static final String STACKDRIVER_PROJECT_ID_KEY = "project_id";
+  @VisibleForTesting static final String DEFAULT_DISPLAY_NAME_PREFIX = "OpenCensus/";
+  @VisibleForTesting static final String CUSTOM_METRIC_DOMAIN = "custom.googleapis.com/";
+
+  @VisibleForTesting
+  static final String CUSTOM_OPENCENSUS_DOMAIN = CUSTOM_METRIC_DOMAIN + "opencensus/";
+  // Stackdriver Monitoring v3 only accepts up to 200 TimeSeries per CreateTimeSeries call.
+  @VisibleForTesting static final int MAX_BATCH_EXPORT_SIZE = 200;
+  private static final String K8S_CONTAINER = "k8s_container";
   private static final String GCP_GCE_INSTANCE = "gce_instance";
   private static final String AWS_EC2_INSTANCE = "aws_ec2_instance";
   private static final String GLOBAL = "global";
+  @VisibleForTesting static final String AWS_REGION_VALUE_PREFIX = "aws:";
 
   private static final Logger logger = Logger.getLogger(StackdriverExportUtils.class.getName());
-  private static final String OPENCENSUS_TASK_VALUE_DEFAULT = generateDefaultTaskValue();
-  private static final String PROJECT_ID_LABEL_KEY = "project_id";
 
-  // Constant functions for ValueType.
-  private static final Function<Object, MetricDescriptor.ValueType> VALUE_TYPE_DOUBLE_FUNCTION =
-      Functions.returnConstant(MetricDescriptor.ValueType.DOUBLE);
-  private static final Function<Object, MetricDescriptor.ValueType> VALUE_TYPE_INT64_FUNCTION =
-      Functions.returnConstant(MetricDescriptor.ValueType.INT64);
-  private static final Function<Object, MetricDescriptor.ValueType>
-      VALUE_TYPE_UNRECOGNIZED_FUNCTION =
-          Functions.returnConstant(MetricDescriptor.ValueType.UNRECOGNIZED);
-  private static final Function<Object, MetricDescriptor.ValueType>
-      VALUE_TYPE_DISTRIBUTION_FUNCTION =
-          Functions.returnConstant(MetricDescriptor.ValueType.DISTRIBUTION);
-  private static final Function<Aggregation, MetricDescriptor.ValueType> valueTypeMeanFunction =
-      new Function<Aggregation, MetricDescriptor.ValueType>() {
-        @Override
-        public MetricDescriptor.ValueType apply(Aggregation arg) {
-          // TODO(songya): remove this once Mean aggregation is completely removed. Before that
-          // we need to continue supporting Mean, since it could still be used by users and some
-          // deprecated RPC views.
-          if (arg instanceof Aggregation.Mean) {
-            return MetricDescriptor.ValueType.DOUBLE;
-          }
-          return MetricDescriptor.ValueType.UNRECOGNIZED;
-        }
-      };
+  // Mappings from the well-known OC resources to the applicable Stackdriver resources.
+  private static final Map<String, String> GCP_RESOURCE_MAPPING = getGcpResourceLabelsMappings();
+  private static final Map<String, String> K8S_RESOURCE_MAPPING = getK8sResourceLabelsMappings();
+  private static final Map<String, String> AWS_RESOURCE_MAPPING = getAwsResourceLabelsMappings();
 
-  // Constant functions for MetricKind.
-  private static final Function<Object, MetricKind> METRIC_KIND_CUMULATIVE_FUNCTION =
-      Functions.returnConstant(MetricKind.CUMULATIVE);
-  private static final Function<Object, MetricKind> METRIC_KIND_UNRECOGNIZED_FUNCTION =
-      Functions.returnConstant(MetricKind.UNRECOGNIZED);
+  @VisibleForTesting
+  static final LabelKey PERCENTILE_LABEL_KEY =
+      LabelKey.create("percentile", "the value at a given percentile of a distribution");
+
+  @VisibleForTesting
+  static final String SNAPSHOT_SUFFIX_PERCENTILE = "_summary_snapshot_percentile";
+
+  @VisibleForTesting static final String SUMMARY_SUFFIX_COUNT = "_summary_count";
+  @VisibleForTesting static final String SUMMARY_SUFFIX_SUM = "_summary_sum";
+
+  // Cached project ID only for Exemplar attachments. Without this we'll have to pass the project ID
+  // every time we convert a Distribution value.
+  @javax.annotation.Nullable private static volatile String cachedProjectIdForExemplar = null;
+
+  @VisibleForTesting
+  static final String EXEMPLAR_ATTACHMENT_TYPE_STRING =
+      "type.googleapis.com/google.protobuf.StringValue";
+
+  @VisibleForTesting
+  static final String EXEMPLAR_ATTACHMENT_TYPE_SPAN_CONTEXT =
+      "type.googleapis.com/google.monitoring.v3.SpanContext";
+
+  // TODO: add support for dropped label attachment.
+  // private static final String EXEMPLAR_ATTACHMENT_TYPE_DROPPED_LABELS =
+  //     "type.googleapis.com/google.monitoring.v3.DroppedLabels";
 
   // Constant functions for TypedValue.
-  private static final Function<SumDataDouble, TypedValue> typedValueSumDoubleFunction =
-      new Function<SumDataDouble, TypedValue>() {
+  private static final Function<Double, TypedValue> typedValueDoubleFunction =
+      new Function<Double, TypedValue>() {
         @Override
-        public TypedValue apply(SumDataDouble arg) {
-          Builder builder = TypedValue.newBuilder();
-          builder.setDoubleValue(arg.getSum());
+        public TypedValue apply(Double arg) {
+          TypedValue.Builder builder = TypedValue.newBuilder();
+          builder.setDoubleValue(arg);
           return builder.build();
         }
       };
-  private static final Function<SumDataLong, TypedValue> typedValueSumLongFunction =
-      new Function<SumDataLong, TypedValue>() {
+  private static final Function<Long, TypedValue> typedValueLongFunction =
+      new Function<Long, TypedValue>() {
         @Override
-        public TypedValue apply(SumDataLong arg) {
-          Builder builder = TypedValue.newBuilder();
-          builder.setInt64Value(arg.getSum());
+        public TypedValue apply(Long arg) {
+          TypedValue.Builder builder = TypedValue.newBuilder();
+          builder.setInt64Value(arg);
           return builder.build();
         }
       };
-  private static final Function<CountData, TypedValue> typedValueCountFunction =
-      new Function<CountData, TypedValue>() {
+  private static final Function<io.opencensus.metrics.export.Distribution, TypedValue>
+      typedValueDistributionFunction =
+          new Function<io.opencensus.metrics.export.Distribution, TypedValue>() {
+            @Override
+            public TypedValue apply(io.opencensus.metrics.export.Distribution arg) {
+              TypedValue.Builder builder = TypedValue.newBuilder();
+              return builder.setDistributionValue(createDistribution(arg)).build();
+            }
+          };
+  private static final Function<Summary, TypedValue> typedValueSummaryFunction =
+      new Function<Summary, TypedValue>() {
         @Override
-        public TypedValue apply(CountData arg) {
-          Builder builder = TypedValue.newBuilder();
-          builder.setInt64Value(arg.getCount());
+        public TypedValue apply(Summary arg) {
+          TypedValue.Builder builder = TypedValue.newBuilder();
           return builder.build();
         }
       };
-  private static final Function<LastValueDataDouble, TypedValue> typedValueLastValueDoubleFunction =
-      new Function<LastValueDataDouble, TypedValue>() {
+
+  // Constant functions for BucketOptions.
+  private static final Function<ExplicitOptions, BucketOptions> bucketOptionsExplicitFunction =
+      new Function<ExplicitOptions, BucketOptions>() {
         @Override
-        public TypedValue apply(LastValueDataDouble arg) {
-          Builder builder = TypedValue.newBuilder();
-          builder.setDoubleValue(arg.getLastValue());
+        public BucketOptions apply(ExplicitOptions arg) {
+          BucketOptions.Builder builder = BucketOptions.newBuilder();
+          Explicit.Builder explicitBuilder = Explicit.newBuilder();
+          // The first bucket bound should be 0.0 because the Metrics first bucket is
+          // [0, first_bound), whereas Stackdriver monitoring bucket bounds begin at -infinity
+          // (the first bucket is (-infinity, 0)).
+          explicitBuilder.addBounds(0.0);
+          explicitBuilder.addAllBounds(arg.getBucketBoundaries());
+          builder.setExplicitBuckets(explicitBuilder.build());
           return builder.build();
         }
       };
-  private static final Function<LastValueDataLong, TypedValue> typedValueLastValueLongFunction =
-      new Function<LastValueDataLong, TypedValue>() {
-        @Override
-        public TypedValue apply(LastValueDataLong arg) {
-          Builder builder = TypedValue.newBuilder();
-          builder.setInt64Value(arg.getLastValue());
-          return builder.build();
-        }
-      };
-  private static final Function<AggregationData, TypedValue> typedValueMeanFunction =
-      new Function<AggregationData, TypedValue>() {
-        @Override
-        public TypedValue apply(AggregationData arg) {
-          Builder builder = TypedValue.newBuilder();
-          // TODO(songya): remove this once Mean aggregation is completely removed. Before that
-          // we need to continue supporting Mean, since it could still be used by users and some
-          // deprecated RPC views.
-          if (arg instanceof AggregationData.MeanData) {
-            builder.setDoubleValue(((AggregationData.MeanData) arg).getMean());
-            return builder.build();
-          }
-          throw new IllegalArgumentException("Unknown Aggregation");
-        }
-      };
 
   private static String generateDefaultTaskValue() {
     // Something like '<pid>@<hostname>', at least in Oracle and OpenJdk JVMs
@@ -200,249 +208,276 @@
     return "java-" + jvmName;
   }
 
-  // Construct a MetricDescriptor using a View.
-  @javax.annotation.Nullable
+  // Convert an OpenCensus MetricDescriptor to a StackDriver MetricDescriptor
   static MetricDescriptor createMetricDescriptor(
-      View view, String projectId, String domain, String displayNamePrefix) {
-    if (!(view.getWindow() instanceof View.AggregationWindow.Cumulative)) {
-      // TODO(songya): Only Cumulative view will be exported to Stackdriver in this version.
-      return null;
-    }
+      io.opencensus.metrics.export.MetricDescriptor metricDescriptor,
+      String projectId,
+      String domain,
+      String displayNamePrefix,
+      Map<LabelKey, LabelValue> constantLabels) {
 
     MetricDescriptor.Builder builder = MetricDescriptor.newBuilder();
-    String viewName = view.getName().asString();
-    String type = generateType(viewName, domain);
+    String type = generateType(metricDescriptor.getName(), domain);
     // Name format refers to
     // cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.metricDescriptors/create
-    builder.setName(String.format("projects/%s/metricDescriptors/%s", projectId, type));
+    builder.setName("projects/" + projectId + "/metricDescriptors/" + type);
     builder.setType(type);
-    builder.setDescription(view.getDescription());
-    String displayName = createDisplayName(viewName, displayNamePrefix);
-    builder.setDisplayName(displayName);
-    for (TagKey tagKey : view.getColumns()) {
-      builder.addLabels(createLabelDescriptor(tagKey));
+    builder.setDescription(metricDescriptor.getDescription());
+    builder.setDisplayName(createDisplayName(metricDescriptor.getName(), displayNamePrefix));
+    for (LabelKey labelKey : metricDescriptor.getLabelKeys()) {
+      builder.addLabels(createLabelDescriptor(labelKey));
     }
-    builder.addLabels(
-        LabelDescriptor.newBuilder()
-            .setKey(OPENCENSUS_TASK)
-            .setDescription(OPENCENSUS_TASK_DESCRIPTION)
-            .setValueType(ValueType.STRING)
-            .build());
-    builder.setUnit(createUnit(view.getAggregation(), view.getMeasure()));
-    builder.setMetricKind(createMetricKind(view.getWindow(), view.getAggregation()));
-    builder.setValueType(createValueType(view.getAggregation(), view.getMeasure()));
+    for (LabelKey labelKey : constantLabels.keySet()) {
+      builder.addLabels(createLabelDescriptor(labelKey));
+    }
+
+    builder.setUnit(metricDescriptor.getUnit());
+    builder.setMetricKind(createMetricKind(metricDescriptor.getType()));
+    builder.setValueType(createValueType(metricDescriptor.getType()));
     return builder.build();
   }
 
-  private static String generateType(String viewName, String domain) {
-    return domain + viewName;
+  private static String generateType(String metricName, String domain) {
+    return domain + metricName;
   }
 
-  private static String createDisplayName(String viewName, String displayNamePrefix) {
-    return displayNamePrefix + viewName;
+  private static String createDisplayName(String metricName, String displayNamePrefix) {
+    return displayNamePrefix + metricName;
   }
 
-  // Construct a LabelDescriptor from a TagKey
+  // Construct a LabelDescriptor from a LabelKey
   @VisibleForTesting
-  static LabelDescriptor createLabelDescriptor(TagKey tagKey) {
+  static LabelDescriptor createLabelDescriptor(LabelKey labelKey) {
     LabelDescriptor.Builder builder = LabelDescriptor.newBuilder();
-    builder.setKey(tagKey.getName());
-    builder.setDescription(LABEL_DESCRIPTION);
+    builder.setKey(labelKey.getKey());
+    builder.setDescription(labelKey.getDescription());
     // Now we only support String tags
     builder.setValueType(ValueType.STRING);
     return builder.build();
   }
 
-  // Construct a MetricKind from an AggregationWindow
+  // Convert an OpenCensus Type to a StackDriver MetricKind
   @VisibleForTesting
-  static MetricKind createMetricKind(View.AggregationWindow window, Aggregation aggregation) {
-    if (aggregation instanceof LastValue) {
+  static MetricKind createMetricKind(Type type) {
+    if (type == Type.GAUGE_INT64 || type == Type.GAUGE_DOUBLE) {
       return MetricKind.GAUGE;
+    } else if (type == Type.CUMULATIVE_INT64
+        || type == Type.CUMULATIVE_DOUBLE
+        || type == Type.CUMULATIVE_DISTRIBUTION) {
+      return MetricKind.CUMULATIVE;
     }
-    return window.match(
-        METRIC_KIND_CUMULATIVE_FUNCTION, // Cumulative
-        // TODO(songya): We don't support exporting Interval stats to StackDriver in this version.
-        METRIC_KIND_UNRECOGNIZED_FUNCTION, // Interval
-        METRIC_KIND_UNRECOGNIZED_FUNCTION);
+    return MetricKind.UNRECOGNIZED;
   }
 
-  // Construct a MetricDescriptor.ValueType from an Aggregation and a Measure
+  // Convert an OpenCensus Type to a StackDriver ValueType
   @VisibleForTesting
-  static String createUnit(Aggregation aggregation, final Measure measure) {
-    if (aggregation instanceof Aggregation.Count) {
-      return "1";
+  static MetricDescriptor.ValueType createValueType(Type type) {
+    if (type == Type.CUMULATIVE_DOUBLE || type == Type.GAUGE_DOUBLE) {
+      return MetricDescriptor.ValueType.DOUBLE;
+    } else if (type == Type.GAUGE_INT64 || type == Type.CUMULATIVE_INT64) {
+      return MetricDescriptor.ValueType.INT64;
+    } else if (type == Type.GAUGE_DISTRIBUTION || type == Type.CUMULATIVE_DISTRIBUTION) {
+      return MetricDescriptor.ValueType.DISTRIBUTION;
     }
-    return measure.getUnit();
+    return MetricDescriptor.ValueType.UNRECOGNIZED;
   }
 
-  // Construct a MetricDescriptor.ValueType from an Aggregation and a Measure
-  @VisibleForTesting
-  static MetricDescriptor.ValueType createValueType(
-      Aggregation aggregation, final Measure measure) {
-    return aggregation.match(
-        Functions.returnConstant(
-            measure.match(
-                VALUE_TYPE_DOUBLE_FUNCTION, // Sum Double
-                VALUE_TYPE_INT64_FUNCTION, // Sum Long
-                VALUE_TYPE_UNRECOGNIZED_FUNCTION)),
-        VALUE_TYPE_INT64_FUNCTION, // Count
-        VALUE_TYPE_DISTRIBUTION_FUNCTION, // Distribution
-        Functions.returnConstant(
-            measure.match(
-                VALUE_TYPE_DOUBLE_FUNCTION, // LastValue Double
-                VALUE_TYPE_INT64_FUNCTION, // LastValue Long
-                VALUE_TYPE_UNRECOGNIZED_FUNCTION)),
-        valueTypeMeanFunction);
-  }
-
-  // Convert ViewData to a list of TimeSeries, so that ViewData can be uploaded to Stackdriver.
+  // Convert a metric's time series to a list of TimeSeries, so that the metric can be uploaded to
+  // StackDriver.
   static List<TimeSeries> createTimeSeriesList(
-      @javax.annotation.Nullable ViewData viewData,
+      io.opencensus.metrics.export.Metric metric,
       MonitoredResource monitoredResource,
-      String domain) {
+      String domain,
+      String projectId,
+      Map<LabelKey, LabelValue> constantLabels) {
     List<TimeSeries> timeSeriesList = Lists.newArrayList();
-    if (viewData == null) {
-      return timeSeriesList;
-    }
-    View view = viewData.getView();
-    if (!(view.getWindow() instanceof View.AggregationWindow.Cumulative)) {
-      // TODO(songya): Only Cumulative view will be exported to Stackdriver in this version.
-      return timeSeriesList;
+    io.opencensus.metrics.export.MetricDescriptor metricDescriptor = metric.getMetricDescriptor();
+
+    if (!projectId.equals(cachedProjectIdForExemplar)) {
+      cachedProjectIdForExemplar = projectId;
     }
 
-    // Shared fields for all TimeSeries generated from the same ViewData
+    // Shared fields for all TimeSeries generated from the same Metric
     TimeSeries.Builder shared = TimeSeries.newBuilder();
-    shared.setMetricKind(createMetricKind(view.getWindow(), view.getAggregation()));
+    shared.setMetricKind(createMetricKind(metricDescriptor.getType()));
     shared.setResource(monitoredResource);
-    shared.setValueType(createValueType(view.getAggregation(), view.getMeasure()));
+    shared.setValueType(createValueType(metricDescriptor.getType()));
 
-    // Each entry in AggregationMap will be converted into an independent TimeSeries object
-    for (Entry<List</*@Nullable*/ TagValue>, AggregationData> entry :
-        viewData.getAggregationMap().entrySet()) {
+    // Each entry in the metric's time series list will be converted into an independent
+    // TimeSeries object
+    for (io.opencensus.metrics.export.TimeSeries timeSeries : metric.getTimeSeriesList()) {
+      // TODO(mayurkale): Consider using setPoints instead of builder clone and addPoints.
       TimeSeries.Builder builder = shared.clone();
-      builder.setMetric(createMetric(view, entry.getKey(), domain));
-      builder.addPoints(
-          createPoint(entry.getValue(), viewData.getWindowData(), view.getAggregation()));
+      builder.setMetric(
+          createMetric(metricDescriptor, timeSeries.getLabelValues(), domain, constantLabels));
+
+      io.opencensus.common.Timestamp startTimeStamp = timeSeries.getStartTimestamp();
+      for (io.opencensus.metrics.export.Point point : timeSeries.getPoints()) {
+        builder.addPoints(createPoint(point, startTimeStamp));
+      }
       timeSeriesList.add(builder.build());
     }
-
     return timeSeriesList;
   }
 
-  // Create a Metric using the TagKeys and TagValues.
+  // Create a Metric using the LabelKeys and LabelValues.
   @VisibleForTesting
-  static Metric createMetric(View view, List</*@Nullable*/ TagValue> tagValues, String domain) {
+  static Metric createMetric(
+      io.opencensus.metrics.export.MetricDescriptor metricDescriptor,
+      List<LabelValue> labelValues,
+      String domain,
+      Map<LabelKey, LabelValue> constantLabels) {
     Metric.Builder builder = Metric.newBuilder();
-    // TODO(songya): use pre-defined metrics for canonical views
-    builder.setType(generateType(view.getName().asString(), domain));
+    builder.setType(generateType(metricDescriptor.getName(), domain));
     Map<String, String> stringTagMap = Maps.newHashMap();
-    List<TagKey> columns = view.getColumns();
-    checkArgument(
-        tagValues.size() == columns.size(), "TagKeys and TagValues don't have same size.");
-    for (int i = 0; i < tagValues.size(); i++) {
-      TagKey key = columns.get(i);
-      TagValue value = tagValues.get(i);
+    List<LabelKey> labelKeys = metricDescriptor.getLabelKeys();
+    for (int i = 0; i < labelValues.size(); i++) {
+      String value = labelValues.get(i).getValue();
       if (value == null) {
         continue;
       }
-      stringTagMap.put(key.getName(), value.asString());
+      stringTagMap.put(labelKeys.get(i).getKey(), value);
     }
-    stringTagMap.put(OPENCENSUS_TASK, OPENCENSUS_TASK_VALUE_DEFAULT);
+    for (Map.Entry<LabelKey, LabelValue> constantLabel : constantLabels.entrySet()) {
+      String constantLabelKey = constantLabel.getKey().getKey();
+      String constantLabelValue = constantLabel.getValue().getValue();
+      constantLabelValue = constantLabelValue == null ? "" : constantLabelValue;
+      stringTagMap.put(constantLabelKey, constantLabelValue);
+    }
     builder.putAllLabels(stringTagMap);
     return builder.build();
   }
 
-  // Create Point from AggregationData, AggregationWindowData and Aggregation.
+  // Convert an OpenCensus Point to a StackDriver Point
   @VisibleForTesting
   static Point createPoint(
-      AggregationData aggregationData,
-      ViewData.AggregationWindowData windowData,
-      Aggregation aggregation) {
+      io.opencensus.metrics.export.Point point,
+      @javax.annotation.Nullable io.opencensus.common.Timestamp startTimestamp) {
+    TimeInterval.Builder timeIntervalBuilder = TimeInterval.newBuilder();
+    timeIntervalBuilder.setEndTime(convertTimestamp(point.getTimestamp()));
+    if (startTimestamp != null) {
+      timeIntervalBuilder.setStartTime(convertTimestamp(startTimestamp));
+    }
+
     Point.Builder builder = Point.newBuilder();
-    builder.setInterval(createTimeInterval(windowData, aggregation));
-    builder.setValue(createTypedValue(aggregation, aggregationData));
+    builder.setInterval(timeIntervalBuilder.build());
+    builder.setValue(createTypedValue(point.getValue()));
     return builder.build();
   }
 
-  // Convert AggregationWindowData to TimeInterval, currently only support CumulativeData.
-  @VisibleForTesting
-  static TimeInterval createTimeInterval(
-      ViewData.AggregationWindowData windowData, final Aggregation aggregation) {
-    return windowData.match(
-        new Function<ViewData.AggregationWindowData.CumulativeData, TimeInterval>() {
-          @Override
-          public TimeInterval apply(ViewData.AggregationWindowData.CumulativeData arg) {
-            TimeInterval.Builder builder = TimeInterval.newBuilder();
-            builder.setEndTime(convertTimestamp(arg.getEnd()));
-            if (!(aggregation instanceof LastValue)) {
-              builder.setStartTime(convertTimestamp(arg.getStart()));
-            }
-            return builder.build();
-          }
-        },
-        Functions.<TimeInterval>throwIllegalArgumentException(),
-        Functions.<TimeInterval>throwIllegalArgumentException());
-  }
-
-  // Create a TypedValue using AggregationData and Aggregation
+  // Convert an OpenCensus Value to a StackDriver TypedValue
   // Note TypedValue is "A single strongly-typed value", i.e only one field should be set.
   @VisibleForTesting
-  static TypedValue createTypedValue(
-      final Aggregation aggregation, AggregationData aggregationData) {
-    return aggregationData.match(
-        typedValueSumDoubleFunction,
-        typedValueSumLongFunction,
-        typedValueCountFunction,
-        new Function<DistributionData, TypedValue>() {
-          @Override
-          public TypedValue apply(DistributionData arg) {
-            TypedValue.Builder builder = TypedValue.newBuilder();
-            checkArgument(
-                aggregation instanceof Aggregation.Distribution,
-                "Aggregation and AggregationData mismatch.");
-            builder.setDistributionValue(
-                createDistribution(
-                    arg, ((Aggregation.Distribution) aggregation).getBucketBoundaries()));
-            return builder.build();
-          }
-        },
-        typedValueLastValueDoubleFunction,
-        typedValueLastValueLongFunction,
-        typedValueMeanFunction);
+  static TypedValue createTypedValue(Value value) {
+    return value.match(
+        typedValueDoubleFunction,
+        typedValueLongFunction,
+        typedValueDistributionFunction,
+        typedValueSummaryFunction,
+        Functions.<TypedValue>throwIllegalArgumentException());
   }
 
-  // Create a StackDriver Distribution from DistributionData and BucketBoundaries
+  // Convert an OpenCensus Distribution to a StackDriver Distribution
   @VisibleForTesting
-  static Distribution createDistribution(
-      DistributionData distributionData, BucketBoundaries bucketBoundaries) {
-    return Distribution.newBuilder()
-        .setBucketOptions(createBucketOptions(bucketBoundaries))
-        .addAllBucketCounts(distributionData.getBucketCounts())
-        .setCount(distributionData.getCount())
-        .setMean(distributionData.getMean())
-        // TODO(songya): uncomment this once Stackdriver supports setting max and min.
-        // .setRange(
-        //    Range.newBuilder()
-        //        .setMax(distributionData.getMax())
-        //        .setMin(distributionData.getMin())
-        //        .build())
-        .setSumOfSquaredDeviation(distributionData.getSumOfSquaredDeviations())
+  static Distribution createDistribution(io.opencensus.metrics.export.Distribution distribution) {
+    Distribution.Builder builder =
+        Distribution.newBuilder()
+            .setBucketOptions(createBucketOptions(distribution.getBucketOptions()))
+            .setCount(distribution.getCount())
+            .setMean(
+                distribution.getCount() == 0 ? 0 : distribution.getSum() / distribution.getCount())
+            .setSumOfSquaredDeviation(distribution.getSumOfSquaredDeviations());
+    setBucketCountsAndExemplars(distribution.getBuckets(), builder);
+    return builder.build();
+  }
+
+  // Convert an OpenCensus BucketOptions to a StackDriver BucketOptions
+  @VisibleForTesting
+  static BucketOptions createBucketOptions(
+      @javax.annotation.Nullable
+          io.opencensus.metrics.export.Distribution.BucketOptions bucketOptions) {
+    final BucketOptions.Builder builder = BucketOptions.newBuilder();
+    if (bucketOptions == null) {
+      return builder.build();
+    }
+
+    return bucketOptions.match(
+        bucketOptionsExplicitFunction, Functions.<BucketOptions>throwIllegalArgumentException());
+  }
+
+  // Convert OpenCensus Buckets to a list of bucket counts and a list of proto Exemplars, then set
+  // them to the builder.
+  private static void setBucketCountsAndExemplars(
+      List<Bucket> buckets, Distribution.Builder builder) {
+    // The first bucket (the underflow bucket) should always have a count of 0 because the Metrics
+    // first bucket is [0, first_bound), whereas a StackDriver distribution starts with an
+    // underflow bucket (bucket number 0).
+    builder.addBucketCounts(0L);
+    for (Bucket bucket : buckets) {
+      builder.addBucketCounts(bucket.getCount());
+      @javax.annotation.Nullable
+      io.opencensus.metrics.data.Exemplar exemplar = bucket.getExemplar();
+      if (exemplar != null) {
+        builder.addExemplars(toProtoExemplar(exemplar));
+      }
+    }
+  }
+
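+  // Convert an OpenCensus Exemplar to a proto Exemplar; the span context attachment (if any) is
+  // converted to a Stackdriver SpanContext attachment, all other attachments are kept as strings.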
+  private static Exemplar toProtoExemplar(io.opencensus.metrics.data.Exemplar exemplar) {
+    Exemplar.Builder builder =
+        Exemplar.newBuilder()
+            .setValue(exemplar.getValue())
+            .setTimestamp(convertTimestamp(exemplar.getTimestamp()));
+    @javax.annotation.Nullable io.opencensus.trace.SpanContext spanContext = null;
+    for (Map.Entry<String, AttachmentValue> attachment : exemplar.getAttachments().entrySet()) {
+      String key = attachment.getKey();
+      AttachmentValue value = attachment.getValue();
+      if (ExemplarUtils.ATTACHMENT_KEY_SPAN_CONTEXT.equals(key)) {
+        spanContext = ((AttachmentValueSpanContext) value).getSpanContext();
+      } else { // Everything else will be treated as plain strings for now.
+        builder.addAttachments(toProtoStringAttachment(value));
+      }
+    }
+    if (spanContext != null && cachedProjectIdForExemplar != null) {
+      SpanContext protoSpanContext = toProtoSpanContext(spanContext, cachedProjectIdForExemplar);
+      builder.addAttachments(toProtoSpanContextAttachment(protoSpanContext));
+    }
+    return builder.build();
+  }
+
+  private static Any toProtoStringAttachment(AttachmentValue attachmentValue) {
+    return Any.newBuilder()
+        .setTypeUrl(EXEMPLAR_ATTACHMENT_TYPE_STRING)
+        .setValue(ByteString.copyFromUtf8(attachmentValue.getValue()))
         .build();
   }
 
-  // Create BucketOptions from BucketBoundaries
-  @VisibleForTesting
-  static BucketOptions createBucketOptions(BucketBoundaries bucketBoundaries) {
-    return BucketOptions.newBuilder()
-        .setExplicitBuckets(Explicit.newBuilder().addAllBounds(bucketBoundaries.getBoundaries()))
+  private static Any toProtoSpanContextAttachment(SpanContext protoSpanContext) {
+    return Any.newBuilder()
+        .setTypeUrl(EXEMPLAR_ATTACHMENT_TYPE_SPAN_CONTEXT)
+        .setValue(protoSpanContext.toByteString())
         .build();
   }
 
-  // Convert a Census Timestamp to a StackDriver Timestamp
+  private static SpanContext toProtoSpanContext(
+      io.opencensus.trace.SpanContext spanContext, String projectId) {
+    String spanName =
+        String.format(
+            "projects/%s/traces/%s/spans/%s",
+            projectId,
+            spanContext.getTraceId().toLowerBase16(),
+            spanContext.getSpanId().toLowerBase16());
+    return SpanContext.newBuilder().setSpanName(spanName).build();
+  }
+
+  @VisibleForTesting
+  static void setCachedProjectIdForExemplar(@javax.annotation.Nullable String projectId) {
+    cachedProjectIdForExemplar = projectId;
+  }
+
+  // Convert an OpenCensus Timestamp to a StackDriver Timestamp
   @VisibleForTesting
   static Timestamp convertTimestamp(io.opencensus.common.Timestamp censusTimestamp) {
     if (censusTimestamp.getSeconds() < 0) {
-      // Stackdriver doesn't handle negative timestamps.
+      // StackDriver doesn't handle negative timestamps.
       return Timestamp.newBuilder().build();
     }
     return Timestamp.newBuilder()
@@ -451,68 +486,247 @@
         .build();
   }
 
-  /* Return a self-configured Stackdriver monitored resource. */
+  /* Return a self-configured StackDriver monitored resource. */
   static MonitoredResource getDefaultResource() {
     MonitoredResource.Builder builder = MonitoredResource.newBuilder();
-    io.opencensus.contrib.monitoredresource.util.MonitoredResource autoDetectedResource =
-        MonitoredResourceUtils.getDefaultResource();
-    if (autoDetectedResource == null) {
+    // Populate the internal resource label used to default the project_id label.
+    // This allows stats from other projects (e.g. from GAE running in another project) to be
+    // collected.
+    if (MetadataConfig.getProjectId() != null) {
+      builder.putLabels(STACKDRIVER_PROJECT_ID_KEY, MetadataConfig.getProjectId());
+    }
+
+    Resource autoDetectedResource = ResourceUtils.detectResource();
+    if (autoDetectedResource == null || autoDetectedResource.getType() == null) {
       builder.setType(GLOBAL);
-      if (MetadataConfig.getProjectId() != null) {
-        // For default global resource, always use the project id from MetadataConfig. This allows
-        // stats from other projects (e.g from GAE running in another project) to be collected.
-        builder.putLabels(PROJECT_ID_LABEL_KEY, MetadataConfig.getProjectId());
-      }
       return builder.build();
     }
-    builder.setType(mapToStackdriverResourceType(autoDetectedResource.getResourceType()));
-    setMonitoredResourceLabelsForBuilder(builder, autoDetectedResource);
+
+    setResourceForBuilder(builder, autoDetectedResource);
     return builder.build();
   }
 
-  private static String mapToStackdriverResourceType(ResourceType resourceType) {
-    switch (resourceType) {
-      case GCP_GCE_INSTANCE:
-        return GCP_GCE_INSTANCE;
-      case GCP_GKE_CONTAINER:
-        return GCP_GKE_CONTAINER;
-      case AWS_EC2_INSTANCE:
-        return AWS_EC2_INSTANCE;
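+  // Map an auto-detected OpenCensus Resource to the corresponding Stackdriver monitored resource
+  // type and labels, defaulting to the "global" type when no mapping applies.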
+  @VisibleForTesting
+  static void setResourceForBuilder(
+      MonitoredResource.Builder builder, Resource autoDetectedResource) {
+    String type = autoDetectedResource.getType();
+    if (type == null) {
+      return;
     }
-    throw new IllegalArgumentException("Unknown resource type.");
+    String sdType = GLOBAL;
+
+    Map<String, String> mappings = null;
+    if (HostResource.TYPE.equals(type)) {
+      String provider = autoDetectedResource.getLabels().get(CloudResource.PROVIDER_KEY);
+      if (CloudResource.PROVIDER_GCP.equals(provider)) {
+        sdType = GCP_GCE_INSTANCE;
+        mappings = GCP_RESOURCE_MAPPING;
+      } else if (CloudResource.PROVIDER_AWS.equals(provider)) {
+        sdType = AWS_EC2_INSTANCE;
+        mappings = AWS_RESOURCE_MAPPING;
+      }
+    } else if (ContainerResource.TYPE.equals(type)) {
+      sdType = K8S_CONTAINER;
+      mappings = K8S_RESOURCE_MAPPING;
+    }
+
+    builder.setType(sdType);
+
+    if (GLOBAL.equals(sdType) || mappings == null) {
+      return;
+    }
+
+    Map<String, String> resLabels = autoDetectedResource.getLabels();
+    for (Map.Entry<String, String> entry : mappings.entrySet()) {
+      if (entry.getValue() != null && resLabels.containsKey(entry.getValue())) {
+        String resourceLabelKey = entry.getKey();
+        String resourceLabelValue = resLabels.get(entry.getValue());
+        if (AWS_EC2_INSTANCE.equals(sdType) && "region".equals(resourceLabelKey)) {
+          // Add "aws:" prefix to AWS EC2 region label. This is Stackdriver specific requirement.
+          resourceLabelValue = AWS_REGION_VALUE_PREFIX + resourceLabelValue;
+        }
+        builder.putLabels(resourceLabelKey, resourceLabelValue);
+      }
+    }
   }
 
-  private static void setMonitoredResourceLabelsForBuilder(
-      MonitoredResource.Builder builder,
-      io.opencensus.contrib.monitoredresource.util.MonitoredResource autoDetectedResource) {
-    switch (autoDetectedResource.getResourceType()) {
-      case GCP_GCE_INSTANCE:
-        GcpGceInstanceMonitoredResource gcpGceInstanceMonitoredResource =
-            (GcpGceInstanceMonitoredResource) autoDetectedResource;
-        builder.putLabels(PROJECT_ID_LABEL_KEY, gcpGceInstanceMonitoredResource.getAccount());
-        builder.putLabels("instance_id", gcpGceInstanceMonitoredResource.getInstanceId());
-        builder.putLabels("zone", gcpGceInstanceMonitoredResource.getZone());
-        return;
-      case GCP_GKE_CONTAINER:
-        GcpGkeContainerMonitoredResource gcpGkeContainerMonitoredResource =
-            (GcpGkeContainerMonitoredResource) autoDetectedResource;
-        builder.putLabels(PROJECT_ID_LABEL_KEY, gcpGkeContainerMonitoredResource.getAccount());
-        builder.putLabels("cluster_name", gcpGkeContainerMonitoredResource.getClusterName());
-        builder.putLabels("container_name", gcpGkeContainerMonitoredResource.getContainerName());
-        builder.putLabels("namespace_name", gcpGkeContainerMonitoredResource.getNamespaceId());
-        builder.putLabels("pod_name", gcpGkeContainerMonitoredResource.getPodId());
-        builder.putLabels("location", gcpGkeContainerMonitoredResource.getZone());
-        return;
-      case AWS_EC2_INSTANCE:
-        AwsEc2InstanceMonitoredResource awsEc2InstanceMonitoredResource =
-            (AwsEc2InstanceMonitoredResource) autoDetectedResource;
-        builder.putLabels("aws_account", awsEc2InstanceMonitoredResource.getAccount());
-        builder.putLabels("instance_id", awsEc2InstanceMonitoredResource.getInstanceId());
-        builder.putLabels("region", "aws:" + awsEc2InstanceMonitoredResource.getRegion());
-        return;
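+  // Split a Summary metric into separate metrics that Stackdriver can ingest: a cumulative count,
+  // a cumulative sum and a gauge of the snapshot percentile values.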
+  @VisibleForTesting
+  static List<io.opencensus.metrics.export.Metric> convertSummaryMetric(
+      io.opencensus.metrics.export.Metric summaryMetric) {
+    List<io.opencensus.metrics.export.Metric> metricsList = Lists.newArrayList();
+    final List<io.opencensus.metrics.export.TimeSeries> percentileTimeSeries = new ArrayList<>();
+    final List<io.opencensus.metrics.export.TimeSeries> summaryCountTimeSeries = new ArrayList<>();
+    final List<io.opencensus.metrics.export.TimeSeries> summarySumTimeSeries = new ArrayList<>();
+    for (final io.opencensus.metrics.export.TimeSeries timeSeries :
+        summaryMetric.getTimeSeriesList()) {
+      final List<LabelValue> labelValuesWithPercentile =
+          new ArrayList<>(timeSeries.getLabelValues());
+      final io.opencensus.common.Timestamp timeSeriesTimestamp = timeSeries.getStartTimestamp();
+      for (io.opencensus.metrics.export.Point point : timeSeries.getPoints()) {
+        final io.opencensus.common.Timestamp pointTimestamp = point.getTimestamp();
+        point
+            .getValue()
+            .match(
+                Functions.<Void>returnNull(),
+                Functions.<Void>returnNull(),
+                Functions.<Void>returnNull(),
+                new Function<Summary, Void>() {
+                  @Override
+                  public Void apply(Summary summary) {
+                    Long count = summary.getCount();
+                    if (count != null) {
+                      createTimeSeries(
+                          timeSeries.getLabelValues(),
+                          Value.longValue(count),
+                          pointTimestamp,
+                          timeSeriesTimestamp,
+                          summaryCountTimeSeries);
+                    }
+                    Double sum = summary.getSum();
+                    if (sum != null) {
+                      createTimeSeries(
+                          timeSeries.getLabelValues(),
+                          Value.doubleValue(sum),
+                          pointTimestamp,
+                          timeSeriesTimestamp,
+                          summarySumTimeSeries);
+                    }
+                    Snapshot snapshot = summary.getSnapshot();
+                    for (ValueAtPercentile valueAtPercentile : snapshot.getValueAtPercentiles()) {
+                      labelValuesWithPercentile.add(
+                          LabelValue.create(valueAtPercentile.getPercentile() + ""));
+                      createTimeSeries(
+                          labelValuesWithPercentile,
+                          Value.doubleValue(valueAtPercentile.getValue()),
+                          pointTimestamp,
+                          null,
+                          percentileTimeSeries);
+                      labelValuesWithPercentile.remove(labelValuesWithPercentile.size() - 1);
+                    }
+                    return null;
+                  }
+                },
+                Functions.<Void>returnNull());
+      }
     }
-    throw new IllegalArgumentException("Unknown subclass of MonitoredResource.");
+
+    // Metric for summary->count.
+    if (summaryCountTimeSeries.size() > 0) {
+      addMetric(
+          metricsList,
+          io.opencensus.metrics.export.MetricDescriptor.create(
+              summaryMetric.getMetricDescriptor().getName() + SUMMARY_SUFFIX_COUNT,
+              summaryMetric.getMetricDescriptor().getDescription(),
+              "1",
+              Type.CUMULATIVE_INT64,
+              summaryMetric.getMetricDescriptor().getLabelKeys()),
+          summaryCountTimeSeries);
+    }
+
+    // Metric for summary->sum.
+    if (summarySumTimeSeries.size() > 0) {
+      addMetric(
+          metricsList,
+          io.opencensus.metrics.export.MetricDescriptor.create(
+              summaryMetric.getMetricDescriptor().getName() + SUMMARY_SUFFIX_SUM,
+              summaryMetric.getMetricDescriptor().getDescription(),
+              summaryMetric.getMetricDescriptor().getUnit(),
+              Type.CUMULATIVE_DOUBLE,
+              summaryMetric.getMetricDescriptor().getLabelKeys()),
+          summarySumTimeSeries);
+    }
+
+    // Metric for summary->snapshot->percentiles.
+    List<LabelKey> labelKeys = new ArrayList<>(summaryMetric.getMetricDescriptor().getLabelKeys());
+    labelKeys.add(PERCENTILE_LABEL_KEY);
+    addMetric(
+        metricsList,
+        io.opencensus.metrics.export.MetricDescriptor.create(
+            summaryMetric.getMetricDescriptor().getName() + SNAPSHOT_SUFFIX_PERCENTILE,
+            summaryMetric.getMetricDescriptor().getDescription(),
+            summaryMetric.getMetricDescriptor().getUnit(),
+            Type.GAUGE_DOUBLE,
+            labelKeys),
+        percentileTimeSeries);
+    return metricsList;
+  }
+
+  private static void addMetric(
+      List<io.opencensus.metrics.export.Metric> metricsList,
+      io.opencensus.metrics.export.MetricDescriptor metricDescriptor,
+      List<io.opencensus.metrics.export.TimeSeries> timeSeriesList) {
+    metricsList.add(io.opencensus.metrics.export.Metric.create(metricDescriptor, timeSeriesList));
+  }
+
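+  // Build a TimeSeries with a single point and add it to the given list.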
+  private static void createTimeSeries(
+      List<LabelValue> labelValues,
+      Value value,
+      io.opencensus.common.Timestamp pointTimestamp,
+      @javax.annotation.Nullable io.opencensus.common.Timestamp timeSeriesTimestamp,
+      List<io.opencensus.metrics.export.TimeSeries> timeSeriesList) {
+    timeSeriesList.add(
+        io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+            labelValues,
+            io.opencensus.metrics.export.Point.create(value, pointTimestamp),
+            timeSeriesTimestamp));
+  }
+
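+  // Maps Stackdriver resource label names to the OpenCensus resource label keys they are read
+  // from.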
+  private static Map<String, String> getGcpResourceLabelsMappings() {
+    Map<String, String> resourceLabels = new LinkedHashMap<String, String>();
+    resourceLabels.put("project_id", STACKDRIVER_PROJECT_ID_KEY);
+    resourceLabels.put("instance_id", HostResource.ID_KEY);
+    resourceLabels.put("zone", CloudResource.ZONE_KEY);
+    return Collections.unmodifiableMap(resourceLabels);
+  }
+
+  private static Map<String, String> getK8sResourceLabelsMappings() {
+    Map<String, String> resourceLabels = new LinkedHashMap<String, String>();
+    resourceLabels.put("project_id", STACKDRIVER_PROJECT_ID_KEY);
+    resourceLabels.put("location", CloudResource.ZONE_KEY);
+    resourceLabels.put("cluster_name", K8sResource.CLUSTER_NAME_KEY);
+    resourceLabels.put("namespace_name", K8sResource.NAMESPACE_NAME_KEY);
+    resourceLabels.put("pod_name", K8sResource.POD_NAME_KEY);
+    resourceLabels.put("container_name", ContainerResource.NAME_KEY);
+    return Collections.unmodifiableMap(resourceLabels);
+  }
+
+  private static Map<String, String> getAwsResourceLabelsMappings() {
+    Map<String, String> resourceLabels = new LinkedHashMap<String, String>();
+    resourceLabels.put("project_id", STACKDRIVER_PROJECT_ID_KEY);
+    resourceLabels.put("instance_id", HostResource.ID_KEY);
+    resourceLabels.put("region", CloudResource.REGION_KEY);
+    resourceLabels.put("aws_account", CloudResource.ACCOUNT_ID_KEY);
+    return Collections.unmodifiableMap(resourceLabels);
   }
 
   private StackdriverExportUtils() {}
+
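+  // Return the throwable's message, or its class name when the message is null.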
+  static String exceptionMessage(Throwable e) {
+    return e.getMessage() != null ? e.getMessage() : e.getClass().getName();
+  }
+
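+  // Return the metric type domain derived from the configured prefix, defaulting to the custom
+  // OpenCensus domain and always ending with '/'.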
+  static String getDomain(@javax.annotation.Nullable String metricNamePrefix) {
+    String domain;
+    if (Strings.isNullOrEmpty(metricNamePrefix)) {
+      domain = CUSTOM_OPENCENSUS_DOMAIN;
+    } else {
+      if (!metricNamePrefix.endsWith("/")) {
+        domain = metricNamePrefix + '/';
+      } else {
+        domain = metricNamePrefix;
+      }
+    }
+    return domain;
+  }
+
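+  // Return the display name prefix derived from the configured prefix, defaulting to
+  // DEFAULT_DISPLAY_NAME_PREFIX when none is given.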
+  static String getDisplayNamePrefix(@javax.annotation.Nullable String metricNamePrefix) {
+    if (metricNamePrefix == null) {
+      return DEFAULT_DISPLAY_NAME_PREFIX;
+    } else {
+      if (!metricNamePrefix.endsWith("/") && !metricNamePrefix.isEmpty()) {
+        metricNamePrefix += '/';
+      }
+      return metricNamePrefix;
+    }
+  }
 }
diff --git a/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExporterWorker.java b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExporterWorker.java
deleted file mode 100644
index 5ffed9d..0000000
--- a/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExporterWorker.java
+++ /dev/null
@@ -1,274 +0,0 @@
-/*
- * Copyright 2017, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.exporter.stats.stackdriver;
-
-import com.google.api.MetricDescriptor;
-import com.google.api.MonitoredResource;
-import com.google.api.gax.rpc.ApiException;
-import com.google.cloud.monitoring.v3.MetricServiceClient;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
-import com.google.monitoring.v3.CreateMetricDescriptorRequest;
-import com.google.monitoring.v3.CreateTimeSeriesRequest;
-import com.google.monitoring.v3.ProjectName;
-import com.google.monitoring.v3.TimeSeries;
-import io.opencensus.common.Duration;
-import io.opencensus.common.Scope;
-import io.opencensus.stats.View;
-import io.opencensus.stats.ViewData;
-import io.opencensus.stats.ViewManager;
-import io.opencensus.trace.Sampler;
-import io.opencensus.trace.Span;
-import io.opencensus.trace.Status;
-import io.opencensus.trace.Tracer;
-import io.opencensus.trace.Tracing;
-import io.opencensus.trace.samplers.Samplers;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-import javax.annotation.concurrent.NotThreadSafe;
-
-/*>>>
-import org.checkerframework.checker.nullness.qual.Nullable;
-*/
-
-/**
- * Worker {@code Runnable} that polls ViewData from Stats library and batch export to StackDriver.
- *
- * <p>{@code StackdriverExporterWorker} will be started in a daemon {@code Thread}.
- *
- * <p>The state of this class should only be accessed from the thread which {@link
- * StackdriverExporterWorker} resides in.
- */
-@NotThreadSafe
-final class StackdriverExporterWorker implements Runnable {
-
-  private static final Logger logger = Logger.getLogger(StackdriverExporterWorker.class.getName());
-
-  // Stackdriver Monitoring v3 only accepts up to 200 TimeSeries per CreateTimeSeries call.
-  @VisibleForTesting static final int MAX_BATCH_EXPORT_SIZE = 200;
-
-  @VisibleForTesting static final String DEFAULT_DISPLAY_NAME_PREFIX = "OpenCensus/";
-  @VisibleForTesting static final String CUSTOM_METRIC_DOMAIN = "custom.googleapis.com/";
-
-  @VisibleForTesting
-  static final String CUSTOM_OPENCENSUS_DOMAIN = CUSTOM_METRIC_DOMAIN + "opencensus/";
-
-  private final long scheduleDelayMillis;
-  private final String projectId;
-  private final ProjectName projectName;
-  private final MetricServiceClient metricServiceClient;
-  private final ViewManager viewManager;
-  private final MonitoredResource monitoredResource;
-  private final String domain;
-  private final String displayNamePrefix;
-  private final Map<View.Name, View> registeredViews = new HashMap<View.Name, View>();
-
-  private static final Tracer tracer = Tracing.getTracer();
-  private static final Sampler probabilitySampler = Samplers.probabilitySampler(0.0001);
-
-  StackdriverExporterWorker(
-      String projectId,
-      MetricServiceClient metricServiceClient,
-      Duration exportInterval,
-      ViewManager viewManager,
-      MonitoredResource monitoredResource,
-      @javax.annotation.Nullable String metricNamePrefix) {
-    this.scheduleDelayMillis = exportInterval.toMillis();
-    this.projectId = projectId;
-    projectName = ProjectName.newBuilder().setProject(projectId).build();
-    this.metricServiceClient = metricServiceClient;
-    this.viewManager = viewManager;
-    this.monitoredResource = monitoredResource;
-    this.domain = getDomain(metricNamePrefix);
-    this.displayNamePrefix = getDisplayNamePrefix(metricNamePrefix);
-
-    Tracing.getExportComponent()
-        .getSampledSpanStore()
-        .registerSpanNamesForCollection(
-            Collections.singletonList("ExportStatsToStackdriverMonitoring"));
-  }
-
-  // Returns true if the given view is successfully registered to Stackdriver Monitoring, or the
-  // exact same view has already been registered. Returns false otherwise.
-  @VisibleForTesting
-  boolean registerView(View view) {
-    View existing = registeredViews.get(view.getName());
-    if (existing != null) {
-      if (existing.equals(view)) {
-        // Ignore views that are already registered.
-        return true;
-      } else {
-        // If we upload a view that has the same name with a registered view but with different
-        // attributes, Stackdriver client will throw an exception.
-        logger.log(
-            Level.WARNING,
-            "A different view with the same name is already registered: " + existing);
-        return false;
-      }
-    }
-    registeredViews.put(view.getName(), view);
-
-    Span span = tracer.getCurrentSpan();
-    span.addAnnotation("Create Stackdriver Metric.");
-    // TODO(songya): don't need to create MetricDescriptor for RpcViewConstants once we defined
-    // canonical metrics. Registration is required only for custom view definitions. Canonical
-    // views should be pre-registered.
-    MetricDescriptor metricDescriptor =
-        StackdriverExportUtils.createMetricDescriptor(view, projectId, domain, displayNamePrefix);
-    if (metricDescriptor == null) {
-      // Don't register interval views in this version.
-      return false;
-    }
-
-    CreateMetricDescriptorRequest request =
-        CreateMetricDescriptorRequest.newBuilder()
-            .setName(projectName.toString())
-            .setMetricDescriptor(metricDescriptor)
-            .build();
-    try {
-      metricServiceClient.createMetricDescriptor(request);
-      span.addAnnotation("Finish creating MetricDescriptor.");
-      return true;
-    } catch (ApiException e) {
-      logger.log(Level.WARNING, "ApiException thrown when creating MetricDescriptor.", e);
-      span.setStatus(
-          Status.CanonicalCode.valueOf(e.getStatusCode().getCode().name())
-              .toStatus()
-              .withDescription(
-                  "ApiException thrown when creating MetricDescriptor: " + exceptionMessage(e)));
-      return false;
-    } catch (Throwable e) {
-      logger.log(Level.WARNING, "Exception thrown when creating MetricDescriptor.", e);
-      span.setStatus(
-          Status.UNKNOWN.withDescription(
-              "Exception thrown when creating MetricDescriptor: " + exceptionMessage(e)));
-      return false;
-    }
-  }
-
-  // Polls ViewData from Stats library for all exported views, and upload them as TimeSeries to
-  // StackDriver.
-  @VisibleForTesting
-  void export() {
-    List</*@Nullable*/ ViewData> viewDataList = Lists.newArrayList();
-    for (View view : viewManager.getAllExportedViews()) {
-      if (registerView(view)) {
-        // Only upload stats for valid views.
-        viewDataList.add(viewManager.getView(view.getName()));
-      }
-    }
-
-    List<TimeSeries> timeSeriesList = Lists.newArrayList();
-    for (/*@Nullable*/ ViewData viewData : viewDataList) {
-      timeSeriesList.addAll(
-          StackdriverExportUtils.createTimeSeriesList(viewData, monitoredResource, domain));
-    }
-    for (List<TimeSeries> batchedTimeSeries :
-        Lists.partition(timeSeriesList, MAX_BATCH_EXPORT_SIZE)) {
-      Span span = tracer.getCurrentSpan();
-      span.addAnnotation("Export Stackdriver TimeSeries.");
-      try {
-        CreateTimeSeriesRequest request =
-            CreateTimeSeriesRequest.newBuilder()
-                .setName(projectName.toString())
-                .addAllTimeSeries(batchedTimeSeries)
-                .build();
-        metricServiceClient.createTimeSeries(request);
-        span.addAnnotation("Finish exporting TimeSeries.");
-      } catch (ApiException e) {
-        logger.log(Level.WARNING, "ApiException thrown when exporting TimeSeries.", e);
-        span.setStatus(
-            Status.CanonicalCode.valueOf(e.getStatusCode().getCode().name())
-                .toStatus()
-                .withDescription(
-                    "ApiException thrown when exporting TimeSeries: " + exceptionMessage(e)));
-      } catch (Throwable e) {
-        logger.log(Level.WARNING, "Exception thrown when exporting TimeSeries.", e);
-        span.setStatus(
-            Status.UNKNOWN.withDescription(
-                "Exception thrown when exporting TimeSeries: " + exceptionMessage(e)));
-      }
-    }
-  }
-
-  @Override
-  public void run() {
-    while (true) {
-      Span span =
-          tracer
-              .spanBuilder("ExportStatsToStackdriverMonitoring")
-              .setRecordEvents(true)
-              .setSampler(probabilitySampler)
-              .startSpan();
-      Scope scope = tracer.withSpan(span);
-      try {
-        export();
-      } catch (Throwable e) {
-        logger.log(Level.WARNING, "Exception thrown by the Stackdriver stats exporter.", e);
-        span.setStatus(
-            Status.UNKNOWN.withDescription(
-                "Exception from Stackdriver Exporter: " + exceptionMessage(e)));
-      } finally {
-        scope.close();
-        span.end();
-      }
-      try {
-        Thread.sleep(scheduleDelayMillis);
-      } catch (InterruptedException ie) {
-        // Preserve the interruption status as per guidance and stop doing any work.
-        Thread.currentThread().interrupt();
-        return;
-      }
-    }
-  }
-
-  private static String exceptionMessage(Throwable e) {
-    return e.getMessage() != null ? e.getMessage() : e.getClass().getName();
-  }
-
-  @VisibleForTesting
-  static String getDomain(@javax.annotation.Nullable String metricNamePrefix) {
-    String domain;
-    if (Strings.isNullOrEmpty(metricNamePrefix)) {
-      domain = CUSTOM_OPENCENSUS_DOMAIN;
-    } else {
-      if (!metricNamePrefix.endsWith("/")) {
-        domain = metricNamePrefix + '/';
-      } else {
-        domain = metricNamePrefix;
-      }
-    }
-    return domain;
-  }
-
-  @VisibleForTesting
-  static String getDisplayNamePrefix(@javax.annotation.Nullable String metricNamePrefix) {
-    if (metricNamePrefix == null) {
-      return DEFAULT_DISPLAY_NAME_PREFIX;
-    } else {
-      if (!metricNamePrefix.endsWith("/") && !metricNamePrefix.isEmpty()) {
-        metricNamePrefix += '/';
-      }
-      return metricNamePrefix;
-    }
-  }
-}
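The deleted worker's batching rule carries over to the new pipeline: Stackdriver Monitoring v3 accepts at most 200 TimeSeries per CreateTimeSeries call, so larger uploads are split with Lists.partition (see splitInMultipleBatches in CreateTimeSeriesExporterTest below). A minimal sketch of that rule, using an illustrative helper class that is not part of this change:

import com.google.common.collect.Lists;
import com.google.monitoring.v3.TimeSeries;
import java.util.List;

final class TimeSeriesBatcher {
  // Same limit as MAX_BATCH_EXPORT_SIZE above; Stackdriver rejects larger CreateTimeSeries calls.
  private static final int MAX_BATCH_EXPORT_SIZE = 200;

  private TimeSeriesBatcher() {}

  // Splits an arbitrarily long list into sublists that each fit in one CreateTimeSeries request.
  static List<List<TimeSeries>> toBatches(List<TimeSeries> timeSeriesList) {
    return Lists.partition(timeSeriesList, MAX_BATCH_EXPORT_SIZE);
  }
}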
diff --git a/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfiguration.java b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfiguration.java
index c4008ca..63598b9 100644
--- a/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfiguration.java
+++ b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfiguration.java
@@ -16,10 +16,22 @@
 
 package io.opencensus.exporter.stats.stackdriver;
 
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.DEFAULT_CONSTANT_LABELS;
+
 import com.google.api.MonitoredResource;
 import com.google.auth.Credentials;
 import com.google.auto.value.AutoValue;
+import com.google.cloud.ServiceOptions;
+import com.google.cloud.monitoring.v3.stub.MetricServiceStub;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
 import io.opencensus.common.Duration;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.Map;
 import javax.annotation.Nullable;
 import javax.annotation.concurrent.Immutable;
 
@@ -32,6 +44,12 @@
 @Immutable
 public abstract class StackdriverStatsConfiguration {
 
+  static final Duration DEFAULT_INTERVAL = Duration.create(60, 0);
+  static final MonitoredResource DEFAULT_RESOURCE = StackdriverExportUtils.getDefaultResource();
+  static final String DEFAULT_PROJECT_ID =
+      Strings.nullToEmpty(ServiceOptions.getDefaultProjectId());
+  static final Duration DEFAULT_DEADLINE = Duration.create(60, 0);
+
   StackdriverStatsConfiguration() {}
 
   /**
@@ -49,7 +67,6 @@
    * @return the project id.
    * @since 0.11
    */
-  @Nullable
   public abstract String getProjectId();
 
   /**
@@ -58,7 +75,6 @@
    * @return the export interval.
    * @since 0.11
    */
-  @Nullable
   public abstract Duration getExportInterval();
 
   /**
@@ -67,7 +83,6 @@
    * @return the {@code MonitoredResource}.
    * @since 0.11
    */
-  @Nullable
   public abstract MonitoredResource getMonitoredResource();
 
   /**
@@ -80,13 +95,55 @@
   public abstract String getMetricNamePrefix();
 
   /**
+   * Returns the display name prefix for Stackdriver metrics.
+   *
+   * @return the metric display name prefix.
+   * @since 0.27
+   */
+  @Nullable
+  public abstract String getDisplayNamePrefix();
+
+  /**
+   * Returns the constant labels that will be applied to every Stackdriver metric.
+   *
+   * @return the constant labels.
+   * @since 0.21
+   */
+  public abstract Map<LabelKey, LabelValue> getConstantLabels();
+
+  /**
+   * Returns the deadline for exporting to Stackdriver Monitoring backend.
+   *
+   * <p>Default value is 60 seconds if not set.
+   *
+   * @return the export deadline.
+   * @since 0.22
+   */
+  public abstract Duration getDeadline();
+
+  /**
+   * Returns the {@link MetricServiceStub} to be used to make calls to Stackdriver Monitoring v3
+   * APIs. This is for advanced usage.
+   *
+   * @return the {@code MetricServiceStub}.
+   * @since 0.22
+   */
+  @Nullable
+  public abstract MetricServiceStub getMetricServiceStub();
+
+  /**
    * Returns a new {@link Builder}.
    *
    * @return a {@code Builder}.
    * @since 0.11
    */
   public static Builder builder() {
-    return new AutoValue_StackdriverStatsConfiguration.Builder();
+    return new AutoValue_StackdriverStatsConfiguration.Builder()
+        .setProjectId(DEFAULT_PROJECT_ID)
+        .setConstantLabels(DEFAULT_CONSTANT_LABELS)
+        .setExportInterval(DEFAULT_INTERVAL)
+        .setMonitoredResource(DEFAULT_RESOURCE)
+        .setDeadline(DEFAULT_DEADLINE);
   }
 
   /**
@@ -97,6 +154,8 @@
   @AutoValue.Builder
   public abstract static class Builder {
 
+    @VisibleForTesting static final Duration ZERO = Duration.fromMillis(0);
+
     Builder() {}
 
     /**
@@ -149,11 +208,84 @@
     public abstract Builder setMetricNamePrefix(String prefix);
 
     /**
+     * Sets the display name prefix for Stackdriver metrics.
+     *
+     * @param prefix the metric display name prefix.
+     * @return this.
+     * @since 0.27
+     */
+    public abstract Builder setDisplayNamePrefix(String prefix);
+
+    /**
+     * Sets the constant labels that will be applied to every Stackdriver metric. The default
+     * "opencensus_task" label, together with the default resource (global), ensures that the set
+     * of labels is unique to this process, as required by Stackdriver.
+     *
+     * <p>If not set, the exporter will use the "opencensus_task" label.
+     *
+     * <p>If you set constant labels, make sure that the monitored resource together with these
+     * labels is unique to the current process. This is to ensure that there is only a single writer
+     * to each time series in Stackdriver.
+     *
+     * <p>Set constant labels to an empty map to avoid the default "opencensus_task" label. You
+     * should only do this if you know that the monitored resource uniquely identifies this process.
+     *
+     * @param constantLabels constant labels that will be applied to every Stackdriver metric.
+     * @return this
+     * @since 0.21
+     */
+    public abstract Builder setConstantLabels(Map<LabelKey, LabelValue> constantLabels);
+
+    /**
+     * Sets the deadline for exporting to Stackdriver Monitoring backend.
+     *
+     * <p>If both {@code MetricServiceStub} and {@code Deadline} are set, {@code MetricServiceStub}
+     * takes precedence and {@code Deadline} will not be respected.
+     *
+     * @param deadline the export deadline.
+     * @return this
+     * @since 0.22
+     */
+    public abstract Builder setDeadline(Duration deadline);
+
+    /**
+     * Sets the {@link MetricServiceStub} to be used to make calls to Stackdriver Monitoring v3
+     * APIs. This is for advanced usage.
+     *
+     * @param stub the {@code MetricServiceStub}.
+     * @return this
+     * @since 0.22
+     */
+    public abstract Builder setMetricServiceStub(MetricServiceStub stub);
+
+    abstract String getProjectId();
+
+    abstract Map<LabelKey, LabelValue> getConstantLabels();
+
+    abstract Duration getDeadline();
+
+    abstract StackdriverStatsConfiguration autoBuild();
+
+    /**
      * Builds a new {@link StackdriverStatsConfiguration} with current settings.
      *
      * @return a {@code StackdriverStatsConfiguration}.
      * @since 0.11
      */
-    public abstract StackdriverStatsConfiguration build();
+    public StackdriverStatsConfiguration build() {
+      // Make a defensive copy of constant labels.
+      setConstantLabels(
+          Collections.unmodifiableMap(
+              new LinkedHashMap<LabelKey, LabelValue>(getConstantLabels())));
+      Preconditions.checkArgument(
+          !Strings.isNullOrEmpty(getProjectId()),
+          "Cannot find a project ID from either configurations or application default.");
+      for (Map.Entry<LabelKey, LabelValue> constantLabel : getConstantLabels().entrySet()) {
+        Preconditions.checkNotNull(constantLabel.getKey(), "constant label key");
+        Preconditions.checkNotNull(constantLabel.getValue(), "constant label value");
+      }
+      Preconditions.checkArgument(getDeadline().compareTo(ZERO) > 0, "Deadline must be positive.");
+      return autoBuild();
+    }
   }
 }
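With the new defaults and validation above, a typical configuration only overrides the fields that differ from the defaults. A hedged usage sketch; the project ID, interval, deadline, and label values are placeholders and not part of this change:

import io.opencensus.common.Duration;
import io.opencensus.exporter.stats.stackdriver.StackdriverStatsConfiguration;
import io.opencensus.metrics.LabelKey;
import io.opencensus.metrics.LabelValue;
import java.util.Collections;

final class StackdriverStatsConfigurationExample {
  private StackdriverStatsConfigurationExample() {}

  static StackdriverStatsConfiguration buildConfiguration() {
    return StackdriverStatsConfiguration.builder()
        // Placeholder; when omitted, the application-default project ID is used.
        .setProjectId("my-project-id")
        // Defaults to 60 seconds when not set.
        .setExportInterval(Duration.create(30, 0))
        // Per-call deadline for createMetricDescriptor/createTimeSeries; must be positive.
        .setDeadline(Duration.create(20, 0))
        // Overrides the default "opencensus_task" label; use an empty map only if the
        // monitored resource alone uniquely identifies this process.
        .setConstantLabels(
            Collections.singletonMap(
                LabelKey.create("worker_id", "Worker identifier"),
                LabelValue.create("worker-42")))
        .build();
  }
}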
diff --git a/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporter.java b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporter.java
index 51c5491..cc2d1a9 100644
--- a/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporter.java
+++ b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporter.java
@@ -19,23 +19,35 @@
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.DEFAULT_CONSTANT_LABELS;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverStatsConfiguration.DEFAULT_DEADLINE;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverStatsConfiguration.DEFAULT_PROJECT_ID;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverStatsConfiguration.DEFAULT_RESOURCE;
 
 import com.google.api.MonitoredResource;
 import com.google.api.gax.core.FixedCredentialsProvider;
+import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
+import com.google.api.gax.rpc.FixedHeaderProvider;
+import com.google.api.gax.rpc.HeaderProvider;
 import com.google.auth.Credentials;
 import com.google.auth.oauth2.GoogleCredentials;
 import com.google.cloud.ServiceOptions;
 import com.google.cloud.monitoring.v3.MetricServiceClient;
 import com.google.cloud.monitoring.v3.MetricServiceSettings;
+import com.google.cloud.monitoring.v3.stub.MetricServiceStub;
 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.util.concurrent.MoreExecutors;
 import io.opencensus.common.Duration;
-import io.opencensus.stats.Stats;
-import io.opencensus.stats.ViewManager;
+import io.opencensus.common.OpenCensusLibraryInformation;
+import io.opencensus.exporter.metrics.util.IntervalMetricReader;
+import io.opencensus.exporter.metrics.util.MetricReader;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.Metrics;
 import java.io.IOException;
-import java.util.concurrent.ThreadFactory;
+import java.util.Map;
 import javax.annotation.Nullable;
 import javax.annotation.concurrent.GuardedBy;
+import javax.annotation.concurrent.ThreadSafe;
 
 /**
  * Exporter to Stackdriver Monitoring Client API v3.
@@ -56,41 +68,62 @@
  *
  * @since 0.9
  */
+@ThreadSafe
 public final class StackdriverStatsExporter {
 
-  private static final Object monitor = new Object();
-
-  private final Thread workerThread;
+  @VisibleForTesting static final Object monitor = new Object();
 
   @GuardedBy("monitor")
   @Nullable
-  private static StackdriverStatsExporter exporter = null;
+  private static StackdriverStatsExporter instance = null;
 
-  private static final Duration ZERO = Duration.create(0, 0);
+  @GuardedBy("monitor")
+  @Nullable
+  private static MetricServiceClient metricServiceClient = null;
 
-  @VisibleForTesting static final Duration DEFAULT_INTERVAL = Duration.create(60, 0);
+  private static final String EXPORTER_SPAN_NAME = "ExportMetricsToStackdriver";
 
-  private static final MonitoredResource DEFAULT_RESOURCE =
-      StackdriverExportUtils.getDefaultResource();
+  // See io.grpc.internal.GrpcUtil.USER_AGENT_KEY
+  private static final String USER_AGENT_KEY = "user-agent";
+  private static final String USER_AGENT =
+      "opencensus-java/" + OpenCensusLibraryInformation.VERSION;
+  private static final HeaderProvider OPENCENSUS_USER_AGENT_HEADER_PROVIDER =
+      FixedHeaderProvider.create(USER_AGENT_KEY, USER_AGENT);
 
-  @VisibleForTesting
-  StackdriverStatsExporter(
+  private final IntervalMetricReader intervalMetricReader;
+
+  private StackdriverStatsExporter(
       String projectId,
       MetricServiceClient metricServiceClient,
       Duration exportInterval,
-      ViewManager viewManager,
       MonitoredResource monitoredResource,
-      @Nullable String metricNamePrefix) {
-    checkArgument(exportInterval.compareTo(ZERO) > 0, "Duration must be positive");
-    StackdriverExporterWorker worker =
-        new StackdriverExporterWorker(
-            projectId,
-            metricServiceClient,
-            exportInterval,
-            viewManager,
-            monitoredResource,
-            metricNamePrefix);
-    this.workerThread = new DaemonThreadFactory().newThread(worker);
+      @Nullable String metricNamePrefix,
+      @Nullable String displayNamePrefix,
+      Map<LabelKey, LabelValue> constantLabels) {
+    IntervalMetricReader.Options.Builder intervalMetricReaderOptionsBuilder =
+        IntervalMetricReader.Options.builder();
+    intervalMetricReaderOptionsBuilder.setExportInterval(exportInterval);
+    intervalMetricReader =
+        IntervalMetricReader.create(
+            new CreateMetricDescriptorExporter(
+                projectId,
+                metricServiceClient,
+                metricNamePrefix,
+                displayNamePrefix,
+                constantLabels,
+                new CreateTimeSeriesExporter(
+                    projectId,
+                    metricServiceClient,
+                    monitoredResource,
+                    metricNamePrefix,
+                    constantLabels)),
+            MetricReader.create(
+                MetricReader.Options.builder()
+                    .setMetricProducerManager(
+                        Metrics.getExportComponent().getMetricProducerManager())
+                    .setSpanName(EXPORTER_SPAN_NAME)
+                    .build()),
+            intervalMetricReaderOptionsBuilder.build());
   }
 
   /**
@@ -112,7 +145,16 @@
     checkNotNull(credentials, "credentials");
     checkNotNull(projectId, "projectId");
     checkNotNull(exportInterval, "exportInterval");
-    createInternal(credentials, projectId, exportInterval, null, null);
+    createInternal(
+        credentials,
+        projectId,
+        exportInterval,
+        DEFAULT_RESOURCE,
+        null,
+        null,
+        DEFAULT_CONSTANT_LABELS,
+        DEFAULT_DEADLINE,
+        null);
   }
 
   /**
@@ -142,7 +184,16 @@
       throws IOException {
     checkNotNull(projectId, "projectId");
     checkNotNull(exportInterval, "exportInterval");
-    createInternal(null, projectId, exportInterval, null, null);
+    createInternal(
+        null,
+        projectId,
+        exportInterval,
+        DEFAULT_RESOURCE,
+        null,
+        null,
+        DEFAULT_CONSTANT_LABELS,
+        DEFAULT_DEADLINE,
+        null);
   }
 
   /**
@@ -180,7 +231,11 @@
         configuration.getProjectId(),
         configuration.getExportInterval(),
         configuration.getMonitoredResource(),
-        configuration.getMetricNamePrefix());
+        configuration.getMetricNamePrefix(),
+        configuration.getDisplayNamePrefix(),
+        configuration.getConstantLabels(),
+        configuration.getDeadline(),
+        configuration.getMetricServiceStub());
   }
 
   /**
@@ -210,7 +265,7 @@
    * @since 0.11.0
    */
   public static void createAndRegister() throws IOException {
-    createInternal(null, null, null, null, null);
+    createAndRegister(StackdriverStatsConfiguration.builder().build());
   }
 
   /**
@@ -237,7 +292,18 @@
   @Deprecated
   public static void createAndRegister(Duration exportInterval) throws IOException {
     checkNotNull(exportInterval, "exportInterval");
-    createInternal(null, null, exportInterval, null, null);
+    checkArgument(
+        !DEFAULT_PROJECT_ID.isEmpty(), "Cannot find a project ID from the application default.");
+    createInternal(
+        null,
+        DEFAULT_PROJECT_ID,
+        exportInterval,
+        DEFAULT_RESOURCE,
+        null,
+        null,
+        DEFAULT_CONSTANT_LABELS,
+        DEFAULT_DEADLINE,
+        null);
   }
 
   /**
@@ -266,7 +332,16 @@
     checkNotNull(projectId, "projectId");
     checkNotNull(exportInterval, "exportInterval");
     checkNotNull(monitoredResource, "monitoredResource");
-    createInternal(null, projectId, exportInterval, monitoredResource, null);
+    createInternal(
+        null,
+        projectId,
+        exportInterval,
+        monitoredResource,
+        null,
+        null,
+        DEFAULT_CONSTANT_LABELS,
+        DEFAULT_DEADLINE,
+        null);
   }
 
   /**
@@ -293,71 +368,94 @@
       Duration exportInterval, MonitoredResource monitoredResource) throws IOException {
     checkNotNull(exportInterval, "exportInterval");
     checkNotNull(monitoredResource, "monitoredResource");
-    createInternal(null, null, exportInterval, monitoredResource, null);
+    checkArgument(
+        !DEFAULT_PROJECT_ID.isEmpty(), "Cannot find a project ID from the application default.");
+    createInternal(
+        null,
+        DEFAULT_PROJECT_ID,
+        exportInterval,
+        monitoredResource,
+        null,
+        null,
+        DEFAULT_CONSTANT_LABELS,
+        DEFAULT_DEADLINE,
+        null);
   }
 
   // Use createInternal() (instead of constructor) to enforce singleton.
   private static void createInternal(
       @Nullable Credentials credentials,
-      @Nullable String projectId,
-      @Nullable Duration exportInterval,
-      @Nullable MonitoredResource monitoredResource,
-      @Nullable String metricNamePrefix)
+      String projectId,
+      Duration exportInterval,
+      MonitoredResource monitoredResource,
+      @Nullable String metricNamePrefix,
+      @Nullable String displayNamePrefix,
+      Map<LabelKey, LabelValue> constantLabels,
+      Duration deadline,
+      @Nullable MetricServiceStub stub)
       throws IOException {
-    projectId = projectId == null ? ServiceOptions.getDefaultProjectId() : projectId;
-    exportInterval = exportInterval == null ? DEFAULT_INTERVAL : exportInterval;
-    monitoredResource = monitoredResource == null ? DEFAULT_RESOURCE : monitoredResource;
     synchronized (monitor) {
-      checkState(exporter == null, "Stackdriver stats exporter is already created.");
-      MetricServiceClient metricServiceClient;
-      // Initialize MetricServiceClient inside lock to avoid creating multiple clients.
-      if (credentials == null) {
-        metricServiceClient = MetricServiceClient.create();
+      checkState(instance == null, "Stackdriver stats exporter is already created.");
+      final MetricServiceClient client;
+      if (stub == null) {
+        metricServiceClient = createMetricServiceClient(credentials, deadline);
+        client = metricServiceClient;
       } else {
-        metricServiceClient =
-            MetricServiceClient.create(
-                MetricServiceSettings.newBuilder()
-                    .setCredentialsProvider(FixedCredentialsProvider.create(credentials))
-                    .build());
+        client = MetricServiceClient.create(stub);
       }
-      exporter =
+      instance =
           new StackdriverStatsExporter(
               projectId,
-              metricServiceClient,
+              client,
               exportInterval,
-              Stats.getViewManager(),
               monitoredResource,
-              metricNamePrefix);
-      exporter.workerThread.start();
+              metricNamePrefix,
+              displayNamePrefix,
+              constantLabels);
     }
   }
 
-  // Resets exporter to null. Used only for unit tests.
+  // Initialize MetricServiceClient inside lock to avoid creating multiple clients.
+  @GuardedBy("monitor")
   @VisibleForTesting
-  static void unsafeResetExporter() {
-    synchronized (monitor) {
-      StackdriverStatsExporter.exporter = null;
+  static MetricServiceClient createMetricServiceClient(
+      @Nullable Credentials credentials, Duration deadline) throws IOException {
+    MetricServiceSettings.Builder settingsBuilder =
+        MetricServiceSettings.newBuilder()
+            .setTransportChannelProvider(
+                InstantiatingGrpcChannelProvider.newBuilder()
+                    .setHeaderProvider(OPENCENSUS_USER_AGENT_HEADER_PROVIDER)
+                    .build());
+    if (credentials != null) {
+      settingsBuilder.setCredentialsProvider(FixedCredentialsProvider.create(credentials));
     }
+
+    org.threeten.bp.Duration stackdriverDuration =
+        org.threeten.bp.Duration.ofMillis(deadline.toMillis());
+    // We use createMetricDescriptor and createTimeSeries APIs in this exporter.
+    settingsBuilder.createMetricDescriptorSettings().setSimpleTimeoutNoRetries(stackdriverDuration);
+    settingsBuilder.createTimeSeriesSettings().setSimpleTimeoutNoRetries(stackdriverDuration);
+
+    return MetricServiceClient.create(settingsBuilder.build());
   }
 
-  /** A lightweight {@link ThreadFactory} to spawn threads in a GAE-Java7-compatible way. */
-  // TODO(Hailong): Remove this once we use a callback to implement the exporter.
-  static final class DaemonThreadFactory implements ThreadFactory {
-    // AppEngine runtimes have constraints on threading and socket handling
-    // that need to be accommodated.
-    public static final boolean IS_RESTRICTED_APPENGINE =
-        System.getProperty("com.google.appengine.runtime.environment") != null
-            && "1.7".equals(System.getProperty("java.specification.version"));
-    private static final ThreadFactory threadFactory = MoreExecutors.platformThreadFactory();
-
-    @Override
-    public Thread newThread(Runnable r) {
-      Thread thread = threadFactory.newThread(r);
-      if (!IS_RESTRICTED_APPENGINE) {
-        thread.setName("ExportWorkerThread");
-        thread.setDaemon(true);
+  /**
+   * Unregisters the {@link StackdriverStatsExporter} and stops metrics exporting.
+   *
+   * <p>Unexported data will be flushed before the exporter is stopped.
+   *
+   * @since 0.23
+   */
+  public static void unregister() {
+    synchronized (monitor) {
+      if (instance != null) {
+        instance.intervalMetricReader.stop();
       }
-      return thread;
+      instance = null;
+      if (metricServiceClient != null) {
+        metricServiceClient.close();
+        metricServiceClient = null;
+      }
     }
   }
 }
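The exporter above remains a process-wide singleton: createAndRegister starts the IntervalMetricReader-backed export loop, and the new unregister() flushes remaining data and closes the client. A hedged lifecycle sketch; the configuration value is a placeholder:

import io.opencensus.exporter.stats.stackdriver.StackdriverStatsConfiguration;
import io.opencensus.exporter.stats.stackdriver.StackdriverStatsExporter;
import java.io.IOException;

final class StackdriverStatsExporterExample {
  private StackdriverStatsExporterExample() {}

  static void run() throws IOException {
    // Registers the singleton exporter; throws IllegalStateException if one is already registered.
    StackdriverStatsExporter.createAndRegister(
        StackdriverStatsConfiguration.builder().setProjectId("my-project-id").build());
    try {
      // Application work; metrics are read and exported on the configured interval.
    } finally {
      // Flushes unexported data, stops the reader, and closes the MetricServiceClient.
      StackdriverStatsExporter.unregister();
    }
  }
}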
diff --git a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/CreateMetricDescriptorExporterTest.java b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/CreateMetricDescriptorExporterTest.java
new file mode 100644
index 0000000..a508217
--- /dev/null
+++ b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/CreateMetricDescriptorExporterTest.java
@@ -0,0 +1,317 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.stackdriver;
+
+import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.CUSTOM_OPENCENSUS_DOMAIN;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.DEFAULT_CONSTANT_LABELS;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.DEFAULT_DISPLAY_NAME_PREFIX;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import com.google.api.MetricDescriptor;
+import com.google.api.gax.rpc.UnaryCallable;
+import com.google.cloud.monitoring.v3.stub.MetricServiceStub;
+import com.google.monitoring.v3.CreateMetricDescriptorRequest;
+import io.opencensus.common.Timestamp;
+import io.opencensus.exporter.metrics.util.MetricExporter;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.Value;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import javax.annotation.Nullable;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+/** Unit tests for {@link CreateMetricDescriptorExporter}. */
+@RunWith(JUnit4.class)
+public class CreateMetricDescriptorExporterTest {
+  private static final String PROJECT_ID = "projectId";
+
+  private static final String METRIC_NAME = CUSTOM_OPENCENSUS_DOMAIN + "my_metric";
+  private static final String METRIC_NAME_2 = CUSTOM_OPENCENSUS_DOMAIN + "my_metric_2";
+  private static final String METRIC_NAME_3 = "bigquery.googleapis.com/query/count";
+  private static final String METRIC_NAME_CUSTOM_DOMAIN = "my.org/my_metric_3";
+  private static final String METRIC_DESCRIPTION = "metric_description";
+  private static final String METRIC_DESCRIPTION_2 = "metric_description2";
+  private static final String METRIC_UNIT = "us";
+  private static final List<LabelKey> LABEL_KEY =
+      Collections.singletonList(LabelKey.create("KEY", "key description"));
+  private static final List<LabelValue> LABEL_VALUE =
+      Collections.singletonList(LabelValue.create("VALUE"));
+
+  private static final io.opencensus.metrics.export.MetricDescriptor METRIC_DESCRIPTOR =
+      io.opencensus.metrics.export.MetricDescriptor.create(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.CUMULATIVE_INT64, LABEL_KEY);
+
+  private static final io.opencensus.metrics.export.MetricDescriptor METRIC_DESCRIPTOR_2 =
+      io.opencensus.metrics.export.MetricDescriptor.create(
+          METRIC_NAME_2, METRIC_DESCRIPTION_2, METRIC_UNIT, Type.CUMULATIVE_INT64, LABEL_KEY);
+
+  // Same name as METRIC_DESCRIPTOR but different descriptor.
+  private static final io.opencensus.metrics.export.MetricDescriptor METRIC_DESCRIPTOR_3 =
+      io.opencensus.metrics.export.MetricDescriptor.create(
+          METRIC_NAME, METRIC_DESCRIPTION_2, METRIC_UNIT, Type.CUMULATIVE_INT64, LABEL_KEY);
+
+  // Stackdriver built-in metric.
+  private static final io.opencensus.metrics.export.MetricDescriptor METRIC_DESCRIPTOR_4 =
+      io.opencensus.metrics.export.MetricDescriptor.create(
+          METRIC_NAME_3, METRIC_DESCRIPTION, METRIC_UNIT, Type.CUMULATIVE_INT64, LABEL_KEY);
+
+  // Metric with no domain.
+  private static final io.opencensus.metrics.export.MetricDescriptor METRIC_DESCRIPTOR_5 =
+      io.opencensus.metrics.export.MetricDescriptor.create(
+          METRIC_NAME_CUSTOM_DOMAIN,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          Type.CUMULATIVE_INT64,
+          LABEL_KEY);
+
+  private static final Value VALUE_LONG = Value.longValue(12345678);
+  private static final Timestamp TIMESTAMP = Timestamp.fromMillis(3000);
+  private static final Timestamp TIMESTAMP_2 = Timestamp.fromMillis(1000);
+  private static final Point POINT = Point.create(VALUE_LONG, TIMESTAMP);
+
+  private static final io.opencensus.metrics.export.TimeSeries CUMULATIVE_TIME_SERIES =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(LABEL_VALUE, POINT, TIMESTAMP_2);
+
+  private static final Metric METRIC =
+      Metric.createWithOneTimeSeries(METRIC_DESCRIPTOR, CUMULATIVE_TIME_SERIES);
+  private static final Metric METRIC_2 =
+      Metric.createWithOneTimeSeries(METRIC_DESCRIPTOR_2, CUMULATIVE_TIME_SERIES);
+  private static final Metric METRIC_3 =
+      Metric.createWithOneTimeSeries(METRIC_DESCRIPTOR_3, CUMULATIVE_TIME_SERIES);
+  private static final Metric METRIC_4 =
+      Metric.createWithOneTimeSeries(METRIC_DESCRIPTOR_4, CUMULATIVE_TIME_SERIES);
+  private static final Metric METRIC_5 =
+      Metric.createWithOneTimeSeries(METRIC_DESCRIPTOR_5, CUMULATIVE_TIME_SERIES);
+
+  @Mock private MetricServiceStub mockStub;
+
+  @Mock
+  private UnaryCallable<CreateMetricDescriptorRequest, MetricDescriptor>
+      mockCreateMetricDescriptorCallable;
+
+  @Before
+  public void setUp() {
+    MockitoAnnotations.initMocks(this);
+
+    doReturn(mockCreateMetricDescriptorCallable).when(mockStub).createMetricDescriptorCallable();
+    doReturn(null)
+        .when(mockCreateMetricDescriptorCallable)
+        .call(any(CreateMetricDescriptorRequest.class));
+  }
+
+  @Test
+  public void export() {
+    FakeMetricExporter fakeMetricExporter = new FakeMetricExporter();
+    CreateMetricDescriptorExporter exporter =
+        new CreateMetricDescriptorExporter(
+            PROJECT_ID,
+            new FakeMetricServiceClient(mockStub),
+            null,
+            null,
+            DEFAULT_CONSTANT_LABELS,
+            fakeMetricExporter);
+    exporter.export(Arrays.asList(METRIC, METRIC_2));
+
+    verify(mockStub, times(2)).createMetricDescriptorCallable();
+
+    MetricDescriptor descriptor =
+        StackdriverExportUtils.createMetricDescriptor(
+            METRIC_DESCRIPTOR,
+            PROJECT_ID,
+            CUSTOM_OPENCENSUS_DOMAIN,
+            DEFAULT_DISPLAY_NAME_PREFIX,
+            DEFAULT_CONSTANT_LABELS);
+    verify(mockCreateMetricDescriptorCallable, times(1))
+        .call(
+            eq(
+                CreateMetricDescriptorRequest.newBuilder()
+                    .setName("projects/" + PROJECT_ID)
+                    .setMetricDescriptor(descriptor)
+                    .build()));
+
+    MetricDescriptor descriptor2 =
+        StackdriverExportUtils.createMetricDescriptor(
+            METRIC_DESCRIPTOR_2,
+            PROJECT_ID,
+            CUSTOM_OPENCENSUS_DOMAIN,
+            DEFAULT_DISPLAY_NAME_PREFIX,
+            DEFAULT_CONSTANT_LABELS);
+    verify(mockCreateMetricDescriptorCallable, times(1))
+        .call(
+            eq(
+                CreateMetricDescriptorRequest.newBuilder()
+                    .setName("projects/" + PROJECT_ID)
+                    .setMetricDescriptor(descriptor2)
+                    .build()));
+    assertThat(fakeMetricExporter.getLastExported()).containsExactly(METRIC, METRIC_2);
+  }
+
+  @Test
+  public void export_MetricNameWithCustomDomain() {
+    FakeMetricExporter fakeMetricExporter = new FakeMetricExporter();
+    CreateMetricDescriptorExporter exporter =
+        new CreateMetricDescriptorExporter(
+            PROJECT_ID,
+            new FakeMetricServiceClient(mockStub),
+            null,
+            null,
+            DEFAULT_CONSTANT_LABELS,
+            fakeMetricExporter);
+    exporter.export(Arrays.asList(METRIC_5));
+
+    verify(mockStub, times(1)).createMetricDescriptorCallable();
+
+    MetricDescriptor descriptor =
+        StackdriverExportUtils.createMetricDescriptor(
+            METRIC_DESCRIPTOR_5,
+            PROJECT_ID,
+            CUSTOM_OPENCENSUS_DOMAIN,
+            DEFAULT_DISPLAY_NAME_PREFIX,
+            DEFAULT_CONSTANT_LABELS);
+    verify(mockCreateMetricDescriptorCallable, times(1))
+        .call(
+            eq(
+                CreateMetricDescriptorRequest.newBuilder()
+                    .setName("projects/" + PROJECT_ID)
+                    .setMetricDescriptor(descriptor)
+                    .build()));
+  }
+
+  @Test
+  public void doNotExportForEmptyMetrics() {
+    FakeMetricExporter fakeMetricExporter = new FakeMetricExporter();
+    CreateMetricDescriptorExporter exporter =
+        new CreateMetricDescriptorExporter(
+            PROJECT_ID,
+            new FakeMetricServiceClient(mockStub),
+            null,
+            null,
+            DEFAULT_CONSTANT_LABELS,
+            fakeMetricExporter);
+    exporter.export(Collections.<Metric>emptyList());
+    verify(mockStub, times(0)).createMetricDescriptorCallable();
+    assertThat(fakeMetricExporter.getLastExported()).isEmpty();
+  }
+
+  @Test
+  public void doNotExportIfFailedToRegisterMetric() {
+    FakeMetricExporter fakeMetricExporter = new FakeMetricExporter();
+    doThrow(new IllegalArgumentException()).when(mockStub).createMetricDescriptorCallable();
+    CreateMetricDescriptorExporter exporter =
+        new CreateMetricDescriptorExporter(
+            PROJECT_ID,
+            new FakeMetricServiceClient(mockStub),
+            null,
+            null,
+            DEFAULT_CONSTANT_LABELS,
+            fakeMetricExporter);
+
+    exporter.export(Collections.singletonList(METRIC));
+    verify(mockStub, times(1)).createMetricDescriptorCallable();
+    assertThat(fakeMetricExporter.getLastExported()).isEmpty();
+  }
+
+  @Test
+  public void skipDifferentMetricsWithSameName() {
+    FakeMetricExporter fakeMetricExporter = new FakeMetricExporter();
+    CreateMetricDescriptorExporter exporter =
+        new CreateMetricDescriptorExporter(
+            PROJECT_ID,
+            new FakeMetricServiceClient(mockStub),
+            null,
+            null,
+            DEFAULT_CONSTANT_LABELS,
+            fakeMetricExporter);
+    exporter.export(Collections.singletonList(METRIC));
+    verify(mockStub, times(1)).createMetricDescriptorCallable();
+    assertThat(fakeMetricExporter.getLastExported()).containsExactly(METRIC);
+
+    exporter.export(Collections.singletonList(METRIC_3));
+    verify(mockStub, times(1)).createMetricDescriptorCallable();
+    assertThat(fakeMetricExporter.getLastExported()).isEmpty();
+  }
+
+  @Test
+  public void doNotCreateMetricDescriptorForRegisteredMetric() {
+    FakeMetricExporter fakeMetricExporter = new FakeMetricExporter();
+    CreateMetricDescriptorExporter exporter =
+        new CreateMetricDescriptorExporter(
+            PROJECT_ID,
+            new FakeMetricServiceClient(mockStub),
+            null,
+            null,
+            DEFAULT_CONSTANT_LABELS,
+            fakeMetricExporter);
+    exporter.export(Collections.singletonList(METRIC));
+    verify(mockStub, times(1)).createMetricDescriptorCallable();
+    assertThat(fakeMetricExporter.getLastExported()).containsExactly(METRIC);
+
+    exporter.export(Collections.singletonList(METRIC));
+    verify(mockStub, times(1)).createMetricDescriptorCallable();
+    assertThat(fakeMetricExporter.getLastExported()).containsExactly(METRIC);
+  }
+
+  @Test
+  public void doNotCreateMetricDescriptorForBuiltInMetric() {
+    FakeMetricExporter fakeMetricExporter = new FakeMetricExporter();
+    CreateMetricDescriptorExporter exporter =
+        new CreateMetricDescriptorExporter(
+            PROJECT_ID,
+            new FakeMetricServiceClient(mockStub),
+            null,
+            null,
+            DEFAULT_CONSTANT_LABELS,
+            fakeMetricExporter);
+    exporter.export(Collections.singletonList(METRIC_4));
+
+    // Should not create MetricDescriptor for built-in metrics, but TimeSeries should be uploaded.
+    verify(mockStub, times(0)).createMetricDescriptorCallable();
+    assertThat(fakeMetricExporter.getLastExported()).containsExactly(METRIC_4);
+  }
+
+  private static final class FakeMetricExporter extends MetricExporter {
+    @Nullable private List<Metric> lastExported = null;
+
+    @Override
+    public void export(Collection<Metric> metrics) {
+      lastExported = new ArrayList<>(metrics);
+    }
+
+    @Nullable
+    List<Metric> getLastExported() {
+      return lastExported;
+    }
+  }
+}
diff --git a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/CreateTimeSeriesExporterTest.java b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/CreateTimeSeriesExporterTest.java
new file mode 100644
index 0000000..560a1aa
--- /dev/null
+++ b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/CreateTimeSeriesExporterTest.java
@@ -0,0 +1,159 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.stackdriver;
+
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.DEFAULT_CONSTANT_LABELS;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import com.google.api.MetricDescriptor;
+import com.google.api.MonitoredResource;
+import com.google.api.gax.rpc.UnaryCallable;
+import com.google.cloud.monitoring.v3.stub.MetricServiceStub;
+import com.google.monitoring.v3.CreateMetricDescriptorRequest;
+import com.google.monitoring.v3.CreateTimeSeriesRequest;
+import com.google.monitoring.v3.TimeSeries;
+import com.google.protobuf.Empty;
+import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.Value;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+/** Unit tests for {@link CreateTimeSeriesExporter}. */
+@RunWith(JUnit4.class)
+public class CreateTimeSeriesExporterTest {
+  private static final String PROJECT_ID = "projectId";
+  private static final String METRIC_NAME = "my metric";
+  private static final String METRIC_DESCRIPTION = "metric description";
+  private static final String METRIC_UNIT = "us";
+  private static final List<LabelKey> LABEL_KEY =
+      Collections.singletonList(LabelKey.create("KEY", "key description"));
+  private static final List<LabelValue> LABEL_VALUE =
+      Collections.singletonList(LabelValue.create("VALUE"));
+
+  private static final io.opencensus.metrics.export.MetricDescriptor METRIC_DESCRIPTOR =
+      io.opencensus.metrics.export.MetricDescriptor.create(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.CUMULATIVE_INT64, LABEL_KEY);
+
+  private static final Value VALUE_LONG = Value.longValue(12345678);
+  private static final Point POINT = Point.create(VALUE_LONG, Timestamp.fromMillis(3000));
+
+  private static final io.opencensus.metrics.export.TimeSeries CUMULATIVE_TIME_SERIES =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+          LABEL_VALUE, POINT, Timestamp.fromMillis(1000));
+
+  private static final Metric METRIC =
+      Metric.createWithOneTimeSeries(METRIC_DESCRIPTOR, CUMULATIVE_TIME_SERIES);
+
+  private static final MonitoredResource DEFAULT_RESOURCE =
+      MonitoredResource.newBuilder().setType("global").build();
+
+  @Mock private MetricServiceStub mockStub;
+
+  @Mock
+  private UnaryCallable<CreateMetricDescriptorRequest, MetricDescriptor>
+      mockCreateMetricDescriptorCallable;
+
+  @Mock private UnaryCallable<CreateTimeSeriesRequest, Empty> mockCreateTimeSeriesCallable;
+
+  @Before
+  public void setUp() {
+    MockitoAnnotations.initMocks(this);
+
+    doReturn(mockCreateMetricDescriptorCallable).when(mockStub).createMetricDescriptorCallable();
+    doReturn(mockCreateTimeSeriesCallable).when(mockStub).createTimeSeriesCallable();
+    doReturn(null)
+        .when(mockCreateMetricDescriptorCallable)
+        .call(any(CreateMetricDescriptorRequest.class));
+    doReturn(null).when(mockCreateTimeSeriesCallable).call(any(CreateTimeSeriesRequest.class));
+  }
+
+  @Test
+  public void export() {
+    CreateTimeSeriesExporter exporter =
+        new CreateTimeSeriesExporter(
+            PROJECT_ID,
+            new FakeMetricServiceClient(mockStub),
+            DEFAULT_RESOURCE,
+            null,
+            DEFAULT_CONSTANT_LABELS);
+    exporter.export(Collections.singletonList(METRIC));
+    verify(mockStub, times(1)).createTimeSeriesCallable();
+
+    List<TimeSeries> timeSeries =
+        StackdriverExportUtils.createTimeSeriesList(
+            METRIC,
+            DEFAULT_RESOURCE,
+            StackdriverExportUtils.CUSTOM_OPENCENSUS_DOMAIN,
+            PROJECT_ID,
+            DEFAULT_CONSTANT_LABELS);
+
+    verify(mockCreateTimeSeriesCallable, times(1))
+        .call(
+            eq(
+                CreateTimeSeriesRequest.newBuilder()
+                    .setName("projects/" + PROJECT_ID)
+                    .addAllTimeSeries(timeSeries)
+                    .build()));
+  }
+
+  @Test
+  public void splitInMultipleBatches() {
+    CreateTimeSeriesExporter exporter =
+        new CreateTimeSeriesExporter(
+            PROJECT_ID,
+            new FakeMetricServiceClient(mockStub),
+            DEFAULT_RESOURCE,
+            null,
+            DEFAULT_CONSTANT_LABELS);
+    final int numExportedTimeSeries = 4 * StackdriverExportUtils.MAX_BATCH_EXPORT_SIZE;
+    ArrayList<Metric> exportedMetrics = new ArrayList<>(numExportedTimeSeries);
+    for (int i = 0; i < numExportedTimeSeries; i++) {
+      exportedMetrics.add(METRIC);
+    }
+    exporter.export(exportedMetrics);
+    verify(mockStub, times(4)).createTimeSeriesCallable();
+  }
+
+  @Test
+  public void doNotExportForEmptyMetrics() {
+    CreateTimeSeriesExporter exporter =
+        new CreateTimeSeriesExporter(
+            PROJECT_ID,
+            new FakeMetricServiceClient(mockStub),
+            DEFAULT_RESOURCE,
+            null,
+            DEFAULT_CONSTANT_LABELS);
+    exporter.export(Collections.<Metric>emptyList());
+    verify(mockStub, times(0)).createTimeSeriesCallable();
+  }
+}
diff --git a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/FakeMetricServiceClient.java b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/FakeMetricServiceClient.java
new file mode 100644
index 0000000..fff80a0
--- /dev/null
+++ b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/FakeMetricServiceClient.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.stackdriver;
+
+import com.google.cloud.monitoring.v3.MetricServiceClient;
+import com.google.cloud.monitoring.v3.stub.MetricServiceStub;
+
+/**
+ * MetricServiceClient.createMetricDescriptor() and MetricServiceClient.createTimeSeries() are final
+ * methods and cannot be mocked. We have to use a mock MetricServiceStub in order to verify the
+ * output.
+ */
+final class FakeMetricServiceClient extends MetricServiceClient {
+
+  FakeMetricServiceClient(MetricServiceStub stub) {
+    super(stub);
+  }
+}
diff --git a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtilsTest.java b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtilsTest.java
index cd536e8..82fd5f9 100644
--- a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtilsTest.java
+++ b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtilsTest.java
@@ -17,9 +17,16 @@
 package io.opencensus.exporter.stats.stackdriver;
 
 import static com.google.common.truth.Truth.assertThat;
-import static io.opencensus.exporter.stats.stackdriver.StackdriverExporterWorker.CUSTOM_OPENCENSUS_DOMAIN;
-import static io.opencensus.exporter.stats.stackdriver.StackdriverExporterWorker.DEFAULT_DISPLAY_NAME_PREFIX;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.CUSTOM_OPENCENSUS_DOMAIN;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.DEFAULT_CONSTANT_LABELS;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.DEFAULT_DISPLAY_NAME_PREFIX;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.PERCENTILE_LABEL_KEY;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.SNAPSHOT_SUFFIX_PERCENTILE;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.STACKDRIVER_PROJECT_ID_KEY;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.SUMMARY_SUFFIX_COUNT;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.SUMMARY_SUFFIX_SUM;
 
+import com.google.api.Distribution;
 import com.google.api.Distribution.BucketOptions;
 import com.google.api.Distribution.BucketOptions.Explicit;
 import com.google.api.LabelDescriptor;
@@ -29,221 +36,307 @@
 import com.google.api.MetricDescriptor.MetricKind;
 import com.google.api.MonitoredResource;
 import com.google.common.collect.ImmutableMap;
-import com.google.monitoring.v3.Point;
+import com.google.monitoring.v3.SpanContext;
 import com.google.monitoring.v3.TimeInterval;
 import com.google.monitoring.v3.TimeSeries;
 import com.google.monitoring.v3.TypedValue;
-import io.opencensus.common.Duration;
+import com.google.protobuf.Any;
+import com.google.protobuf.ByteString;
 import io.opencensus.common.Timestamp;
-import io.opencensus.stats.Aggregation.Count;
-import io.opencensus.stats.Aggregation.Distribution;
-import io.opencensus.stats.Aggregation.LastValue;
-import io.opencensus.stats.Aggregation.Mean;
-import io.opencensus.stats.Aggregation.Sum;
-import io.opencensus.stats.AggregationData.CountData;
-import io.opencensus.stats.AggregationData.DistributionData;
-import io.opencensus.stats.AggregationData.LastValueDataDouble;
-import io.opencensus.stats.AggregationData.LastValueDataLong;
-import io.opencensus.stats.AggregationData.MeanData;
-import io.opencensus.stats.AggregationData.SumDataDouble;
-import io.opencensus.stats.AggregationData.SumDataLong;
-import io.opencensus.stats.BucketBoundaries;
-import io.opencensus.stats.Measure.MeasureDouble;
-import io.opencensus.stats.Measure.MeasureLong;
-import io.opencensus.stats.View;
-import io.opencensus.stats.View.AggregationWindow.Cumulative;
-import io.opencensus.stats.View.AggregationWindow.Interval;
-import io.opencensus.stats.View.Name;
-import io.opencensus.stats.ViewData;
-import io.opencensus.stats.ViewData.AggregationWindowData.CumulativeData;
-import io.opencensus.stats.ViewData.AggregationWindowData.IntervalData;
-import io.opencensus.tags.TagKey;
-import io.opencensus.tags.TagValue;
+import io.opencensus.contrib.exemplar.util.AttachmentValueSpanContext;
+import io.opencensus.contrib.exemplar.util.ExemplarUtils;
+import io.opencensus.contrib.resource.util.CloudResource;
+import io.opencensus.contrib.resource.util.ContainerResource;
+import io.opencensus.contrib.resource.util.HostResource;
+import io.opencensus.contrib.resource.util.K8sResource;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.data.AttachmentValue;
+import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
+import io.opencensus.metrics.data.Exemplar;
+import io.opencensus.metrics.export.Distribution.Bucket;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.Summary;
+import io.opencensus.metrics.export.Summary.Snapshot;
+import io.opencensus.metrics.export.Summary.Snapshot.ValueAtPercentile;
+import io.opencensus.metrics.export.Value;
+import io.opencensus.resource.Resource;
 import java.lang.management.ManagementFactory;
+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 
 /** Unit tests for {@link StackdriverExportUtils}. */
 @RunWith(JUnit4.class)
 public class StackdriverExportUtilsTest {
+  private static final String METRIC_NAME = "my measurement";
+  private static final String METRIC_DESCRIPTION = "measure description";
+  private static final String METRIC_UNIT = "us";
+  private static final String METRIC_UNIT_2 = "1";
+  private static final List<LabelKey> LABEL_KEY =
+      Collections.singletonList(LabelKey.create("KEY1", "key description"));
+  private static final List<LabelValue> LABEL_VALUE =
+      Collections.singletonList(LabelValue.create("VALUE1"));
+  private static final List<LabelValue> LABEL_VALUE_2 =
+      Collections.singletonList(LabelValue.create("VALUE2"));
+  private static final List<LabelKey> EMPTY_LABEL_KEY = new ArrayList<>();
+  private static final List<LabelValue> EMPTY_LABEL_VALUE = new ArrayList<>();
+  private static final io.opencensus.metrics.export.MetricDescriptor METRIC_DESCRIPTOR =
+      io.opencensus.metrics.export.MetricDescriptor.create(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.CUMULATIVE_DOUBLE, LABEL_KEY);
+  private static final io.opencensus.metrics.export.MetricDescriptor METRIC_DESCRIPTOR_2 =
+      io.opencensus.metrics.export.MetricDescriptor.create(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT_2, Type.CUMULATIVE_INT64, EMPTY_LABEL_KEY);
+  private static final io.opencensus.metrics.export.MetricDescriptor GAUGE_METRIC_DESCRIPTOR =
+      io.opencensus.metrics.export.MetricDescriptor.create(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.GAUGE_DOUBLE, LABEL_KEY);
 
-  @Rule public final ExpectedException thrown = ExpectedException.none();
+  private static final List<Double> BUCKET_BOUNDARIES = Arrays.asList(1.0, 3.0, 5.0);
+  private static final io.opencensus.metrics.export.Distribution.BucketOptions BUCKET_OPTIONS =
+      io.opencensus.metrics.export.Distribution.BucketOptions.explicitOptions(BUCKET_BOUNDARIES);
+  private static final Value VALUE_DOUBLE = Value.doubleValue(12345678.2);
+  private static final Value VALUE_DOUBLE_2 = Value.doubleValue(133.79);
 
-  private static final TagKey KEY = TagKey.create("KEY");
-  private static final TagKey KEY_2 = TagKey.create("KEY2");
-  private static final TagKey KEY_3 = TagKey.create("KEY3");
-  private static final TagValue VALUE_1 = TagValue.create("VALUE1");
-  private static final TagValue VALUE_2 = TagValue.create("VALUE2");
-  private static final String MEASURE_UNIT = "us";
-  private static final String MEASURE_DESCRIPTION = "measure description";
-  private static final MeasureDouble MEASURE_DOUBLE =
-      MeasureDouble.create("measure1", MEASURE_DESCRIPTION, MEASURE_UNIT);
-  private static final MeasureLong MEASURE_LONG =
-      MeasureLong.create("measure2", MEASURE_DESCRIPTION, MEASURE_UNIT);
-  private static final String VIEW_NAME = "view";
-  private static final String VIEW_DESCRIPTION = "view description";
-  private static final Duration TEN_SECONDS = Duration.create(10, 0);
-  private static final Cumulative CUMULATIVE = Cumulative.create();
-  private static final Interval INTERVAL = Interval.create(TEN_SECONDS);
-  private static final BucketBoundaries BUCKET_BOUNDARIES =
-      BucketBoundaries.create(Arrays.asList(0.0, 1.0, 3.0, 5.0));
-  private static final Sum SUM = Sum.create();
-  private static final Count COUNT = Count.create();
-  private static final Mean MEAN = Mean.create();
-  private static final Distribution DISTRIBUTION = Distribution.create(BUCKET_BOUNDARIES);
-  private static final LastValue LAST_VALUE = LastValue.create();
+  private static final Timestamp TIMESTAMP = Timestamp.fromMillis(3000);
+  private static final Timestamp TIMESTAMP_2 = Timestamp.fromMillis(1000);
+  private static final Timestamp TIMESTAMP_3 = Timestamp.fromMillis(2000);
+  private static final Point POINT = Point.create(VALUE_DOUBLE, TIMESTAMP);
+  private static final Point POINT_2 = Point.create(VALUE_DOUBLE_2, TIMESTAMP_3);
+  private static final io.opencensus.metrics.export.TimeSeries CUMULATIVE_TIME_SERIES =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(LABEL_VALUE, POINT, TIMESTAMP_2);
+  private static final io.opencensus.metrics.export.TimeSeries GAUGE_TIME_SERIES =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(LABEL_VALUE, POINT, null);
+  private static final io.opencensus.metrics.export.TimeSeries GAUGE_TIME_SERIES_2 =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(LABEL_VALUE_2, POINT_2, null);
+  private static final io.opencensus.metrics.export.Metric METRIC =
+      io.opencensus.metrics.export.Metric.createWithOneTimeSeries(
+          METRIC_DESCRIPTOR, CUMULATIVE_TIME_SERIES);
   private static final String PROJECT_ID = "id";
   private static final MonitoredResource DEFAULT_RESOURCE =
       MonitoredResource.newBuilder().setType("global").build();
+  private static final MonitoredResource.Builder DEFAULT_RESOURCE_WITH_PROJECT_ID =
+      MonitoredResource.newBuilder().putLabels(STACKDRIVER_PROJECT_ID_KEY, "proj1");
+
   private static final String DEFAULT_TASK_VALUE =
       "java-" + ManagementFactory.getRuntimeMXBean().getName();
+  private static final Map<LabelKey, LabelValue> EMPTY_CONSTANT_LABELS = Collections.emptyMap();
+
+  private static final io.opencensus.trace.SpanContext SPAN_CONTEXT_INVALID =
+      io.opencensus.trace.SpanContext.INVALID;
+  private static final Exemplar EXEMPLAR_1 =
+      Exemplar.create(
+          1.2,
+          TIMESTAMP_2,
+          Collections.<String, AttachmentValue>singletonMap(
+              "key", AttachmentValueString.create("value")));
+  private static final Exemplar EXEMPLAR_2 =
+      Exemplar.create(
+          5.6,
+          TIMESTAMP_3,
+          ImmutableMap.<String, AttachmentValue>of(
+              ExemplarUtils.ATTACHMENT_KEY_SPAN_CONTEXT,
+              AttachmentValueSpanContext.create(SPAN_CONTEXT_INVALID)));
+  private static final io.opencensus.metrics.export.Distribution DISTRIBUTION =
+      io.opencensus.metrics.export.Distribution.create(
+          3,
+          2,
+          14,
+          BUCKET_OPTIONS,
+          Arrays.asList(
+              Bucket.create(3),
+              Bucket.create(1, EXEMPLAR_1),
+              Bucket.create(2),
+              Bucket.create(4, EXEMPLAR_2)));
+  private static final Summary SUMMARY =
+      Summary.create(
+          10L,
+          10.0,
+          Snapshot.create(
+              10L, 87.07, Collections.singletonList(ValueAtPercentile.create(0.98, 10.2))));
+  private static final Value DOUBLE_VALUE = Value.doubleValue(1.1);
+  private static final Value LONG_VALUE = Value.longValue(10000);
+  private static final Value DISTRIBUTION_VALUE = Value.distributionValue(DISTRIBUTION);
+  private static final Value SUMMARY_VALUE = Value.summaryValue(SUMMARY);
+
+  private static final io.opencensus.metrics.export.MetricDescriptor HISTOGRAM_METRIC_DESCRIPTOR =
+      io.opencensus.metrics.export.MetricDescriptor.create(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.CUMULATIVE_DISTRIBUTION, LABEL_KEY);
+
+  private static final Point DISTRIBUTION_POINT = Point.create(DISTRIBUTION_VALUE, TIMESTAMP);
+  private static final io.opencensus.metrics.export.TimeSeries DISTRIBUTION_TIME_SERIES =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+          LABEL_VALUE, DISTRIBUTION_POINT, null);
+  private static final io.opencensus.metrics.export.Metric DISTRIBUTION_METRIC =
+      io.opencensus.metrics.export.Metric.createWithOneTimeSeries(
+          HISTOGRAM_METRIC_DESCRIPTOR, DISTRIBUTION_TIME_SERIES);
+  private static final Summary SUMMARY_1 =
+      Summary.create(
+          22L,
+          74.8,
+          Snapshot.create(
+              10L,
+              87.07,
+              Arrays.asList(
+                  ValueAtPercentile.create(50, 6),
+                  ValueAtPercentile.create(75, 10.2),
+                  ValueAtPercentile.create(98, 4.6),
+                  ValueAtPercentile.create(99, 1.2))));
+  private static final Value SUMMARY_VALUE_1 = Value.summaryValue(SUMMARY_1);
+  private static final Point SUMMARY_POINT = Point.create(SUMMARY_VALUE_1, TIMESTAMP);
+  private static final Summary SUMMARY_NULL_SUM =
+      Summary.create(
+          22L,
+          null,
+          Snapshot.create(10L, 87.07, Collections.singletonList(ValueAtPercentile.create(50, 6))));
+  private static final Value SUMMARY_VALUE_NULL_SUM = Value.summaryValue(SUMMARY_NULL_SUM);
+  private static final Point SUMMARY_POINT_NULL_SUM =
+      Point.create(SUMMARY_VALUE_NULL_SUM, TIMESTAMP);
+  private static final io.opencensus.metrics.export.TimeSeries SUMMARY_TIME_SERIES_NULL_SUM =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+          LABEL_VALUE, SUMMARY_POINT_NULL_SUM, null);
+  private static final io.opencensus.metrics.export.MetricDescriptor SUMMARY_METRIC_DESCRIPTOR =
+      io.opencensus.metrics.export.MetricDescriptor.create(
+          METRIC_NAME,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          io.opencensus.metrics.export.MetricDescriptor.Type.SUMMARY,
+          LABEL_KEY);
+  private static final io.opencensus.metrics.export.TimeSeries SUMMARY_TIME_SERIES =
+      io.opencensus.metrics.export.TimeSeries.createWithOnePoint(LABEL_VALUE, SUMMARY_POINT, null);
+
+  private static final io.opencensus.metrics.export.Metric SUMMARY_METRIC =
+      io.opencensus.metrics.export.Metric.createWithOneTimeSeries(
+          SUMMARY_METRIC_DESCRIPTOR, SUMMARY_TIME_SERIES);
+  private static final io.opencensus.metrics.export.Metric SUMMARY_METRIC_NULL_SUM =
+      io.opencensus.metrics.export.Metric.createWithOneTimeSeries(
+          SUMMARY_METRIC_DESCRIPTOR, SUMMARY_TIME_SERIES_NULL_SUM);
 
   @Test
-  public void testConstant() {
-    assertThat(StackdriverExportUtils.LABEL_DESCRIPTION).isEqualTo("OpenCensus TagKey");
+  public void testConstants() {
+    assertThat(StackdriverExportUtils.OPENCENSUS_TASK_KEY)
+        .isEqualTo(LabelKey.create("opencensus_task", "Opencensus task identifier"));
+    assertThat(StackdriverExportUtils.OPENCENSUS_TASK_VALUE_DEFAULT.getValue())
+        .isEqualTo(DEFAULT_TASK_VALUE);
+    assertThat(StackdriverExportUtils.STACKDRIVER_PROJECT_ID_KEY).isEqualTo("project_id");
+    assertThat(StackdriverExportUtils.MAX_BATCH_EXPORT_SIZE).isEqualTo(200);
+    assertThat(StackdriverExportUtils.CUSTOM_METRIC_DOMAIN).isEqualTo("custom.googleapis.com/");
+    assertThat(StackdriverExportUtils.CUSTOM_OPENCENSUS_DOMAIN)
+        .isEqualTo("custom.googleapis.com/opencensus/");
+    assertThat(StackdriverExportUtils.DEFAULT_DISPLAY_NAME_PREFIX).isEqualTo("OpenCensus/");
   }
 
   @Test
   public void createLabelDescriptor() {
-    assertThat(StackdriverExportUtils.createLabelDescriptor(TagKey.create("string")))
+    assertThat(StackdriverExportUtils.createLabelDescriptor(LabelKey.create("key", "desc")))
         .isEqualTo(
             LabelDescriptor.newBuilder()
-                .setKey("string")
-                .setDescription(StackdriverExportUtils.LABEL_DESCRIPTION)
+                .setKey("key")
+                .setDescription("desc")
                 .setValueType(ValueType.STRING)
                 .build());
   }
 
   @Test
   public void createMetricKind() {
-    assertThat(StackdriverExportUtils.createMetricKind(CUMULATIVE, SUM))
+    assertThat(StackdriverExportUtils.createMetricKind(Type.CUMULATIVE_INT64))
         .isEqualTo(MetricKind.CUMULATIVE);
-    assertThat(StackdriverExportUtils.createMetricKind(INTERVAL, COUNT))
+    assertThat(StackdriverExportUtils.createMetricKind(Type.SUMMARY))
         .isEqualTo(MetricKind.UNRECOGNIZED);
-    assertThat(StackdriverExportUtils.createMetricKind(CUMULATIVE, LAST_VALUE))
+    assertThat(StackdriverExportUtils.createMetricKind(Type.GAUGE_INT64))
         .isEqualTo(MetricKind.GAUGE);
-    assertThat(StackdriverExportUtils.createMetricKind(INTERVAL, LAST_VALUE))
+    assertThat(StackdriverExportUtils.createMetricKind(Type.GAUGE_DOUBLE))
         .isEqualTo(MetricKind.GAUGE);
   }
 
   @Test
   public void createValueType() {
-    assertThat(StackdriverExportUtils.createValueType(SUM, MEASURE_DOUBLE))
+    assertThat(StackdriverExportUtils.createValueType(Type.GAUGE_DOUBLE))
         .isEqualTo(MetricDescriptor.ValueType.DOUBLE);
-    assertThat(StackdriverExportUtils.createValueType(SUM, MEASURE_LONG))
+    assertThat(StackdriverExportUtils.createValueType(Type.CUMULATIVE_INT64))
         .isEqualTo(MetricDescriptor.ValueType.INT64);
-    assertThat(StackdriverExportUtils.createValueType(COUNT, MEASURE_DOUBLE))
+    assertThat(StackdriverExportUtils.createValueType(Type.GAUGE_INT64))
         .isEqualTo(MetricDescriptor.ValueType.INT64);
-    assertThat(StackdriverExportUtils.createValueType(COUNT, MEASURE_LONG))
-        .isEqualTo(MetricDescriptor.ValueType.INT64);
-    assertThat(StackdriverExportUtils.createValueType(MEAN, MEASURE_DOUBLE))
+    assertThat(StackdriverExportUtils.createValueType(Type.CUMULATIVE_DOUBLE))
         .isEqualTo(MetricDescriptor.ValueType.DOUBLE);
-    assertThat(StackdriverExportUtils.createValueType(MEAN, MEASURE_LONG))
-        .isEqualTo(MetricDescriptor.ValueType.DOUBLE);
-    assertThat(StackdriverExportUtils.createValueType(DISTRIBUTION, MEASURE_DOUBLE))
+    assertThat(StackdriverExportUtils.createValueType(Type.GAUGE_DISTRIBUTION))
         .isEqualTo(MetricDescriptor.ValueType.DISTRIBUTION);
-    assertThat(StackdriverExportUtils.createValueType(DISTRIBUTION, MEASURE_LONG))
+    assertThat(StackdriverExportUtils.createValueType(Type.CUMULATIVE_DISTRIBUTION))
         .isEqualTo(MetricDescriptor.ValueType.DISTRIBUTION);
-    assertThat(StackdriverExportUtils.createValueType(LAST_VALUE, MEASURE_DOUBLE))
-        .isEqualTo(MetricDescriptor.ValueType.DOUBLE);
-    assertThat(StackdriverExportUtils.createValueType(LAST_VALUE, MEASURE_LONG))
-        .isEqualTo(MetricDescriptor.ValueType.INT64);
-  }
-
-  @Test
-  public void createUnit() {
-    assertThat(StackdriverExportUtils.createUnit(SUM, MEASURE_DOUBLE)).isEqualTo(MEASURE_UNIT);
-    assertThat(StackdriverExportUtils.createUnit(COUNT, MEASURE_DOUBLE)).isEqualTo("1");
-    assertThat(StackdriverExportUtils.createUnit(MEAN, MEASURE_DOUBLE)).isEqualTo(MEASURE_UNIT);
-    assertThat(StackdriverExportUtils.createUnit(DISTRIBUTION, MEASURE_DOUBLE))
-        .isEqualTo(MEASURE_UNIT);
-    assertThat(StackdriverExportUtils.createUnit(LAST_VALUE, MEASURE_DOUBLE))
-        .isEqualTo(MEASURE_UNIT);
   }
 
   @Test
   public void createMetric() {
-    View view =
-        View.create(
-            Name.create(VIEW_NAME),
-            VIEW_DESCRIPTION,
-            MEASURE_DOUBLE,
-            DISTRIBUTION,
-            Arrays.asList(KEY),
-            CUMULATIVE);
     assertThat(
             StackdriverExportUtils.createMetric(
-                view, Arrays.asList(VALUE_1), CUSTOM_OPENCENSUS_DOMAIN))
+                METRIC_DESCRIPTOR, LABEL_VALUE, CUSTOM_OPENCENSUS_DOMAIN, DEFAULT_CONSTANT_LABELS))
         .isEqualTo(
             Metric.newBuilder()
-                .setType("custom.googleapis.com/opencensus/" + VIEW_NAME)
-                .putLabels("KEY", "VALUE1")
-                .putLabels(StackdriverExportUtils.OPENCENSUS_TASK, DEFAULT_TASK_VALUE)
+                .setType("custom.googleapis.com/opencensus/" + METRIC_NAME)
+                .putLabels("KEY1", "VALUE1")
+                .putLabels(StackdriverExportUtils.OPENCENSUS_TASK_KEY.getKey(), DEFAULT_TASK_VALUE)
                 .build());
   }
 
   @Test
   public void createMetric_WithExternalMetricDomain() {
-    View view =
-        View.create(
-            Name.create(VIEW_NAME),
-            VIEW_DESCRIPTION,
-            MEASURE_DOUBLE,
-            DISTRIBUTION,
-            Arrays.asList(KEY),
-            CUMULATIVE);
     String prometheusDomain = "external.googleapis.com/prometheus/";
-    assertThat(StackdriverExportUtils.createMetric(view, Arrays.asList(VALUE_1), prometheusDomain))
-        .isEqualTo(
-            Metric.newBuilder()
-                .setType(prometheusDomain + VIEW_NAME)
-                .putLabels("KEY", "VALUE1")
-                .putLabels(StackdriverExportUtils.OPENCENSUS_TASK, DEFAULT_TASK_VALUE)
-                .build());
-  }
-
-  @Test
-  public void createMetric_skipNullTagValue() {
-    View view =
-        View.create(
-            Name.create(VIEW_NAME),
-            VIEW_DESCRIPTION,
-            MEASURE_DOUBLE,
-            DISTRIBUTION,
-            Arrays.asList(KEY, KEY_2, KEY_3),
-            CUMULATIVE);
     assertThat(
             StackdriverExportUtils.createMetric(
-                view, Arrays.asList(VALUE_1, null, VALUE_2), CUSTOM_OPENCENSUS_DOMAIN))
+                METRIC_DESCRIPTOR, LABEL_VALUE, prometheusDomain, DEFAULT_CONSTANT_LABELS))
         .isEqualTo(
             Metric.newBuilder()
-                .setType("custom.googleapis.com/opencensus/" + VIEW_NAME)
-                .putLabels("KEY", "VALUE1")
-                .putLabels("KEY3", "VALUE2")
-                .putLabels(StackdriverExportUtils.OPENCENSUS_TASK, DEFAULT_TASK_VALUE)
+                .setType(prometheusDomain + METRIC_NAME)
+                .putLabels("KEY1", "VALUE1")
+                .putLabels(StackdriverExportUtils.OPENCENSUS_TASK_KEY.getKey(), DEFAULT_TASK_VALUE)
                 .build());
   }
 
   @Test
-  public void createMetric_throwWhenTagKeysAndValuesHaveDifferentSize() {
-    View view =
-        View.create(
-            Name.create(VIEW_NAME),
-            VIEW_DESCRIPTION,
-            MEASURE_DOUBLE,
-            DISTRIBUTION,
-            Arrays.asList(KEY, KEY_2, KEY_3),
-            CUMULATIVE);
-    List<TagValue> tagValues = Arrays.asList(VALUE_1, null);
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("TagKeys and TagValues don't have same size.");
-    StackdriverExportUtils.createMetric(view, tagValues, CUSTOM_OPENCENSUS_DOMAIN);
+  public void createMetric_EmptyLabel() {
+    assertThat(
+            StackdriverExportUtils.createMetric(
+                METRIC_DESCRIPTOR_2,
+                EMPTY_LABEL_VALUE,
+                CUSTOM_OPENCENSUS_DOMAIN,
+                DEFAULT_CONSTANT_LABELS))
+        .isEqualTo(
+            Metric.newBuilder()
+                .setType("custom.googleapis.com/opencensus/" + METRIC_NAME)
+                .putLabels(StackdriverExportUtils.OPENCENSUS_TASK_KEY.getKey(), DEFAULT_TASK_VALUE)
+                .build());
+  }
+
+  @Test
+  public void createMetric_EmptyConstantLabels() {
+    assertThat(
+            StackdriverExportUtils.createMetric(
+                METRIC_DESCRIPTOR_2,
+                EMPTY_LABEL_VALUE,
+                CUSTOM_OPENCENSUS_DOMAIN,
+                EMPTY_CONSTANT_LABELS))
+        .isEqualTo(
+            Metric.newBuilder().setType("custom.googleapis.com/opencensus/" + METRIC_NAME).build());
+  }
+
+  @Test
+  public void createMetric_CustomConstantLabels() {
+    Map<LabelKey, LabelValue> constantLabels =
+        Collections.singletonMap(LabelKey.create("my_key", "desc"), LabelValue.create("value"));
+    assertThat(
+            StackdriverExportUtils.createMetric(
+                METRIC_DESCRIPTOR_2, EMPTY_LABEL_VALUE, CUSTOM_OPENCENSUS_DOMAIN, constantLabels))
+        .isEqualTo(
+            Metric.newBuilder()
+                .setType("custom.googleapis.com/opencensus/" + METRIC_NAME)
+                .putAllLabels(Collections.singletonMap("my_key", "value"))
+                .build());
   }
 
   @Test
@@ -261,37 +354,8 @@
   }
 
   @Test
-  public void createTimeInterval_cumulative() {
-    Timestamp censusTimestamp1 = Timestamp.create(100, 3000);
-    Timestamp censusTimestamp2 = Timestamp.create(200, 0);
-    assertThat(
-            StackdriverExportUtils.createTimeInterval(
-                CumulativeData.create(censusTimestamp1, censusTimestamp2), DISTRIBUTION))
-        .isEqualTo(
-            TimeInterval.newBuilder()
-                .setStartTime(StackdriverExportUtils.convertTimestamp(censusTimestamp1))
-                .setEndTime(StackdriverExportUtils.convertTimestamp(censusTimestamp2))
-                .build());
-    assertThat(
-            StackdriverExportUtils.createTimeInterval(
-                CumulativeData.create(censusTimestamp1, censusTimestamp2), LAST_VALUE))
-        .isEqualTo(
-            TimeInterval.newBuilder()
-                .setEndTime(StackdriverExportUtils.convertTimestamp(censusTimestamp2))
-                .build());
-  }
-
-  @Test
-  public void createTimeInterval_interval() {
-    IntervalData intervalData = IntervalData.create(Timestamp.create(200, 0));
-    // Only the Cumulative view will be supported in this version.
-    thrown.expect(IllegalArgumentException.class);
-    StackdriverExportUtils.createTimeInterval(intervalData, SUM);
-  }
-
-  @Test
   public void createBucketOptions() {
-    assertThat(StackdriverExportUtils.createBucketOptions(BUCKET_BOUNDARIES))
+    assertThat(StackdriverExportUtils.createBucketOptions(BUCKET_OPTIONS))
         .isEqualTo(
             BucketOptions.newBuilder()
                 .setExplicitBuckets(
@@ -300,259 +364,316 @@
   }
 
   @Test
+  public void createBucketOptions_Null() {
+    assertThat(StackdriverExportUtils.createBucketOptions(null))
+        .isEqualTo(BucketOptions.newBuilder().build());
+  }
+
+  @Test
   public void createDistribution() {
-    DistributionData distributionData =
-        DistributionData.create(2, 3, 0, 5, 14, Arrays.asList(0L, 1L, 1L, 0L, 1L));
-    assertThat(StackdriverExportUtils.createDistribution(distributionData, BUCKET_BOUNDARIES))
+    StackdriverExportUtils.setCachedProjectIdForExemplar(null);
+    assertThat(StackdriverExportUtils.createDistribution(DISTRIBUTION))
         .isEqualTo(
-            com.google.api.Distribution.newBuilder()
-                .setMean(2)
+            Distribution.newBuilder()
                 .setCount(3)
-                // TODO(songya): uncomment this once Stackdriver supports setting max and min.
-                // .setRange(
-                //     com.google.api.Distribution.Range.newBuilder().setMin(0).setMax(5).build())
-                .setBucketOptions(StackdriverExportUtils.createBucketOptions(BUCKET_BOUNDARIES))
-                .addAllBucketCounts(Arrays.asList(0L, 1L, 1L, 0L, 1L))
+                .setMean(0.6666666666666666)
+                .setBucketOptions(StackdriverExportUtils.createBucketOptions(BUCKET_OPTIONS))
+                .addAllBucketCounts(Arrays.asList(0L, 3L, 1L, 2L, 4L))
                 .setSumOfSquaredDeviation(14)
+                .addAllExemplars(
+                    Arrays.<Distribution.Exemplar>asList(
+                        Distribution.Exemplar.newBuilder()
+                            .setValue(1.2)
+                            .setTimestamp(StackdriverExportUtils.convertTimestamp(TIMESTAMP_2))
+                            .addAttachments(
+                                Any.newBuilder()
+                                    .setTypeUrl(
+                                        StackdriverExportUtils.EXEMPLAR_ATTACHMENT_TYPE_STRING)
+                                    .setValue(ByteString.copyFromUtf8("value"))
+                                    .build())
+                            .build(),
+                        Distribution.Exemplar.newBuilder()
+                            .setValue(5.6)
+                            .setTimestamp(StackdriverExportUtils.convertTimestamp(TIMESTAMP_3))
+                            // Cached project ID is set to null, so no SpanContext attachment will
+                            // be created.
+                            .build()))
                 .build());
   }
 
   @Test
   public void createTypedValue() {
-    assertThat(StackdriverExportUtils.createTypedValue(SUM, SumDataDouble.create(1.1)))
+    assertThat(StackdriverExportUtils.createTypedValue(DOUBLE_VALUE))
         .isEqualTo(TypedValue.newBuilder().setDoubleValue(1.1).build());
-    assertThat(StackdriverExportUtils.createTypedValue(SUM, SumDataLong.create(10000)))
+    assertThat(StackdriverExportUtils.createTypedValue(LONG_VALUE))
         .isEqualTo(TypedValue.newBuilder().setInt64Value(10000).build());
-    assertThat(StackdriverExportUtils.createTypedValue(COUNT, CountData.create(55)))
-        .isEqualTo(TypedValue.newBuilder().setInt64Value(55).build());
-    assertThat(StackdriverExportUtils.createTypedValue(MEAN, MeanData.create(7.7, 8)))
-        .isEqualTo(TypedValue.newBuilder().setDoubleValue(7.7).build());
-    DistributionData distributionData =
-        DistributionData.create(2, 3, 0, 5, 14, Arrays.asList(0L, 1L, 1L, 0L, 1L));
-    assertThat(StackdriverExportUtils.createTypedValue(DISTRIBUTION, distributionData))
+    assertThat(StackdriverExportUtils.createTypedValue(DISTRIBUTION_VALUE))
         .isEqualTo(
             TypedValue.newBuilder()
-                .setDistributionValue(
-                    StackdriverExportUtils.createDistribution(distributionData, BUCKET_BOUNDARIES))
+                .setDistributionValue(StackdriverExportUtils.createDistribution(DISTRIBUTION))
                 .build());
-    assertThat(StackdriverExportUtils.createTypedValue(LAST_VALUE, LastValueDataDouble.create(9.9)))
-        .isEqualTo(TypedValue.newBuilder().setDoubleValue(9.9).build());
-    assertThat(StackdriverExportUtils.createTypedValue(LAST_VALUE, LastValueDataLong.create(90000)))
-        .isEqualTo(TypedValue.newBuilder().setInt64Value(90000).build());
   }
 
   @Test
-  public void createPoint_cumulative() {
-    Timestamp censusTimestamp1 = Timestamp.create(100, 3000);
-    Timestamp censusTimestamp2 = Timestamp.create(200, 0);
-    CumulativeData cumulativeData = CumulativeData.create(censusTimestamp1, censusTimestamp2);
-    SumDataDouble sumDataDouble = SumDataDouble.create(33.3);
+  public void createTypedValue_UnknownType() {
+    assertThat(StackdriverExportUtils.createTypedValue(SUMMARY_VALUE))
+        .isEqualTo(TypedValue.newBuilder().build());
+  }
 
-    assertThat(StackdriverExportUtils.createPoint(sumDataDouble, cumulativeData, SUM))
+  @Test
+  public void createPoint() {
+    assertThat(StackdriverExportUtils.createPoint(POINT, null))
         .isEqualTo(
-            Point.newBuilder()
-                .setInterval(StackdriverExportUtils.createTimeInterval(cumulativeData, SUM))
-                .setValue(StackdriverExportUtils.createTypedValue(SUM, sumDataDouble))
+            com.google.monitoring.v3.Point.newBuilder()
+                .setInterval(
+                    TimeInterval.newBuilder()
+                        .setEndTime(StackdriverExportUtils.convertTimestamp(TIMESTAMP))
+                        .build())
+                .setValue(StackdriverExportUtils.createTypedValue(VALUE_DOUBLE))
                 .build());
   }
 
   @Test
-  public void createPoint_interval() {
-    IntervalData intervalData = IntervalData.create(Timestamp.create(200, 0));
-    SumDataDouble sumDataDouble = SumDataDouble.create(33.3);
-    // Only the Cumulative view will be supported in this version.
-    thrown.expect(IllegalArgumentException.class);
-    StackdriverExportUtils.createPoint(sumDataDouble, intervalData, SUM);
+  public void createPoint_Cumulative() {
+    assertThat(StackdriverExportUtils.createPoint(POINT, TIMESTAMP_2))
+        .isEqualTo(
+            com.google.monitoring.v3.Point.newBuilder()
+                .setInterval(
+                    TimeInterval.newBuilder()
+                        .setStartTime(StackdriverExportUtils.convertTimestamp(TIMESTAMP_2))
+                        .setEndTime(StackdriverExportUtils.convertTimestamp(TIMESTAMP))
+                        .build())
+                .setValue(StackdriverExportUtils.createTypedValue(VALUE_DOUBLE))
+                .build());
   }
 
   @Test
-  public void createMetricDescriptor_cumulative() {
-    View view =
-        View.create(
-            Name.create(VIEW_NAME),
-            VIEW_DESCRIPTION,
-            MEASURE_DOUBLE,
-            DISTRIBUTION,
-            Arrays.asList(KEY),
-            CUMULATIVE);
+  public void createMetricDescriptor() {
     MetricDescriptor metricDescriptor =
         StackdriverExportUtils.createMetricDescriptor(
-            view, PROJECT_ID, "custom.googleapis.com/myorg/", "myorg/");
+            METRIC_DESCRIPTOR,
+            PROJECT_ID,
+            "custom.googleapis.com/myorg/",
+            "myorg/",
+            DEFAULT_CONSTANT_LABELS);
     assertThat(metricDescriptor.getName())
         .isEqualTo(
             "projects/"
                 + PROJECT_ID
                 + "/metricDescriptors/custom.googleapis.com/myorg/"
-                + VIEW_NAME);
-    assertThat(metricDescriptor.getDescription()).isEqualTo(VIEW_DESCRIPTION);
-    assertThat(metricDescriptor.getDisplayName()).isEqualTo("myorg/" + VIEW_NAME);
-    assertThat(metricDescriptor.getType()).isEqualTo("custom.googleapis.com/myorg/" + VIEW_NAME);
-    assertThat(metricDescriptor.getUnit()).isEqualTo(MEASURE_UNIT);
+                + METRIC_NAME);
+    assertThat(metricDescriptor.getDescription()).isEqualTo(METRIC_DESCRIPTION);
+    assertThat(metricDescriptor.getDisplayName()).isEqualTo("myorg/" + METRIC_NAME);
+    assertThat(metricDescriptor.getType()).isEqualTo("custom.googleapis.com/myorg/" + METRIC_NAME);
+    assertThat(metricDescriptor.getUnit()).isEqualTo(METRIC_UNIT);
     assertThat(metricDescriptor.getMetricKind()).isEqualTo(MetricKind.CUMULATIVE);
-    assertThat(metricDescriptor.getValueType()).isEqualTo(MetricDescriptor.ValueType.DISTRIBUTION);
+
+    assertThat(metricDescriptor.getValueType()).isEqualTo(MetricDescriptor.ValueType.DOUBLE);
     assertThat(metricDescriptor.getLabelsList())
         .containsExactly(
             LabelDescriptor.newBuilder()
-                .setKey(KEY.getName())
-                .setDescription(StackdriverExportUtils.LABEL_DESCRIPTION)
+                .setKey(LABEL_KEY.get(0).getKey())
+                .setDescription(LABEL_KEY.get(0).getDescription())
                 .setValueType(ValueType.STRING)
                 .build(),
             LabelDescriptor.newBuilder()
-                .setKey(StackdriverExportUtils.OPENCENSUS_TASK)
-                .setDescription(StackdriverExportUtils.OPENCENSUS_TASK_DESCRIPTION)
+                .setKey(StackdriverExportUtils.OPENCENSUS_TASK_KEY.getKey())
+                .setDescription(StackdriverExportUtils.OPENCENSUS_TASK_KEY.getDescription())
                 .setValueType(ValueType.STRING)
                 .build());
   }
 
   @Test
-  public void createMetricDescriptor_cumulative_count() {
-    View view =
-        View.create(
-            Name.create(VIEW_NAME),
-            VIEW_DESCRIPTION,
-            MEASURE_DOUBLE,
-            COUNT,
-            Arrays.asList(KEY),
-            CUMULATIVE);
+  public void createMetricDescriptor_WithCustomConstantLabels() {
+    Map<LabelKey, LabelValue> constantLabels =
+        Collections.singletonMap(LabelKey.create("my_key", "desc"), LabelValue.create("value"));
     MetricDescriptor metricDescriptor =
         StackdriverExportUtils.createMetricDescriptor(
-            view, PROJECT_ID, CUSTOM_OPENCENSUS_DOMAIN, DEFAULT_DISPLAY_NAME_PREFIX);
+            METRIC_DESCRIPTOR,
+            PROJECT_ID,
+            "custom.googleapis.com/myorg/",
+            "myorg/",
+            constantLabels);
+    assertThat(metricDescriptor.getLabelsList())
+        .containsExactly(
+            LabelDescriptor.newBuilder()
+                .setKey(LABEL_KEY.get(0).getKey())
+                .setDescription(LABEL_KEY.get(0).getDescription())
+                .setValueType(ValueType.STRING)
+                .build(),
+            LabelDescriptor.newBuilder()
+                .setKey("my_key")
+                .setDescription("desc")
+                .setValueType(ValueType.STRING)
+                .build());
+  }
+
+  @Test
+  public void createMetricDescriptor_cumulative() {
+    MetricDescriptor metricDescriptor =
+        StackdriverExportUtils.createMetricDescriptor(
+            METRIC_DESCRIPTOR_2,
+            PROJECT_ID,
+            CUSTOM_OPENCENSUS_DOMAIN,
+            DEFAULT_DISPLAY_NAME_PREFIX,
+            DEFAULT_CONSTANT_LABELS);
     assertThat(metricDescriptor.getName())
         .isEqualTo(
             "projects/"
                 + PROJECT_ID
                 + "/metricDescriptors/custom.googleapis.com/opencensus/"
-                + VIEW_NAME);
-    assertThat(metricDescriptor.getDescription()).isEqualTo(VIEW_DESCRIPTION);
-    assertThat(metricDescriptor.getDisplayName()).isEqualTo("OpenCensus/" + VIEW_NAME);
+                + METRIC_NAME);
+    assertThat(metricDescriptor.getDescription()).isEqualTo(METRIC_DESCRIPTION);
+    assertThat(metricDescriptor.getDisplayName()).isEqualTo("OpenCensus/" + METRIC_NAME);
     assertThat(metricDescriptor.getType())
-        .isEqualTo("custom.googleapis.com/opencensus/" + VIEW_NAME);
+        .isEqualTo("custom.googleapis.com/opencensus/" + METRIC_NAME);
     assertThat(metricDescriptor.getUnit()).isEqualTo("1");
     assertThat(metricDescriptor.getMetricKind()).isEqualTo(MetricKind.CUMULATIVE);
     assertThat(metricDescriptor.getValueType()).isEqualTo(MetricDescriptor.ValueType.INT64);
     assertThat(metricDescriptor.getLabelsList())
         .containsExactly(
             LabelDescriptor.newBuilder()
-                .setKey(KEY.getName())
-                .setDescription(StackdriverExportUtils.LABEL_DESCRIPTION)
-                .setValueType(ValueType.STRING)
-                .build(),
-            LabelDescriptor.newBuilder()
-                .setKey(StackdriverExportUtils.OPENCENSUS_TASK)
-                .setDescription(StackdriverExportUtils.OPENCENSUS_TASK_DESCRIPTION)
+                .setKey(StackdriverExportUtils.OPENCENSUS_TASK_KEY.getKey())
+                .setDescription(StackdriverExportUtils.OPENCENSUS_TASK_KEY.getDescription())
                 .setValueType(ValueType.STRING)
                 .build());
   }
 
   @Test
-  public void createMetricDescriptor_interval() {
-    View view =
-        View.create(
-            Name.create(VIEW_NAME),
-            VIEW_DESCRIPTION,
-            MEASURE_DOUBLE,
-            DISTRIBUTION,
-            Arrays.asList(KEY),
-            INTERVAL);
-    assertThat(
-            StackdriverExportUtils.createMetricDescriptor(
-                view, PROJECT_ID, CUSTOM_OPENCENSUS_DOMAIN, DEFAULT_DISPLAY_NAME_PREFIX))
-        .isNull();
+  public void createTimeSeriesList_Cumulative() {
+    List<TimeSeries> timeSeriesList =
+        StackdriverExportUtils.createTimeSeriesList(
+            METRIC,
+            DEFAULT_RESOURCE,
+            CUSTOM_OPENCENSUS_DOMAIN,
+            PROJECT_ID,
+            DEFAULT_CONSTANT_LABELS);
+    assertThat(timeSeriesList).hasSize(1);
+    TimeSeries expectedTimeSeries =
+        TimeSeries.newBuilder()
+            .setMetricKind(MetricKind.CUMULATIVE)
+            .setValueType(MetricDescriptor.ValueType.DOUBLE)
+            .setMetric(
+                StackdriverExportUtils.createMetric(
+                    METRIC_DESCRIPTOR,
+                    LABEL_VALUE,
+                    CUSTOM_OPENCENSUS_DOMAIN,
+                    DEFAULT_CONSTANT_LABELS))
+            .setResource(MonitoredResource.newBuilder().setType("global"))
+            .addPoints(StackdriverExportUtils.createPoint(POINT, TIMESTAMP_2))
+            .build();
+    assertThat(timeSeriesList).containsExactly(expectedTimeSeries);
   }
 
   @Test
-  public void createTimeSeriesList_cumulative() {
-    View view =
-        View.create(
-            Name.create(VIEW_NAME),
-            VIEW_DESCRIPTION,
-            MEASURE_DOUBLE,
-            DISTRIBUTION,
-            Arrays.asList(KEY),
-            CUMULATIVE);
-    DistributionData distributionData1 =
-        DistributionData.create(2, 3, 0, 5, 14, Arrays.asList(0L, 1L, 1L, 0L, 1L));
-    DistributionData distributionData2 =
-        DistributionData.create(-1, 1, -1, -1, 0, Arrays.asList(1L, 0L, 0L, 0L, 0L));
-    Map<List<TagValue>, DistributionData> aggregationMap =
-        ImmutableMap.of(
-            Arrays.asList(VALUE_1), distributionData1, Arrays.asList(VALUE_2), distributionData2);
-    CumulativeData cumulativeData =
-        CumulativeData.create(Timestamp.fromMillis(1000), Timestamp.fromMillis(2000));
-    ViewData viewData = ViewData.create(view, aggregationMap, cumulativeData);
+  public void createTimeSeriesList_Distribution() {
     List<TimeSeries> timeSeriesList =
         StackdriverExportUtils.createTimeSeriesList(
-            viewData, DEFAULT_RESOURCE, CUSTOM_OPENCENSUS_DOMAIN);
+            DISTRIBUTION_METRIC,
+            DEFAULT_RESOURCE,
+            CUSTOM_OPENCENSUS_DOMAIN,
+            PROJECT_ID,
+            DEFAULT_CONSTANT_LABELS);
+
+    assertThat(timeSeriesList.size()).isEqualTo(1);
+    TimeSeries timeSeries = timeSeriesList.get(0);
+    assertThat(timeSeries.getPointsCount()).isEqualTo(1);
+    String expectedSpanName =
+        "projects/id/traces/00000000000000000000000000000000/spans/0000000000000000";
+    assertThat(timeSeries.getPoints(0).getValue().getDistributionValue())
+        .isEqualTo(
+            com.google.api.Distribution.newBuilder()
+                .setCount(3)
+                .setMean(0.6666666666666666)
+                .setBucketOptions(
+                    BucketOptions.newBuilder()
+                        .setExplicitBuckets(
+                            Explicit.newBuilder()
+                                .addAllBounds(Arrays.asList(0.0, 1.0, 3.0, 5.0))
+                                .build())
+                        .build())
+                .addAllBucketCounts(Arrays.asList(0L, 3L, 1L, 2L, 4L))
+                .setSumOfSquaredDeviation(14)
+                .addAllExemplars(
+                    Arrays.<Distribution.Exemplar>asList(
+                        Distribution.Exemplar.newBuilder()
+                            .setValue(1.2)
+                            .setTimestamp(StackdriverExportUtils.convertTimestamp(TIMESTAMP_2))
+                            .addAttachments(
+                                Any.newBuilder()
+                                    .setTypeUrl(
+                                        StackdriverExportUtils.EXEMPLAR_ATTACHMENT_TYPE_STRING)
+                                    .setValue(ByteString.copyFromUtf8("value"))
+                                    .build())
+                            .build(),
+                        Distribution.Exemplar.newBuilder()
+                            .setValue(5.6)
+                            .setTimestamp(StackdriverExportUtils.convertTimestamp(TIMESTAMP_3))
+                            .addAttachments(
+                                Any.newBuilder()
+                                    .setTypeUrl(
+                                        StackdriverExportUtils
+                                            .EXEMPLAR_ATTACHMENT_TYPE_SPAN_CONTEXT)
+                                    .setValue(
+                                        SpanContext.newBuilder()
+                                            .setSpanName(expectedSpanName)
+                                            .build()
+                                            .toByteString())
+                                    .build())
+                            .build()))
+                .build());
+  }
+
+  @Test
+  public void createTimeSeriesList_Gauge() {
+    io.opencensus.metrics.export.Metric metric =
+        io.opencensus.metrics.export.Metric.create(
+            GAUGE_METRIC_DESCRIPTOR, Arrays.asList(GAUGE_TIME_SERIES, GAUGE_TIME_SERIES_2));
+
+    List<TimeSeries> timeSeriesList =
+        StackdriverExportUtils.createTimeSeriesList(
+            metric,
+            DEFAULT_RESOURCE,
+            CUSTOM_OPENCENSUS_DOMAIN,
+            PROJECT_ID,
+            DEFAULT_CONSTANT_LABELS);
     assertThat(timeSeriesList).hasSize(2);
     TimeSeries expected1 =
         TimeSeries.newBuilder()
-            .setMetricKind(MetricKind.CUMULATIVE)
-            .setValueType(MetricDescriptor.ValueType.DISTRIBUTION)
+            .setMetricKind(MetricKind.GAUGE)
+            .setValueType(MetricDescriptor.ValueType.DOUBLE)
             .setMetric(
                 StackdriverExportUtils.createMetric(
-                    view, Arrays.asList(VALUE_1), CUSTOM_OPENCENSUS_DOMAIN))
+                    GAUGE_METRIC_DESCRIPTOR,
+                    LABEL_VALUE,
+                    CUSTOM_OPENCENSUS_DOMAIN,
+                    DEFAULT_CONSTANT_LABELS))
             .setResource(MonitoredResource.newBuilder().setType("global"))
-            .addPoints(
-                StackdriverExportUtils.createPoint(distributionData1, cumulativeData, DISTRIBUTION))
+            .addPoints(StackdriverExportUtils.createPoint(POINT, null))
             .build();
     TimeSeries expected2 =
         TimeSeries.newBuilder()
-            .setMetricKind(MetricKind.CUMULATIVE)
-            .setValueType(MetricDescriptor.ValueType.DISTRIBUTION)
+            .setMetricKind(MetricKind.GAUGE)
+            .setValueType(MetricDescriptor.ValueType.DOUBLE)
             .setMetric(
                 StackdriverExportUtils.createMetric(
-                    view, Arrays.asList(VALUE_2), CUSTOM_OPENCENSUS_DOMAIN))
+                    GAUGE_METRIC_DESCRIPTOR,
+                    LABEL_VALUE_2,
+                    CUSTOM_OPENCENSUS_DOMAIN,
+                    DEFAULT_CONSTANT_LABELS))
             .setResource(MonitoredResource.newBuilder().setType("global"))
-            .addPoints(
-                StackdriverExportUtils.createPoint(distributionData2, cumulativeData, DISTRIBUTION))
+            .addPoints(StackdriverExportUtils.createPoint(POINT_2, null))
             .build();
     assertThat(timeSeriesList).containsExactly(expected1, expected2);
   }
 
   @Test
-  public void createTimeSeriesList_interval() {
-    View view =
-        View.create(
-            Name.create(VIEW_NAME),
-            VIEW_DESCRIPTION,
-            MEASURE_DOUBLE,
-            DISTRIBUTION,
-            Arrays.asList(KEY),
-            INTERVAL);
-    Map<List<TagValue>, DistributionData> aggregationMap =
-        ImmutableMap.of(
-            Arrays.asList(VALUE_1),
-            DistributionData.create(2, 3, 0, 5, 14, Arrays.asList(0L, 1L, 1L, 0L, 1L)),
-            Arrays.asList(VALUE_2),
-            DistributionData.create(-1, 1, -1, -1, 0, Arrays.asList(1L, 0L, 0L, 0L, 0L)));
-    ViewData viewData =
-        ViewData.create(view, aggregationMap, IntervalData.create(Timestamp.fromMillis(2000)));
-    assertThat(
-            StackdriverExportUtils.createTimeSeriesList(
-                viewData, DEFAULT_RESOURCE, CUSTOM_OPENCENSUS_DOMAIN))
-        .isEmpty();
-  }
-
-  @Test
   public void createTimeSeriesList_withCustomMonitoredResource() {
     MonitoredResource resource =
         MonitoredResource.newBuilder().setType("global").putLabels("key", "value").build();
-    View view =
-        View.create(
-            Name.create(VIEW_NAME),
-            VIEW_DESCRIPTION,
-            MEASURE_DOUBLE,
-            SUM,
-            Arrays.asList(KEY),
-            CUMULATIVE);
-    SumDataDouble sumData = SumDataDouble.create(55.5);
-    Map<List<TagValue>, SumDataDouble> aggregationMap =
-        ImmutableMap.of(Arrays.asList(VALUE_1), sumData);
-    CumulativeData cumulativeData =
-        CumulativeData.create(Timestamp.fromMillis(1000), Timestamp.fromMillis(2000));
-    ViewData viewData = ViewData.create(view, aggregationMap, cumulativeData);
     List<TimeSeries> timeSeriesList =
-        StackdriverExportUtils.createTimeSeriesList(viewData, resource, CUSTOM_OPENCENSUS_DOMAIN);
+        StackdriverExportUtils.createTimeSeriesList(
+            METRIC, resource, CUSTOM_OPENCENSUS_DOMAIN, PROJECT_ID, DEFAULT_CONSTANT_LABELS);
     assertThat(timeSeriesList)
         .containsExactly(
             TimeSeries.newBuilder()
@@ -560,9 +681,233 @@
                 .setValueType(MetricDescriptor.ValueType.DOUBLE)
                 .setMetric(
                     StackdriverExportUtils.createMetric(
-                        view, Arrays.asList(VALUE_1), CUSTOM_OPENCENSUS_DOMAIN))
+                        METRIC_DESCRIPTOR,
+                        LABEL_VALUE,
+                        CUSTOM_OPENCENSUS_DOMAIN,
+                        DEFAULT_CONSTANT_LABELS))
                 .setResource(resource)
-                .addPoints(StackdriverExportUtils.createPoint(sumData, cumulativeData, SUM))
+                .addPoints(StackdriverExportUtils.createPoint(POINT, TIMESTAMP_2))
                 .build());
   }
+
+  @Test
+  public void convertSummaryMetric() {
+    io.opencensus.metrics.export.MetricDescriptor expectedMetricDescriptor1 =
+        io.opencensus.metrics.export.MetricDescriptor.create(
+            METRIC_NAME + SUMMARY_SUFFIX_COUNT,
+            METRIC_DESCRIPTION,
+            METRIC_UNIT_2,
+            Type.CUMULATIVE_INT64,
+            LABEL_KEY);
+    io.opencensus.metrics.export.MetricDescriptor expectedMetricDescriptor2 =
+        io.opencensus.metrics.export.MetricDescriptor.create(
+            METRIC_NAME + SUMMARY_SUFFIX_SUM,
+            METRIC_DESCRIPTION,
+            METRIC_UNIT,
+            Type.CUMULATIVE_DOUBLE,
+            LABEL_KEY);
+    List<LabelKey> labelKeys = new ArrayList<>(LABEL_KEY);
+    labelKeys.add(PERCENTILE_LABEL_KEY);
+    io.opencensus.metrics.export.MetricDescriptor expectedMetricDescriptor3 =
+        io.opencensus.metrics.export.MetricDescriptor.create(
+            METRIC_NAME + SNAPSHOT_SUFFIX_PERCENTILE,
+            METRIC_DESCRIPTION,
+            METRIC_UNIT,
+            Type.GAUGE_DOUBLE,
+            labelKeys);
+    List<io.opencensus.metrics.export.TimeSeries> expectedTimeSeries1 =
+        Collections.singletonList(
+            io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+                LABEL_VALUE, Point.create(Value.longValue(22), TIMESTAMP), null));
+    List<io.opencensus.metrics.export.TimeSeries> expectedTimeSeries2 =
+        Collections.singletonList(
+            io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+                LABEL_VALUE, Point.create(Value.doubleValue(74.8), TIMESTAMP), null));
+    LabelValue existingLabelValues = LABEL_VALUE.get(0);
+    List<io.opencensus.metrics.export.TimeSeries> expectedTimeSeries3 =
+        Arrays.asList(
+            io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+                Arrays.asList(existingLabelValues, LabelValue.create("50.0")),
+                Point.create(Value.doubleValue(6), TIMESTAMP),
+                null),
+            io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+                Arrays.asList(existingLabelValues, LabelValue.create("75.0")),
+                Point.create(Value.doubleValue(10.2), TIMESTAMP),
+                null),
+            io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+                Arrays.asList(existingLabelValues, LabelValue.create("98.0")),
+                Point.create(Value.doubleValue(4.6), TIMESTAMP),
+                null),
+            io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+                Arrays.asList(existingLabelValues, LabelValue.create("99.0")),
+                Point.create(Value.doubleValue(1.2), TIMESTAMP),
+                null));
+    List<io.opencensus.metrics.export.Metric> metrics =
+        StackdriverExportUtils.convertSummaryMetric(SUMMARY_METRIC);
+
+    assertThat(metrics).isNotEmpty();
+    assertThat(metrics.size()).isEqualTo(3);
+    assertThat(metrics.get(0).getMetricDescriptor()).isEqualTo(expectedMetricDescriptor1);
+    assertThat(metrics.get(0).getTimeSeriesList()).isNotEmpty();
+    assertThat(metrics.get(0).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(0).getTimeSeriesList()).containsExactlyElementsIn(expectedTimeSeries1);
+    assertThat(metrics.get(1).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(1).getTimeSeriesList()).containsExactlyElementsIn(expectedTimeSeries2);
+    assertThat(metrics.get(1).getTimeSeriesList()).isNotEmpty();
+    assertThat(metrics.get(1).getMetricDescriptor()).isEqualTo(expectedMetricDescriptor2);
+    assertThat(metrics.get(2).getTimeSeriesList()).isNotEmpty();
+    assertThat(metrics.get(2).getMetricDescriptor()).isEqualTo(expectedMetricDescriptor3);
+    assertThat(metrics.get(2).getTimeSeriesList().size()).isEqualTo(4);
+    assertThat(metrics.get(2).getTimeSeriesList()).containsExactlyElementsIn(expectedTimeSeries3);
+  }
+
+  @Test
+  public void convertSummaryMetricWithNullSum() {
+    io.opencensus.metrics.export.MetricDescriptor expectedMetricDescriptor1 =
+        io.opencensus.metrics.export.MetricDescriptor.create(
+            METRIC_NAME + SUMMARY_SUFFIX_COUNT,
+            METRIC_DESCRIPTION,
+            METRIC_UNIT_2,
+            Type.CUMULATIVE_INT64,
+            LABEL_KEY);
+    List<LabelKey> labelKeys = new ArrayList<>(LABEL_KEY);
+    labelKeys.add(PERCENTILE_LABEL_KEY);
+    io.opencensus.metrics.export.MetricDescriptor expectedMetricDescriptor2 =
+        io.opencensus.metrics.export.MetricDescriptor.create(
+            METRIC_NAME + SNAPSHOT_SUFFIX_PERCENTILE,
+            METRIC_DESCRIPTION,
+            METRIC_UNIT,
+            Type.GAUGE_DOUBLE,
+            labelKeys);
+    List<io.opencensus.metrics.export.TimeSeries> expectedTimeSeries1 =
+        Collections.singletonList(
+            io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+                LABEL_VALUE, Point.create(Value.longValue(22), TIMESTAMP), null));
+    LabelValue existingLabelValues = LABEL_VALUE.get(0);
+    List<io.opencensus.metrics.export.TimeSeries> expectedTimeSeries2 =
+        Collections.singletonList(
+            io.opencensus.metrics.export.TimeSeries.createWithOnePoint(
+                Arrays.asList(existingLabelValues, LabelValue.create("50.0")),
+                Point.create(Value.doubleValue(6), TIMESTAMP),
+                null));
+    List<io.opencensus.metrics.export.Metric> metrics =
+        StackdriverExportUtils.convertSummaryMetric(SUMMARY_METRIC_NULL_SUM);
+
+    assertThat(metrics).isNotEmpty();
+    assertThat(metrics.size()).isEqualTo(2);
+    assertThat(metrics.get(0).getMetricDescriptor()).isEqualTo(expectedMetricDescriptor1);
+    assertThat(metrics.get(0).getTimeSeriesList()).isNotEmpty();
+    assertThat(metrics.get(0).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(0).getTimeSeriesList()).containsExactlyElementsIn(expectedTimeSeries1);
+    assertThat(metrics.get(1).getTimeSeriesList()).isNotEmpty();
+    assertThat(metrics.get(1).getMetricDescriptor()).isEqualTo(expectedMetricDescriptor2);
+    assertThat(metrics.get(1).getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metrics.get(1).getTimeSeriesList()).containsExactlyElementsIn(expectedTimeSeries2);
+  }
+
+  @Test
+  public void setResourceForBuilder_GcpInstanceType() {
+    MonitoredResource.Builder monitoredResourceBuilder = DEFAULT_RESOURCE_WITH_PROJECT_ID.clone();
+    Map<String, String> resourceLabels = new HashMap<String, String>();
+    resourceLabels.put(CloudResource.ACCOUNT_ID_KEY, "proj1");
+    resourceLabels.put(CloudResource.PROVIDER_KEY, CloudResource.PROVIDER_GCP);
+    resourceLabels.put(HostResource.ID_KEY, "inst1");
+    resourceLabels.put(CloudResource.ZONE_KEY, "zone1");
+    resourceLabels.put("extra_key", "must be ignored");
+    Map<String, String> expectedResourceLabels = new HashMap<String, String>();
+    expectedResourceLabels.put("project_id", "proj1");
+    expectedResourceLabels.put("instance_id", "inst1");
+    expectedResourceLabels.put("zone", "zone1");
+    Resource resource = Resource.create(HostResource.TYPE, resourceLabels);
+
+    StackdriverExportUtils.setResourceForBuilder(monitoredResourceBuilder, resource);
+
+    assertThat(monitoredResourceBuilder.getType()).isNotNull();
+    assertThat(monitoredResourceBuilder.getLabelsMap()).isNotEmpty();
+    assertThat(monitoredResourceBuilder.getType()).isEqualTo("gce_instance");
+    assertThat(monitoredResourceBuilder.getLabelsMap().size()).isEqualTo(3);
+    assertThat(monitoredResourceBuilder.getLabelsMap())
+        .containsExactlyEntriesIn(expectedResourceLabels);
+  }
+
+  @Test
+  public void setResourceForBuilder_K8sInstanceType() {
+    MonitoredResource.Builder monitoredResourceBuilder = DEFAULT_RESOURCE_WITH_PROJECT_ID.clone();
+    Map<String, String> resourceLabels = new HashMap<String, String>();
+    resourceLabels.put(CloudResource.ZONE_KEY, "zone1");
+    resourceLabels.put(HostResource.ID_KEY, "instance1");
+    resourceLabels.put(K8sResource.CLUSTER_NAME_KEY, "cluster1");
+    resourceLabels.put(ContainerResource.NAME_KEY, "container1");
+    resourceLabels.put(K8sResource.NAMESPACE_NAME_KEY, "namespace1");
+    resourceLabels.put(K8sResource.POD_NAME_KEY, "pod1");
+    resourceLabels.put("extra_key", "must be ignored");
+    Map<String, String> expectedResourceLabels = new HashMap<String, String>();
+    expectedResourceLabels.put("project_id", "proj1");
+    expectedResourceLabels.put("location", "zone1");
+    expectedResourceLabels.put("cluster_name", "cluster1");
+    expectedResourceLabels.put("namespace_name", "namespace1");
+    expectedResourceLabels.put("pod_name", "pod1");
+    expectedResourceLabels.put("container_name", "container1");
+    Resource resource = Resource.create(ContainerResource.TYPE, resourceLabels);
+
+    StackdriverExportUtils.setResourceForBuilder(monitoredResourceBuilder, resource);
+
+    assertThat(monitoredResourceBuilder.getType()).isNotNull();
+    assertThat(monitoredResourceBuilder.getLabelsMap()).isNotEmpty();
+    assertThat(monitoredResourceBuilder.getType()).isEqualTo("k8s_container");
+    assertThat(monitoredResourceBuilder.getLabelsMap().size()).isEqualTo(6);
+    assertThat(monitoredResourceBuilder.getLabelsMap())
+        .containsExactlyEntriesIn(expectedResourceLabels);
+  }
+
+  @Test
+  public void setResourceForBuilder_AwsInstanceType() {
+    MonitoredResource.Builder monitoredResourceBuilder = DEFAULT_RESOURCE_WITH_PROJECT_ID.clone();
+    Map<String, String> resourceLabels = new HashMap<String, String>();
+    resourceLabels.put(CloudResource.REGION_KEY, "region1");
+    resourceLabels.put(CloudResource.PROVIDER_KEY, CloudResource.PROVIDER_AWS);
+    resourceLabels.put(CloudResource.ACCOUNT_ID_KEY, "account1");
+    resourceLabels.put(HostResource.ID_KEY, "instance1");
+    resourceLabels.put("extra_key", "must be ignored");
+    Map<String, String> expectedResourceLabels = new HashMap<String, String>();
+    expectedResourceLabels.put("project_id", "proj1");
+    expectedResourceLabels.put("instance_id", "instance1");
+    expectedResourceLabels.put(
+        "region", StackdriverExportUtils.AWS_REGION_VALUE_PREFIX + "region1");
+    expectedResourceLabels.put("aws_account", "account1");
+
+    Resource resource = Resource.create(HostResource.TYPE, resourceLabels);
+
+    StackdriverExportUtils.setResourceForBuilder(monitoredResourceBuilder, resource);
+
+    assertThat(monitoredResourceBuilder.getType()).isNotNull();
+    assertThat(monitoredResourceBuilder.getLabelsMap()).isNotEmpty();
+    assertThat(monitoredResourceBuilder.getType()).isEqualTo("aws_ec2_instance");
+    assertThat(monitoredResourceBuilder.getLabelsMap().size()).isEqualTo(4);
+    assertThat(monitoredResourceBuilder.getLabelsMap())
+        .containsExactlyEntriesIn(expectedResourceLabels);
+  }
+
+  @Test
+  public void getDomain() {
+    assertThat(StackdriverExportUtils.getDomain(null))
+        .isEqualTo("custom.googleapis.com/opencensus/");
+    assertThat(StackdriverExportUtils.getDomain("")).isEqualTo("custom.googleapis.com/opencensus/");
+    assertThat(StackdriverExportUtils.getDomain("custom.googleapis.com/myorg/"))
+        .isEqualTo("custom.googleapis.com/myorg/");
+    assertThat(StackdriverExportUtils.getDomain("external.googleapis.com/prometheus/"))
+        .isEqualTo("external.googleapis.com/prometheus/");
+    assertThat(StackdriverExportUtils.getDomain("myorg")).isEqualTo("myorg/");
+  }
+
+  @Test
+  public void getDisplayNamePrefix() {
+    assertThat(StackdriverExportUtils.getDisplayNamePrefix(null)).isEqualTo("OpenCensus/");
+    assertThat(StackdriverExportUtils.getDisplayNamePrefix("")).isEqualTo("");
+    assertThat(StackdriverExportUtils.getDisplayNamePrefix("custom.googleapis.com/myorg/"))
+        .isEqualTo("custom.googleapis.com/myorg/");
+    assertThat(StackdriverExportUtils.getDisplayNamePrefix("external.googleapis.com/prometheus/"))
+        .isEqualTo("external.googleapis.com/prometheus/");
+    assertThat(StackdriverExportUtils.getDisplayNamePrefix("myorg")).isEqualTo("myorg/");
+  }
 }
diff --git a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExporterWorkerTest.java b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExporterWorkerTest.java
deleted file mode 100644
index 2759382..0000000
--- a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExporterWorkerTest.java
+++ /dev/null
@@ -1,310 +0,0 @@
-/*
- * Copyright 2017, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.exporter.stats.stackdriver;
-
-import static com.google.common.truth.Truth.assertThat;
-import static io.opencensus.exporter.stats.stackdriver.StackdriverExporterWorker.CUSTOM_OPENCENSUS_DOMAIN;
-import static io.opencensus.exporter.stats.stackdriver.StackdriverExporterWorker.DEFAULT_DISPLAY_NAME_PREFIX;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.eq;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-
-import com.google.api.MetricDescriptor;
-import com.google.api.MonitoredResource;
-import com.google.api.gax.rpc.UnaryCallable;
-import com.google.cloud.monitoring.v3.MetricServiceClient;
-import com.google.cloud.monitoring.v3.stub.MetricServiceStub;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
-import com.google.monitoring.v3.CreateMetricDescriptorRequest;
-import com.google.monitoring.v3.CreateTimeSeriesRequest;
-import com.google.monitoring.v3.TimeSeries;
-import com.google.protobuf.Empty;
-import io.opencensus.common.Duration;
-import io.opencensus.common.Timestamp;
-import io.opencensus.stats.Aggregation.Sum;
-import io.opencensus.stats.AggregationData;
-import io.opencensus.stats.AggregationData.SumDataLong;
-import io.opencensus.stats.Measure.MeasureLong;
-import io.opencensus.stats.View;
-import io.opencensus.stats.View.AggregationWindow.Cumulative;
-import io.opencensus.stats.View.AggregationWindow.Interval;
-import io.opencensus.stats.View.Name;
-import io.opencensus.stats.ViewData;
-import io.opencensus.stats.ViewData.AggregationWindowData.CumulativeData;
-import io.opencensus.stats.ViewManager;
-import io.opencensus.tags.TagKey;
-import io.opencensus.tags.TagValue;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-
-/** Unit tests for {@link StackdriverExporterWorker}. */
-@RunWith(JUnit4.class)
-public class StackdriverExporterWorkerTest {
-
-  private static final String PROJECT_ID = "projectId";
-  private static final Duration ONE_SECOND = Duration.create(1, 0);
-  private static final TagKey KEY = TagKey.create("KEY");
-  private static final TagValue VALUE = TagValue.create("VALUE");
-  private static final String MEASURE_NAME = "my measurement";
-  private static final String MEASURE_UNIT = "us";
-  private static final String MEASURE_DESCRIPTION = "measure description";
-  private static final MeasureLong MEASURE =
-      MeasureLong.create(MEASURE_NAME, MEASURE_DESCRIPTION, MEASURE_UNIT);
-  private static final Name VIEW_NAME = Name.create("my view");
-  private static final String VIEW_DESCRIPTION = "view description";
-  private static final Cumulative CUMULATIVE = Cumulative.create();
-  private static final Interval INTERVAL = Interval.create(ONE_SECOND);
-  private static final Sum SUM = Sum.create();
-  private static final MonitoredResource DEFAULT_RESOURCE =
-      MonitoredResource.newBuilder().setType("global").build();
-
-  @Mock private ViewManager mockViewManager;
-
-  @Mock private MetricServiceStub mockStub;
-
-  @Mock
-  private UnaryCallable<CreateMetricDescriptorRequest, MetricDescriptor>
-      mockCreateMetricDescriptorCallable;
-
-  @Mock private UnaryCallable<CreateTimeSeriesRequest, Empty> mockCreateTimeSeriesCallable;
-
-  @Before
-  public void setUp() {
-    MockitoAnnotations.initMocks(this);
-
-    doReturn(mockCreateMetricDescriptorCallable).when(mockStub).createMetricDescriptorCallable();
-    doReturn(mockCreateTimeSeriesCallable).when(mockStub).createTimeSeriesCallable();
-    doReturn(null)
-        .when(mockCreateMetricDescriptorCallable)
-        .call(any(CreateMetricDescriptorRequest.class));
-    doReturn(null).when(mockCreateTimeSeriesCallable).call(any(CreateTimeSeriesRequest.class));
-  }
-
-  @Test
-  public void testConstants() {
-    assertThat(StackdriverExporterWorker.MAX_BATCH_EXPORT_SIZE).isEqualTo(200);
-    assertThat(StackdriverExporterWorker.CUSTOM_METRIC_DOMAIN).isEqualTo("custom.googleapis.com/");
-    assertThat(StackdriverExporterWorker.CUSTOM_OPENCENSUS_DOMAIN)
-        .isEqualTo("custom.googleapis.com/opencensus/");
-    assertThat(StackdriverExporterWorker.DEFAULT_DISPLAY_NAME_PREFIX).isEqualTo("OpenCensus/");
-  }
-
-  @Test
-  public void export() throws IOException {
-    View view =
-        View.create(VIEW_NAME, VIEW_DESCRIPTION, MEASURE, SUM, Arrays.asList(KEY), CUMULATIVE);
-    ViewData viewData =
-        ViewData.create(
-            view,
-            ImmutableMap.of(Arrays.asList(VALUE), SumDataLong.create(1)),
-            CumulativeData.create(Timestamp.fromMillis(100), Timestamp.fromMillis(200)));
-    doReturn(ImmutableSet.of(view)).when(mockViewManager).getAllExportedViews();
-    doReturn(viewData).when(mockViewManager).getView(VIEW_NAME);
-
-    StackdriverExporterWorker worker =
-        new StackdriverExporterWorker(
-            PROJECT_ID,
-            new FakeMetricServiceClient(mockStub),
-            ONE_SECOND,
-            mockViewManager,
-            DEFAULT_RESOURCE,
-            null);
-    worker.export();
-
-    verify(mockStub, times(1)).createMetricDescriptorCallable();
-    verify(mockStub, times(1)).createTimeSeriesCallable();
-
-    MetricDescriptor descriptor =
-        StackdriverExportUtils.createMetricDescriptor(
-            view, PROJECT_ID, CUSTOM_OPENCENSUS_DOMAIN, DEFAULT_DISPLAY_NAME_PREFIX);
-    List<TimeSeries> timeSeries =
-        StackdriverExportUtils.createTimeSeriesList(
-            viewData, DEFAULT_RESOURCE, CUSTOM_OPENCENSUS_DOMAIN);
-    verify(mockCreateMetricDescriptorCallable, times(1))
-        .call(
-            eq(
-                CreateMetricDescriptorRequest.newBuilder()
-                    .setName("projects/" + PROJECT_ID)
-                    .setMetricDescriptor(descriptor)
-                    .build()));
-    verify(mockCreateTimeSeriesCallable, times(1))
-        .call(
-            eq(
-                CreateTimeSeriesRequest.newBuilder()
-                    .setName("projects/" + PROJECT_ID)
-                    .addAllTimeSeries(timeSeries)
-                    .build()));
-  }
-
-  @Test
-  public void doNotExportForEmptyViewData() {
-    View view =
-        View.create(VIEW_NAME, VIEW_DESCRIPTION, MEASURE, SUM, Arrays.asList(KEY), CUMULATIVE);
-    ViewData empty =
-        ViewData.create(
-            view,
-            Collections.<List<TagValue>, AggregationData>emptyMap(),
-            CumulativeData.create(Timestamp.fromMillis(100), Timestamp.fromMillis(200)));
-    doReturn(ImmutableSet.of(view)).when(mockViewManager).getAllExportedViews();
-    doReturn(empty).when(mockViewManager).getView(VIEW_NAME);
-
-    StackdriverExporterWorker worker =
-        new StackdriverExporterWorker(
-            PROJECT_ID,
-            new FakeMetricServiceClient(mockStub),
-            ONE_SECOND,
-            mockViewManager,
-            DEFAULT_RESOURCE,
-            null);
-
-    worker.export();
-    verify(mockStub, times(1)).createMetricDescriptorCallable();
-    verify(mockStub, times(0)).createTimeSeriesCallable();
-  }
-
-  @Test
-  public void doNotExportIfFailedToRegisterView() {
-    View view =
-        View.create(VIEW_NAME, VIEW_DESCRIPTION, MEASURE, SUM, Arrays.asList(KEY), CUMULATIVE);
-    doReturn(ImmutableSet.of(view)).when(mockViewManager).getAllExportedViews();
-    doThrow(new IllegalArgumentException()).when(mockStub).createMetricDescriptorCallable();
-    StackdriverExporterWorker worker =
-        new StackdriverExporterWorker(
-            PROJECT_ID,
-            new FakeMetricServiceClient(mockStub),
-            ONE_SECOND,
-            mockViewManager,
-            DEFAULT_RESOURCE,
-            null);
-
-    assertThat(worker.registerView(view)).isFalse();
-    worker.export();
-    verify(mockStub, times(1)).createMetricDescriptorCallable();
-    verify(mockStub, times(0)).createTimeSeriesCallable();
-  }
-
-  @Test
-  public void skipDifferentViewWithSameName() throws IOException {
-    StackdriverExporterWorker worker =
-        new StackdriverExporterWorker(
-            PROJECT_ID,
-            new FakeMetricServiceClient(mockStub),
-            ONE_SECOND,
-            mockViewManager,
-            DEFAULT_RESOURCE,
-            null);
-    View view1 =
-        View.create(VIEW_NAME, VIEW_DESCRIPTION, MEASURE, SUM, Arrays.asList(KEY), CUMULATIVE);
-    assertThat(worker.registerView(view1)).isTrue();
-    verify(mockStub, times(1)).createMetricDescriptorCallable();
-
-    View view2 =
-        View.create(
-            VIEW_NAME,
-            "This is a different description.",
-            MEASURE,
-            SUM,
-            Arrays.asList(KEY),
-            CUMULATIVE);
-    assertThat(worker.registerView(view2)).isFalse();
-    verify(mockStub, times(1)).createMetricDescriptorCallable();
-  }
-
-  @Test
-  public void doNotCreateMetricDescriptorForRegisteredView() {
-    StackdriverExporterWorker worker =
-        new StackdriverExporterWorker(
-            PROJECT_ID,
-            new FakeMetricServiceClient(mockStub),
-            ONE_SECOND,
-            mockViewManager,
-            DEFAULT_RESOURCE,
-            null);
-    View view =
-        View.create(VIEW_NAME, VIEW_DESCRIPTION, MEASURE, SUM, Arrays.asList(KEY), CUMULATIVE);
-    assertThat(worker.registerView(view)).isTrue();
-    verify(mockStub, times(1)).createMetricDescriptorCallable();
-
-    assertThat(worker.registerView(view)).isTrue();
-    verify(mockStub, times(1)).createMetricDescriptorCallable();
-  }
-
-  @Test
-  public void doNotCreateMetricDescriptorForIntervalView() {
-    StackdriverExporterWorker worker =
-        new StackdriverExporterWorker(
-            PROJECT_ID,
-            new FakeMetricServiceClient(mockStub),
-            ONE_SECOND,
-            mockViewManager,
-            DEFAULT_RESOURCE,
-            null);
-    View view =
-        View.create(VIEW_NAME, VIEW_DESCRIPTION, MEASURE, SUM, Arrays.asList(KEY), INTERVAL);
-    assertThat(worker.registerView(view)).isFalse();
-    verify(mockStub, times(0)).createMetricDescriptorCallable();
-  }
-
-  @Test
-  public void getDomain() {
-    assertThat(StackdriverExporterWorker.getDomain(null))
-        .isEqualTo("custom.googleapis.com/opencensus/");
-    assertThat(StackdriverExporterWorker.getDomain(""))
-        .isEqualTo("custom.googleapis.com/opencensus/");
-    assertThat(StackdriverExporterWorker.getDomain("custom.googleapis.com/myorg/"))
-        .isEqualTo("custom.googleapis.com/myorg/");
-    assertThat(StackdriverExporterWorker.getDomain("external.googleapis.com/prometheus/"))
-        .isEqualTo("external.googleapis.com/prometheus/");
-    assertThat(StackdriverExporterWorker.getDomain("myorg")).isEqualTo("myorg/");
-  }
-
-  @Test
-  public void getDisplayNamePrefix() {
-    assertThat(StackdriverExporterWorker.getDisplayNamePrefix(null)).isEqualTo("OpenCensus/");
-    assertThat(StackdriverExporterWorker.getDisplayNamePrefix("")).isEqualTo("");
-    assertThat(StackdriverExporterWorker.getDisplayNamePrefix("custom.googleapis.com/myorg/"))
-        .isEqualTo("custom.googleapis.com/myorg/");
-    assertThat(
-            StackdriverExporterWorker.getDisplayNamePrefix("external.googleapis.com/prometheus/"))
-        .isEqualTo("external.googleapis.com/prometheus/");
-    assertThat(StackdriverExporterWorker.getDisplayNamePrefix("myorg")).isEqualTo("myorg/");
-  }
-
-  /*
-   * MetricServiceClient.createMetricDescriptor() and MetricServiceClient.createTimeSeries() are
-   * final methods and cannot be mocked. We have to use a mock MetricServiceStub in order to verify
-   * the output.
-   */
-  private static final class FakeMetricServiceClient extends MetricServiceClient {
-
-    protected FakeMetricServiceClient(MetricServiceStub stub) {
-      super(stub);
-    }
-  }
-}
diff --git a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfigurationTest.java b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfigurationTest.java
index 2d5eba1..cde84d7 100644
--- a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfigurationTest.java
+++ b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfigurationTest.java
@@ -17,16 +17,29 @@
 package io.opencensus.exporter.stats.stackdriver;
 
 import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExportUtils.DEFAULT_CONSTANT_LABELS;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverStatsConfiguration.DEFAULT_DEADLINE;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverStatsConfiguration.DEFAULT_INTERVAL;
 
 import com.google.api.MonitoredResource;
 import com.google.auth.Credentials;
 import com.google.auth.oauth2.AccessToken;
 import com.google.auth.oauth2.GoogleCredentials;
+import com.google.cloud.ServiceOptions;
+import com.google.cloud.monitoring.v3.stub.MetricServiceStub;
 import io.opencensus.common.Duration;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import java.util.Collections;
 import java.util.Date;
+import java.util.Map;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.Mockito;
 
 /** Unit tests for {@link StackdriverStatsConfiguration}. */
 @RunWith(JUnit4.class)
@@ -35,13 +48,24 @@
   private static final Credentials FAKE_CREDENTIALS =
       GoogleCredentials.newBuilder().setAccessToken(new AccessToken("fake", new Date(100))).build();
   private static final String PROJECT_ID = "project";
-  private static final Duration DURATION = Duration.create(10, 0);
+  private static final Duration DURATION = Duration.create(60, 0);
+  private static final Duration NEG_ONE_MINUTE = Duration.create(-60, 0);
   private static final MonitoredResource RESOURCE =
       MonitoredResource.newBuilder()
           .setType("gce-instance")
           .putLabels("instance-id", "instance")
           .build();
   private static final String CUSTOM_PREFIX = "myorg";
+  private static final String CUSTOM_DISPLAY_PREFIX = "display-prefix";
+
+  @Mock private final MetricServiceStub mockStub = Mockito.mock(MetricServiceStub.class);
+
+  @Rule public final ExpectedException thrown = ExpectedException.none();
+
+  @Test
+  public void testConstants() {
+    assertThat(DEFAULT_INTERVAL).isEqualTo(Duration.create(60, 0));
+  }
 
   @Test
   public void testBuild() {
@@ -52,21 +76,155 @@
             .setExportInterval(DURATION)
             .setMonitoredResource(RESOURCE)
             .setMetricNamePrefix(CUSTOM_PREFIX)
+            .setDisplayNamePrefix(CUSTOM_DISPLAY_PREFIX)
+            .setConstantLabels(Collections.<LabelKey, LabelValue>emptyMap())
+            .setDeadline(DURATION)
+            .setMetricServiceStub(mockStub)
             .build();
     assertThat(configuration.getCredentials()).isEqualTo(FAKE_CREDENTIALS);
     assertThat(configuration.getProjectId()).isEqualTo(PROJECT_ID);
     assertThat(configuration.getExportInterval()).isEqualTo(DURATION);
     assertThat(configuration.getMonitoredResource()).isEqualTo(RESOURCE);
     assertThat(configuration.getMetricNamePrefix()).isEqualTo(CUSTOM_PREFIX);
+    assertThat(configuration.getDisplayNamePrefix()).isEqualTo(CUSTOM_DISPLAY_PREFIX);
+    assertThat(configuration.getConstantLabels()).isEmpty();
+    assertThat(configuration.getDeadline()).isEqualTo(DURATION);
+    assertThat(configuration.getMetricServiceStub()).isEqualTo(mockStub);
   }
 
   @Test
   public void testBuild_Default() {
-    StackdriverStatsConfiguration configuration = StackdriverStatsConfiguration.builder().build();
+    StackdriverStatsConfiguration configuration;
+    try {
+      configuration = StackdriverStatsConfiguration.builder().build();
+    } catch (Exception e) {
+      // Some test hosts may not have cloud project ID set up.
+      configuration = StackdriverStatsConfiguration.builder().setProjectId("test").build();
+    }
     assertThat(configuration.getCredentials()).isNull();
-    assertThat(configuration.getProjectId()).isNull();
-    assertThat(configuration.getExportInterval()).isNull();
-    assertThat(configuration.getMonitoredResource()).isNull();
+    assertThat(configuration.getProjectId()).isNotNull();
+    assertThat(configuration.getExportInterval()).isEqualTo(DEFAULT_INTERVAL);
+    assertThat(configuration.getMonitoredResource()).isNotNull();
     assertThat(configuration.getMetricNamePrefix()).isNull();
+    assertThat(configuration.getConstantLabels()).isEqualTo(DEFAULT_CONSTANT_LABELS);
+    assertThat(configuration.getDeadline()).isEqualTo(DEFAULT_DEADLINE);
+    assertThat(configuration.getMetricServiceStub()).isNull();
+  }
+
+  @Test
+  public void disallowNullProjectId() {
+    StackdriverStatsConfiguration.Builder builder = StackdriverStatsConfiguration.builder();
+    thrown.expect(NullPointerException.class);
+    builder.setProjectId(null);
+  }
+
+  @Test
+  public void disallowEmptyProjectId() {
+    StackdriverStatsConfiguration.Builder builder = StackdriverStatsConfiguration.builder();
+    builder.setProjectId("");
+    thrown.expect(IllegalArgumentException.class);
+    builder.build();
+  }
+
+  @Test
+  public void allowToUseDefaultProjectId() {
+    String defaultProjectId = ServiceOptions.getDefaultProjectId();
+    if (defaultProjectId != null) {
+      StackdriverStatsConfiguration configuration = StackdriverStatsConfiguration.builder().build();
+      assertThat(configuration.getProjectId()).isEqualTo(defaultProjectId);
+    }
+  }
+
+  @Test
+  public void disallowNullResource() {
+    StackdriverStatsConfiguration.Builder builder =
+        StackdriverStatsConfiguration.builder().setProjectId(PROJECT_ID);
+    thrown.expect(NullPointerException.class);
+    builder.setMonitoredResource(null);
+  }
+
+  @Test
+  public void disallowNullExportInterval() {
+    StackdriverStatsConfiguration.Builder builder =
+        StackdriverStatsConfiguration.builder().setProjectId(PROJECT_ID);
+    thrown.expect(NullPointerException.class);
+    builder.setExportInterval(null);
+  }
+
+  @Test
+  public void disallowNullConstantLabels() {
+    StackdriverStatsConfiguration.Builder builder =
+        StackdriverStatsConfiguration.builder().setProjectId(PROJECT_ID);
+    thrown.expect(NullPointerException.class);
+    builder.setConstantLabels(null);
+  }
+
+  @Test
+  public void disallowNullConstantLabelKey() {
+    StackdriverStatsConfiguration.Builder builder =
+        StackdriverStatsConfiguration.builder().setProjectId(PROJECT_ID);
+    Map<LabelKey, LabelValue> labels = Collections.singletonMap(null, LabelValue.create("val"));
+    builder.setConstantLabels(labels);
+    thrown.expect(NullPointerException.class);
+    builder.build();
+  }
+
+  @Test
+  public void disallowNullConstantLabelValue() {
+    StackdriverStatsConfiguration.Builder builder =
+        StackdriverStatsConfiguration.builder().setProjectId(PROJECT_ID);
+    Map<LabelKey, LabelValue> labels =
+        Collections.singletonMap(LabelKey.create("key", "desc"), null);
+    builder.setConstantLabels(labels);
+    thrown.expect(NullPointerException.class);
+    builder.build();
+  }
+
+  @Test
+  public void allowNullCredentials() {
+    StackdriverStatsConfiguration configuration =
+        StackdriverStatsConfiguration.builder()
+            .setProjectId(PROJECT_ID)
+            .setCredentials(null)
+            .build();
+    assertThat(configuration.getCredentials()).isNull();
+  }
+
+  @Test
+  public void allowNullMetricPrefix() {
+    StackdriverStatsConfiguration configuration =
+        StackdriverStatsConfiguration.builder()
+            .setProjectId(PROJECT_ID)
+            .setMetricNamePrefix(null)
+            .build();
+    assertThat(configuration.getMetricNamePrefix()).isNull();
+  }
+
+  @Test
+  public void allowNullDisplayPrefix() {
+    StackdriverStatsConfiguration configuration =
+        StackdriverStatsConfiguration.builder()
+            .setProjectId(PROJECT_ID)
+            .setDisplayNamePrefix(null)
+            .build();
+    assertThat(configuration.getMetricNamePrefix()).isNull();
+  }
+
+  @Test
+  public void disallowZeroDuration() {
+    StackdriverStatsConfiguration.Builder builder =
+        StackdriverStatsConfiguration.builder().setProjectId("test");
+    builder.setDeadline(StackdriverStatsConfiguration.Builder.ZERO);
+    thrown.expect(IllegalArgumentException.class);
+    builder.build();
+  }
+
+  @Test
+  public void disallowNegativeDuration() {
+    StackdriverStatsConfiguration.Builder builder =
+        StackdriverStatsConfiguration.builder().setProjectId("test");
+    builder.setDeadline(NEG_ONE_MINUTE);
+    thrown.expect(IllegalArgumentException.class);
+    builder.build();
   }
 }
diff --git a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporterTest.java b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporterTest.java
index f5e3edd..846d650 100644
--- a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporterTest.java
+++ b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporterTest.java
@@ -17,10 +17,14 @@
 package io.opencensus.exporter.stats.stackdriver;
 
 import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverStatsConfiguration.DEFAULT_DEADLINE;
 
+import com.google.api.gax.core.GoogleCredentialsProvider;
+import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
 import com.google.auth.Credentials;
 import com.google.auth.oauth2.AccessToken;
 import com.google.auth.oauth2.GoogleCredentials;
+import com.google.cloud.monitoring.v3.MetricServiceClient;
 import io.opencensus.common.Duration;
 import java.io.IOException;
 import java.util.Date;
@@ -35,8 +39,8 @@
 public class StackdriverStatsExporterTest {
 
   private static final String PROJECT_ID = "projectId";
-  private static final Duration ONE_SECOND = Duration.create(1, 0);
-  private static final Duration NEG_ONE_SECOND = Duration.create(-1, 0);
+  private static final Duration ONE_MINUTE = Duration.create(60, 0);
+  private static final Duration NEG_ONE_MINUTE = Duration.create(-60, 0);
   private static final Credentials FAKE_CREDENTIALS =
       GoogleCredentials.newBuilder().setAccessToken(new AccessToken("fake", new Date(100))).build();
   private static final StackdriverStatsConfiguration CONFIGURATION =
@@ -48,11 +52,6 @@
   @Rule public final ExpectedException thrown = ExpectedException.none();
 
   @Test
-  public void testConstants() {
-    assertThat(StackdriverStatsExporter.DEFAULT_INTERVAL).isEqualTo(Duration.create(60, 0));
-  }
-
-  @Test
   public void createWithNullStackdriverStatsConfiguration() throws IOException {
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("configuration");
@@ -64,10 +63,11 @@
     StackdriverStatsConfiguration configuration =
         StackdriverStatsConfiguration.builder()
             .setCredentials(FAKE_CREDENTIALS)
-            .setExportInterval(NEG_ONE_SECOND)
+            .setProjectId(PROJECT_ID)
+            .setExportInterval(NEG_ONE_MINUTE)
             .build();
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Duration must be positive");
+    thrown.expectMessage("Export interval must be positive");
     StackdriverStatsExporter.createAndRegister(configuration);
   }
 
@@ -77,7 +77,7 @@
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("credentials");
     StackdriverStatsExporter.createAndRegisterWithCredentialsAndProjectId(
-        null, PROJECT_ID, ONE_SECOND);
+        null, PROJECT_ID, ONE_MINUTE);
   }
 
   @Test
@@ -86,7 +86,7 @@
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("projectId");
     StackdriverStatsExporter.createAndRegisterWithCredentialsAndProjectId(
-        GoogleCredentials.newBuilder().build(), null, ONE_SECOND);
+        GoogleCredentials.newBuilder().build(), null, ONE_MINUTE);
   }
 
   @Test
@@ -102,9 +102,9 @@
   @SuppressWarnings("deprecation")
   public void createWithNegativeDuration() throws IOException {
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Duration must be positive");
+    thrown.expectMessage("Export interval must be positive");
     StackdriverStatsExporter.createAndRegisterWithCredentialsAndProjectId(
-        GoogleCredentials.newBuilder().build(), PROJECT_ID, NEG_ONE_SECOND);
+        GoogleCredentials.newBuilder().build(), PROJECT_ID, NEG_ONE_MINUTE);
   }
 
   @Test
@@ -115,7 +115,20 @@
       thrown.expectMessage("Stackdriver stats exporter is already created.");
       StackdriverStatsExporter.createAndRegister(CONFIGURATION);
     } finally {
-      StackdriverStatsExporter.unsafeResetExporter();
+      StackdriverStatsExporter.unregister();
+    }
+  }
+
+  @Test
+  public void unregister() throws IOException {
+    // unregister has no effect if exporter is not yet registered.
+    StackdriverStatsExporter.unregister();
+    try {
+      StackdriverStatsExporter.createAndRegister(CONFIGURATION);
+      StackdriverStatsExporter.unregister();
+      StackdriverStatsExporter.createAndRegister(CONFIGURATION);
+    } finally {
+      StackdriverStatsExporter.unregister();
     }
   }
 
@@ -124,6 +137,41 @@
   public void createWithNullMonitoredResource() throws IOException {
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("monitoredResource");
-    StackdriverStatsExporter.createAndRegisterWithMonitoredResource(ONE_SECOND, null);
+    StackdriverStatsExporter.createAndRegisterWithMonitoredResource(ONE_MINUTE, null);
+  }
+
+  @Test
+  public void createMetricServiceClient() throws IOException {
+    MetricServiceClient client;
+    synchronized (StackdriverStatsExporter.monitor) {
+      client =
+          StackdriverStatsExporter.createMetricServiceClient(FAKE_CREDENTIALS, DEFAULT_DEADLINE);
+    }
+    assertThat(client.getSettings().getCredentialsProvider().getCredentials())
+        .isEqualTo(FAKE_CREDENTIALS);
+    assertThat(client.getSettings().getTransportChannelProvider())
+        .isInstanceOf(InstantiatingGrpcChannelProvider.class);
+    // There's no way to get HeaderProvider from TransportChannelProvider.
+    assertThat(client.getSettings().getTransportChannelProvider().needsHeaders()).isFalse();
+  }
+
+  @Test
+  public void createMetricServiceClient_WithoutCredentials() {
+    try {
+      MetricServiceClient client;
+      synchronized (StackdriverStatsExporter.monitor) {
+        client = StackdriverStatsExporter.createMetricServiceClient(null, DEFAULT_DEADLINE);
+      }
+      assertThat(client.getSettings().getCredentialsProvider())
+          .isInstanceOf(GoogleCredentialsProvider.class);
+      assertThat(client.getSettings().getTransportChannelProvider())
+          .isInstanceOf(InstantiatingGrpcChannelProvider.class);
+      // There's no way to get HeaderProvider from TransportChannelProvider.
+      assertThat(client.getSettings().getTransportChannelProvider().needsHeaders()).isFalse();
+    } catch (IOException e) {
+      // This test depends on the Application Default Credentials settings (environment variable
+      // GOOGLE_APPLICATION_CREDENTIALS). Some hosts may not have the expected environment settings
+      // and this test should be skipped in that case.
+    }
   }
 }
diff --git a/exporters/trace/datadog/README.md b/exporters/trace/datadog/README.md
new file mode 100644
index 0000000..b894865
--- /dev/null
+++ b/exporters/trace/datadog/README.md
@@ -0,0 +1,73 @@
+# OpenCensus Datadog Trace Exporter
+[![Build Status][travis-image]][travis-url]
+[![Windows Build Status][appveyor-image]][appveyor-url]
+[![Maven Central][maven-image]][maven-url]
+
+The *OpenCensus Datadog Trace Exporter* is a trace exporter that exports data to [Datadog](https://www.datadoghq.com/).
+
+## Quickstart
+
+### Prerequisites
+
+Datadog collects traces using a local agent, which forwards them to the Datadog backend. Instructions for setting up the agent can be found [in the Datadog docs](https://docs.datadoghq.com/agent/?tab=agentv6). The configuration example below assumes the agent's trace intake is reachable at the default endpoint, `http://localhost:8126/v0.3/traces`.
+
+### Hello Datadog
+
+#### Add the dependencies to your project
+
+For Maven add to your `pom.xml`:
+```xml
+<dependencies>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-api</artifactId>
+    <version>0.28.3</version>
+  </dependency>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-exporter-trace-datadog</artifactId>
+    <version>0.28.3</version>
+  </dependency>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-impl</artifactId>
+    <version>0.28.3</version>
+    <scope>runtime</scope>
+  </dependency>
+</dependencies>
+```
+
+For Gradle add to your dependencies:
+```groovy
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-exporter-trace-datadog:0.28.3'
+runtime 'io.opencensus:opencensus-impl:0.28.3'
+```
+
+#### Register the exporter
+
+```java
+public class MyMainClass {
+  public static void main(String[] args) throws Exception {
+
+    DatadogTraceConfiguration config = DatadogTraceConfiguration.builder()
+      .setAgentEndpoint("http://localhost:8126/v0.3/traces")
+      .setService("myService")
+      .setType("web")
+      .build();
+    DatadogTraceExporter.createAndRegister(config);
+    // ...
+  }
+}
+```
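+
+The configuration also accepts an export deadline (10 seconds by default), and the exporter can be
+unregistered when your application shuts down. A minimal sketch, reusing the agent endpoint from the
+example above:
+
+```java
+import io.opencensus.common.Duration;
+
+DatadogTraceConfiguration config = DatadogTraceConfiguration.builder()
+    .setAgentEndpoint("http://localhost:8126/v0.3/traces")
+    .setService("myService")
+    .setType("web")
+    .setDeadline(Duration.create(10, 0)) // per-export deadline; defaults to 10 seconds
+    .build();
+DatadogTraceExporter.createAndRegister(config);
+
+// ... later, e.g. from a shutdown hook:
+DatadogTraceExporter.unregister();
+```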
+
+#### Java Versions
+
+Java 8 or above is required for using this exporter.
+
+[travis-image]: https://travis-ci.org/census-instrumentation/opencensus-java.svg?branch=master
+[travis-url]: https://travis-ci.org/census-instrumentation/opencensus-java
+[appveyor-image]: https://ci.appveyor.com/api/projects/status/hxthmpkxar4jq4be/branch/master?svg=true
+[appveyor-url]: https://ci.appveyor.com/project/opencensusjavateam/opencensus-java/branch/master
+[maven-image]: https://maven-badges.herokuapp.com/maven-central/io.opencensus/opencensus-exporter-trace-datadog/badge.svg
+[maven-url]: https://maven-badges.herokuapp.com/maven-central/io.opencensus/opencensus-exporter-trace-datadog
diff --git a/exporters/trace/datadog/build.gradle b/exporters/trace/datadog/build.gradle
new file mode 100644
index 0000000..4a07fa7
--- /dev/null
+++ b/exporters/trace/datadog/build.gradle
@@ -0,0 +1,18 @@
+description = 'OpenCensus Datadog Trace Exporter'
+
+[compileJava, compileTestJava].each() {
+    it.sourceCompatibility = 1.8
+    it.targetCompatibility = 1.8
+}
+
+dependencies {
+    compileOnly libraries.findbugs_annotations
+
+    compile project(':opencensus-api'),
+        project(':opencensus-exporter-trace-util'),
+        libraries.guava,
+        libraries.gson, 
+        libraries.auto_value
+
+    signature "org.codehaus.mojo.signature:java18:1.0@signature"
+}
diff --git a/exporters/trace/datadog/src/main/java/io/opencensus/exporter/trace/datadog/DatadogExporterHandler.java b/exporters/trace/datadog/src/main/java/io/opencensus/exporter/trace/datadog/DatadogExporterHandler.java
new file mode 100644
index 0000000..232d35f
--- /dev/null
+++ b/exporters/trace/datadog/src/main/java/io/opencensus/exporter/trace/datadog/DatadogExporterHandler.java
@@ -0,0 +1,177 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.datadog;
+
+import com.google.gson.FieldNamingPolicy;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import io.opencensus.common.Duration;
+import io.opencensus.common.Functions;
+import io.opencensus.common.Timestamp;
+import io.opencensus.exporter.trace.util.TimeLimitedHandler;
+import io.opencensus.trace.AttributeValue;
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.SpanId;
+import io.opencensus.trace.Status;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.export.SpanData;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import javax.annotation.Nullable;
+
+@SuppressWarnings({
+  // This library is not supposed to be Android or Java 7 compatible.
+  "AndroidJdkLibsChecker",
+  "Java7ApiChecker"
+})
+final class DatadogExporterHandler extends TimeLimitedHandler {
+
+  private static final String EXPORT_SPAN_NAME = "ExportDatadogTraces";
+  private static final Gson gson =
+      new GsonBuilder()
+          .setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
+          .create();
+
+  private final URL agentEndpoint;
+  private final String service;
+  private final String type;
+
+  DatadogExporterHandler(String agentEndpoint, String service, String type, Duration deadline)
+      throws MalformedURLException {
+    super(deadline, EXPORT_SPAN_NAME);
+    this.agentEndpoint = new URL(agentEndpoint);
+    this.service = service;
+    this.type = type;
+  }
+
+  private static String attributeValueToString(AttributeValue attributeValue) {
+    return attributeValue.match(
+        Functions.returnToString(),
+        Functions.returnToString(),
+        Functions.returnToString(),
+        Functions.returnToString(),
+        Functions.throwIllegalArgumentException());
+  }
+
+  private static Map<String, String> attributesToMeta(
+      final Map<String, AttributeValue> attributes) {
+    final HashMap<String, String> result = new HashMap<>();
+    attributes.entrySet().stream()
+        .filter(entry -> entry.getValue() != null)
+        .forEach(entry -> result.put(entry.getKey(), attributeValueToString(entry.getValue())));
+    return result;
+  }
+
+  private static long convertSpanId(final SpanId spanId) {
+    final byte[] bytes = spanId.getBytes();
+    long result = 0;
+    for (int i = 0; i < Long.SIZE / Byte.SIZE; i++) {
+      result <<= Byte.SIZE;
+      result |= (bytes[i] & 0xff);
+    }
+    if (result < 0) {
+      return -result;
+    }
+    return result;
+  }
+
+  private static long timestampToNanos(final Timestamp timestamp) {
+    return TimeUnit.SECONDS.toNanos(timestamp.getSeconds()) + timestamp.getNanos();
+  }
+
+  private static Integer errorCode(@Nullable final Status status) {
+    if (status == null || status.equals(Status.OK) || status.equals(Status.ALREADY_EXISTS)) {
+      return 0;
+    }
+
+    return 1;
+  }
+
+  String convertToJson(Collection<SpanData> spanDataList) {
+    final ArrayList<DatadogSpan> datadogSpans = new ArrayList<>();
+    for (SpanData sd : spanDataList) {
+      SpanContext sc = sd.getContext();
+
+      final long startTime = timestampToNanos(sd.getStartTimestamp());
+      final Timestamp endTimestamp =
+          Optional.ofNullable(sd.getEndTimestamp()).orElseGet(() -> Tracing.getClock().now());
+      final long endTime = timestampToNanos(endTimestamp);
+      final long duration = endTime - startTime;
+
+      final Long parentId =
+          Optional.ofNullable(sd.getParentSpanId())
+              .map(DatadogExporterHandler::convertSpanId)
+              .orElse(null);
+
+      final Map<String, AttributeValue> attributes = sd.getAttributes().getAttributeMap();
+      final Map<String, String> meta =
+          attributes.isEmpty() ? new HashMap<>() : attributesToMeta(attributes);
+
+      final String resource = meta.getOrDefault("resource", "UNKNOWN");
+
+      final DatadogSpan span =
+          new DatadogSpan(
+              sc.getTraceId().getLowerLong(),
+              convertSpanId(sc.getSpanId()),
+              sd.getName(),
+              resource,
+              this.service,
+              this.type,
+              startTime,
+              duration,
+              parentId,
+              errorCode(sd.getStatus()),
+              meta);
+      datadogSpans.add(span);
+    }
+
+    final Collection<List<DatadogSpan>> traces =
+        datadogSpans.stream()
+            .collect(Collectors.groupingBy(DatadogSpan::getTraceId, Collectors.toList()))
+            .values();
+
+    return gson.toJson(traces);
+  }
+
+  @Override
+  public void timeLimitedExport(Collection<SpanData> spanDataList) throws Exception {
+    final String data = convertToJson(spanDataList);
+
+    final HttpURLConnection connection = (HttpURLConnection) agentEndpoint.openConnection();
+    connection.setRequestMethod("POST");
+    connection.setRequestProperty("Content-Type", "application/json");
+    connection.setDoOutput(true);
+    OutputStream outputStream = connection.getOutputStream();
+    outputStream.write(data.getBytes(Charset.defaultCharset()));
+    outputStream.flush();
+    outputStream.close();
+    if (connection.getResponseCode() != 200) {
+      throw new Exception("Response " + connection.getResponseCode());
+    }
+  }
+}
diff --git a/exporters/trace/datadog/src/main/java/io/opencensus/exporter/trace/datadog/DatadogSpan.java b/exporters/trace/datadog/src/main/java/io/opencensus/exporter/trace/datadog/DatadogSpan.java
new file mode 100644
index 0000000..d687938
--- /dev/null
+++ b/exporters/trace/datadog/src/main/java/io/opencensus/exporter/trace/datadog/DatadogSpan.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.datadog;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import java.util.Map;
+import javax.annotation.Nullable;
+
+@SuppressFBWarnings("URF_UNREAD_FIELD")
+@SuppressWarnings("unused")
+class DatadogSpan {
+  // Required. The unique integer (64-bit unsigned) ID of the trace containing this span.
+  private final long traceId;
+  // Required. The span integer (64-bit unsigned) ID.
+  private final long spanId;
+  // Required. The span name. The span name must not be longer than 100 characters.
+  private final String name;
+  // Required. The resource you are tracing. The resource name must not be longer than 5000
+  // characters.
+  // A resource is a particular action for a service.
+  private final String resource;
+  // Required. The service you are tracing. The service name must not be longer than 100 characters.
+  private final String service;
+  // Required. The type of request.
+  private final String type;
+  // Required. The start time of the request in nanoseconds from the unix epoch.
+  private final long start;
+  // Required. The duration of the request in nanoseconds.
+  private final long duration;
+  // Optional. The span integer ID of the parent span.
+  @Nullable private final Long parentId;
+  // Optional. Set this value to 1 to indicate that an error occurred. If an error occurs, you
+  // should pass additional information, such as the error message, type and stack information
+  // in the meta property.
+  @Nullable private final Integer error;
+  // Optional. A dictionary of key-value metadata. e.g. tags.
+  @Nullable private final Map<String, String> meta;
+
+  long getTraceId() {
+    return traceId;
+  }
+
+  DatadogSpan(
+      final long traceId,
+      final long spanId,
+      final String name,
+      final String resource,
+      final String service,
+      final String type,
+      final long start,
+      final long duration,
+      @Nullable final Long parentId,
+      @Nullable final Integer error,
+      @Nullable final Map<String, String> meta) {
+    this.traceId = traceId;
+    this.spanId = spanId;
+    this.name = name;
+    this.resource = resource;
+    this.service = service;
+    this.type = type;
+    this.start = start;
+    this.duration = duration;
+    this.parentId = parentId;
+    this.error = error;
+    this.meta = meta;
+  }
+}
diff --git a/exporters/trace/datadog/src/main/java/io/opencensus/exporter/trace/datadog/DatadogTraceConfiguration.java b/exporters/trace/datadog/src/main/java/io/opencensus/exporter/trace/datadog/DatadogTraceConfiguration.java
new file mode 100644
index 0000000..face01c
--- /dev/null
+++ b/exporters/trace/datadog/src/main/java/io/opencensus/exporter/trace/datadog/DatadogTraceConfiguration.java
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.datadog;
+
+import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import io.opencensus.common.Duration;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * Configuration for {@link DatadogTraceExporter}.
+ *
+ * @since 0.19
+ */
+@AutoValue
+@Immutable
+public abstract class DatadogTraceConfiguration {
+
+  @VisibleForTesting static final Duration DEFAULT_DEADLINE = Duration.create(10, 0);
+  @VisibleForTesting static final Duration ZERO = Duration.fromMillis(0);
+
+  DatadogTraceConfiguration() {}
+
+  /**
+   * Returns the URL of the Datadog agent.
+   *
+   * @return the URL of the Datadog agent.
+   * @since 0.19
+   */
+  public abstract String getAgentEndpoint();
+
+  /**
+   * Returns the name of the service being traced.
+   *
+   * @return the name of the service being traced.
+   * @since 0.19
+   */
+  public abstract String getService();
+
+  /**
+   * Return the type of service being traced.
+   *
+   * @return the type of service being traced.
+   * @since 0.19
+   */
+  public abstract String getType();
+
+  /**
+   * Returns the deadline for exporting to Datadog.
+   *
+   * <p>Default value is 10 seconds.
+   *
+   * @return the export deadline.
+   * @since 0.22
+   */
+  public abstract Duration getDeadline();
+
+  /**
+   * Return a new {@link Builder}.
+   *
+   * @return a {@code Builder}
+   * @since 0.19
+   */
+  public static Builder builder() {
+    return new AutoValue_DatadogTraceConfiguration.Builder().setDeadline(DEFAULT_DEADLINE);
+  }
+
+  /**
+   * Builder for {@link DatadogTraceConfiguration}.
+   *
+   * @since 0.19
+   */
+  @AutoValue.Builder
+  public abstract static class Builder {
+
+    Builder() {}
+
+    /**
+     * Sets the URL to send traces to.
+     *
+     * @param agentEndpoint the URL of the agent.
+     * @return this.
+     * @since 0.19
+     */
+    public abstract Builder setAgentEndpoint(String agentEndpoint);
+
+    /**
+     * Sets the name of service being traced.
+     *
+     * @param service the name of the service being traced.
+     * @return this.
+     * @since 0.19
+     */
+    public abstract Builder setService(String service);
+
+    /**
+     * Sets the type of the service being traced.
+     *
+     * @param type the type of service being traced.
+     * @return this.
+     * @since 0.19
+     */
+    public abstract Builder setType(String type);
+
+    /**
+     * Sets the deadline for exporting to Datadog.
+     *
+     * @param deadline the export deadline.
+     * @return this
+     * @since 0.22
+     */
+    public abstract Builder setDeadline(Duration deadline);
+
+    abstract Duration getDeadline();
+
+    abstract DatadogTraceConfiguration autoBuild();
+
+    /**
+     * Builds a {@link DatadogTraceConfiguration}.
+     *
+     * @return a {@code DatadogTraceConfiguration}.
+     * @since 0.22
+     */
+    public DatadogTraceConfiguration build() {
+      Preconditions.checkArgument(getDeadline().compareTo(ZERO) > 0, "Deadline must be positive.");
+      return autoBuild();
+    }
+  }
+}
diff --git a/exporters/trace/datadog/src/main/java/io/opencensus/exporter/trace/datadog/DatadogTraceExporter.java b/exporters/trace/datadog/src/main/java/io/opencensus/exporter/trace/datadog/DatadogTraceExporter.java
new file mode 100644
index 0000000..88c22bb
--- /dev/null
+++ b/exporters/trace/datadog/src/main/java/io/opencensus/exporter/trace/datadog/DatadogTraceExporter.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.datadog;
+
+import static com.google.common.base.Preconditions.checkState;
+
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.export.SpanExporter;
+import java.net.MalformedURLException;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.GuardedBy;
+
+/**
+ * An OpenCensus span exporter implementation which exports data to Datadog.
+ *
+ * <p>Example of usage:
+ *
+ * <pre>{@code
+ * public static void main(String[] args) {
+ *   DatadogTraceConfiguration config = DatadogTraceConfiguration.builder()
+ *     .setAgentEndpoint("http://localhost:8126/v0.3/traces")
+ *     .setService("myService")
+ *     .setType("web")
+ *     .build();
+ * DatadogTraceExporter.createAndRegister(config);
+ *   ... // Do work.
+ * }
+ * }</pre>
+ *
+ * @since 0.19
+ */
+public final class DatadogTraceExporter {
+
+  private static final Object monitor = new Object();
+  private static final String REGISTER_NAME = DatadogTraceExporter.class.getName();
+
+  @GuardedBy("monitor")
+  @Nullable
+  private static SpanExporter.Handler handler = null;
+
+  private DatadogTraceExporter() {}
+
+  /**
+   * Creates and registers the Datadog Trace exporter to the OpenCensus library. Only one Datadog
+   * exporter can be registered at any point.
+   *
+   * @param configuration the {@code DatadogTraceConfiguration} used to create the exporter.
+   * @throws MalformedURLException if the agent URL is invalid.
+   * @since 0.19
+   */
+  public static void createAndRegister(DatadogTraceConfiguration configuration)
+      throws MalformedURLException {
+    synchronized (monitor) {
+      checkState(handler == null, "Datadog exporter is already registered.");
+
+      String agentEndpoint = configuration.getAgentEndpoint();
+      String service = configuration.getService();
+      String type = configuration.getType();
+
+      final DatadogExporterHandler exporterHandler =
+          new DatadogExporterHandler(agentEndpoint, service, type, configuration.getDeadline());
+      handler = exporterHandler;
+      Tracing.getExportComponent()
+          .getSpanExporter()
+          .registerHandler(REGISTER_NAME, exporterHandler);
+    }
+  }
+
+  /**
+   * Unregisters the Datadog Trace exporter from the OpenCensus library.
+   *
+   * @throws IllegalStateException if a Datadog exporter is not registered.
+   * @since 0.19
+   */
+  public static void unregister() {
+    synchronized (monitor) {
+      checkState(handler != null, "Datadog exporter is not registered.");
+      Tracing.getExportComponent().getSpanExporter().unregisterHandler(REGISTER_NAME);
+      handler = null;
+    }
+  }
+}
diff --git a/exporters/trace/datadog/src/test/java/io/opencensus/exporter/trace/datadog/DatadogExporterHandlerTest.java b/exporters/trace/datadog/src/test/java/io/opencensus/exporter/trace/datadog/DatadogExporterHandlerTest.java
new file mode 100644
index 0000000..c2c268a
--- /dev/null
+++ b/exporters/trace/datadog/src/test/java/io/opencensus/exporter/trace/datadog/DatadogExporterHandlerTest.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.datadog;
+
+import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.exporter.trace.datadog.DatadogTraceConfiguration.DEFAULT_DEADLINE;
+
+import com.google.common.collect.ImmutableMap;
+import io.opencensus.common.Timestamp;
+import io.opencensus.trace.Annotation;
+import io.opencensus.trace.AttributeValue;
+import io.opencensus.trace.MessageEvent;
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.SpanId;
+import io.opencensus.trace.Status;
+import io.opencensus.trace.TraceId;
+import io.opencensus.trace.TraceOptions;
+import io.opencensus.trace.Tracestate;
+import io.opencensus.trace.export.SpanData;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link DatadogExporterHandler}. */
+@RunWith(JUnit4.class)
+public class DatadogExporterHandlerTest {
+  private static final String TRACE_ID = "d239036e7d5cec116b562147388b35bf";
+  private static final String SPAN_ID = "9cc1e3049173be09";
+  private static final String PARENT_SPAN_ID = "8b03ab423da481c5";
+  private static final Map<String, AttributeValue> attributes =
+      ImmutableMap.of(
+          "http.url", AttributeValue.stringAttributeValue("http://localhost/foo"),
+          "resource", AttributeValue.stringAttributeValue("/foo"));
+  private static final List<SpanData.TimedEvent<Annotation>> annotations = Collections.emptyList();
+  private static final List<SpanData.TimedEvent<MessageEvent>> messageEvents =
+      Collections.emptyList();
+
+  private DatadogExporterHandler handler;
+
+  @Before
+  public void setup() throws Exception {
+    this.handler =
+        new DatadogExporterHandler("http://localhost", "service", "web", DEFAULT_DEADLINE);
+  }
+
+  @Test
+  public void testJsonConversion() {
+    SpanData data =
+        SpanData.create(
+            SpanContext.create(
+                TraceId.fromLowerBase16(TRACE_ID),
+                SpanId.fromLowerBase16(SPAN_ID),
+                TraceOptions.builder().setIsSampled(true).build(),
+                Tracestate.builder().build()),
+            SpanId.fromLowerBase16(PARENT_SPAN_ID),
+            /* hasRemoteParent= */ true,
+            "SpanName",
+            /* kind= */ null,
+            /* startTimestamp= */ Timestamp.create(1505855794, 194009601),
+            SpanData.Attributes.create(attributes, 0),
+            SpanData.TimedEvents.create(annotations, 0),
+            SpanData.TimedEvents.create(messageEvents, 0),
+            SpanData.Links.create(Collections.emptyList(), 0),
+            /* childSpanCount= */ null,
+            Status.OK,
+            /* endTimestamp= */ Timestamp.create(1505855799, 465726528));
+
+    final String expected =
+        "[["
+            + "{"
+            + "\"trace_id\":3298601478987650031,"
+            + "\"span_id\":7151185124527981047,"
+            + "\"name\":\"SpanName\","
+            + "\"resource\":\"/foo\","
+            + "\"service\":\"service\","
+            + "\"type\":\"web\","
+            + "\"start\":1505855794194009601,"
+            + "\"duration\":5271716927,"
+            + "\"parent_id\":8429705776517054011,"
+            + "\"error\":0,"
+            + "\"meta\":{"
+            + "\"resource\":\"/foo\","
+            + "\"http.url\":\"http://localhost/foo\""
+            + "}"
+            + "}"
+            + "]]";
+
+    assertThat(handler.convertToJson(Collections.singletonList(data))).isEqualTo(expected);
+  }
+
+  @Test
+  public void testNullableConversion() {
+    SpanData data =
+        SpanData.create(
+            SpanContext.create(
+                TraceId.fromLowerBase16(TRACE_ID),
+                SpanId.fromLowerBase16(SPAN_ID),
+                TraceOptions.builder().setIsSampled(true).build(),
+                Tracestate.builder().build()),
+            /* parentSpanId= */ null,
+            /* hasRemoteParent= */ false,
+            "SpanName",
+            /* kind= */ null,
+            /* startTimestamp= */ Timestamp.create(1505855794, 194009601),
+            SpanData.Attributes.create(attributes, 0),
+            SpanData.TimedEvents.create(annotations, 0),
+            SpanData.TimedEvents.create(messageEvents, 0),
+            SpanData.Links.create(Collections.emptyList(), 0),
+            /* childSpanCount= */ null,
+            /* status= */ null,
+            /* endTimestamp= */ null);
+
+    final String expected =
+        "[["
+            + "{"
+            + "\"trace_id\":3298601478987650031,"
+            + "\"span_id\":7151185124527981047,"
+            + "\"name\":\"SpanName\","
+            + "\"resource\":\"/foo\","
+            + "\"service\":\"service\","
+            + "\"type\":\"web\","
+            + "\"start\":1505855794194009601,"
+            + "\"duration\":-1505855794194009601," // the tracer clock is set to 0 in tests
+            + "\"error\":0,"
+            + "\"meta\":{"
+            + "\"resource\":\"/foo\","
+            + "\"http.url\":\"http://localhost/foo\""
+            + "}"
+            + "}"
+            + "]]";
+
+    assertThat(handler.convertToJson(Collections.singletonList(data))).isEqualTo(expected);
+  }
+}
diff --git a/exporters/trace/elasticsearch/README.md b/exporters/trace/elasticsearch/README.md
new file mode 100644
index 0000000..ea81fcc
--- /dev/null
+++ b/exporters/trace/elasticsearch/README.md
@@ -0,0 +1,83 @@
+# OpenCensus Elasticsearch Exporter
+
+The *OpenCensus Elasticsearch trace exporter* is a trace exporter that exports
+data to [Elasticsearch](https://www.elastic.co/products/elasticsearch).
+
+Elasticsearch is a distributed, RESTful search and analytics engine.
+It centrally stores your data so you can discover the expected and uncover the unexpected.
+Using [Kibana](https://www.elastic.co/products/kibana) you can visualize exported traces with
+custom dashboards and search them easily.
+
+Once traces are exported to Elasticsearch, you can search them to inspect individual spans in detail.
+
+
+
+## Quickstart
+
+
+### Prerequisites
+
+#### Add the dependencies to your project
+
+For Maven add to your `pom.xml`:
+
+```xml
+<dependencies>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-api</artifactId>
+    <version>0.28.3</version>
+  </dependency>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-exporter-trace-elasticsearch</artifactId>
+    <version>0.28.3</version>
+  </dependency>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-impl</artifactId>
+    <version>0.28.3</version>
+    <scope>runtime</scope>
+  </dependency>
+</dependencies>
+```
+
+For Gradle add to your dependencies:
+
+```groovy
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-exporter-trace-elasticsearch:0.28.3'
+runtime 'io.opencensus:opencensus-impl:0.28.3'
+```
+
+#### Register the exporter
+
+The `ElasticsearchTraceConfiguration` holds the configuration required by the exporter.
+
+```java
+private static final String ELASTIC_SEARCH_URL = "http://localhost:9200";
+private static final String INDEX_FOR_TRACE = "opencensus";
+private static final String TYPE_FOR_TRACE = "trace";
+private static final String APP_NAME = "sample-opencensus";
+
+public static void main(String[] args) throws Exception {
+  ElasticsearchTraceConfiguration elasticsearchTraceConfiguration =
+      ElasticsearchTraceConfiguration.builder()
+          .setAppName(APP_NAME)
+          .setElasticsearchUrl(ELASTIC_SEARCH_URL)
+          .setElasticsearchIndex(INDEX_FOR_TRACE)
+          .setElasticsearchType(TYPE_FOR_TRACE)
+          .build();
+  ElasticsearchTraceExporter.createAndRegister(elasticsearchTraceConfiguration);
+}
+```
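+
+Once the exporter is registered, spans recorded through the regular OpenCensus tracing API are
+picked up and sent to Elasticsearch. A minimal sketch (the span name and sampler choice here are
+illustrative, not part of this exporter):
+
+```java
+import io.opencensus.common.Scope;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.samplers.Samplers;
+
+public class TracedWork {
+  private static final Tracer tracer = Tracing.getTracer();
+
+  public static void doWork() {
+    // Spans recorded inside this scope are exported by the registered handler.
+    Scope scope =
+        tracer.spanBuilder("doWork").setSampler(Samplers.alwaysSample()).startScopedSpan();
+    try {
+      // ... application logic ...
+    } finally {
+      scope.close();
+    }
+  }
+}
+```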
+
+
+![Sample Traces exported to Elasticsearch](https://raw.githubusercontent.com/malike/distributed-tracing/master/opencensus/distributed_tracing_elk_discover.png?raw=true)
+
+
+#### Java Versions
+
+Java 6 or above is required to use this exporter.
+
+
diff --git a/exporters/trace/elasticsearch/build.gradle b/exporters/trace/elasticsearch/build.gradle
new file mode 100644
index 0000000..619bae0
--- /dev/null
+++ b/exporters/trace/elasticsearch/build.gradle
@@ -0,0 +1,20 @@
+description = 'OpenCensus Trace Elasticsearch Exporter'
+
+[compileJava, compileTestJava].each() {
+    it.sourceCompatibility = 1.6
+    it.targetCompatibility = 1.6
+}
+
+
+dependencies {
+
+    compileOnly libraries.auto_value
+
+    compile project(':opencensus-api'),
+            project(':opencensus-exporter-trace-util'),
+            libraries.guava
+
+    testCompile project(':opencensus-api')
+
+    signature "org.codehaus.mojo.signature:java17:+@signature"
+}
\ No newline at end of file
diff --git a/exporters/trace/elasticsearch/src/main/java/io/opencensus/exporter/trace/elasticsearch/ElasticsearchTraceConfiguration.java b/exporters/trace/elasticsearch/src/main/java/io/opencensus/exporter/trace/elasticsearch/ElasticsearchTraceConfiguration.java
new file mode 100644
index 0000000..8822c08
--- /dev/null
+++ b/exporters/trace/elasticsearch/src/main/java/io/opencensus/exporter/trace/elasticsearch/ElasticsearchTraceConfiguration.java
@@ -0,0 +1,207 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.elasticsearch;
+
+import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import io.opencensus.common.Duration;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * Configurations for {@link ElasticsearchTraceExporter}.
+ *
+ * @since 0.20.0
+ */
+@AutoValue
+@Immutable
+public abstract class ElasticsearchTraceConfiguration {
+
+  @VisibleForTesting static final Duration DEFAULT_DEADLINE = Duration.create(10, 0);
+  @VisibleForTesting static final Duration ZERO = Duration.fromMillis(0);
+
+  /**
+   * Returns a new {@link Builder}.
+   *
+   * @return a {@code Builder}.
+   * @since 0.20.0
+   */
+  public static Builder builder() {
+    return new AutoValue_ElasticsearchTraceConfiguration.Builder().setDeadline(DEFAULT_DEADLINE);
+  }
+
+  /**
+   * Retrieves the app name configured.
+   *
+   * @return the name of app to include in traces.
+   * @since 0.20.0
+   */
+  public abstract String getAppName();
+
+  /**
+   * Retrieves user name used to access Elasticsearch.
+   *
+   * @return the username for Elasticsearch.
+   * @since 0.20.0
+   */
+  @Nullable
+  public abstract String getUserName();
+
+  /**
+   * Retrieves password used to access Elasticsearch.
+   *
+   * @return the password for Elasticsearch.
+   * @since 0.20.0
+   */
+  @Nullable
+  public abstract String getPassword();
+
+  /**
+   * Retrieves base url for Elasticsearch.
+   *
+   * @return the url for Elasticsearch.
+   * @since 0.20.0
+   */
+  public abstract String getElasticsearchUrl();
+
+  /**
+   * Retrieves index in Elasticsearch configured for storing trace data.
+   *
+   * @return the Elasticsearch index where the trace will be saved.
+   * @since 0.20.0
+   */
+  public abstract String getElasticsearchIndex();
+
+  /**
+   * Retrieves type in Elasticsearch configured for storing trace data.
+   *
+   * @return the Elasticsearch type where the trace will be saved.
+   * @since 0.20.0
+   */
+  public abstract String getElasticsearchType();
+
+  /**
+   * Returns the deadline for exporting to Elasticsearch.
+   *
+   * <p>Default value is 10 seconds.
+   *
+   * @return the export deadline.
+   * @since 0.22
+   */
+  public abstract Duration getDeadline();
+
+  /**
+   * Builder for {@link ElasticsearchTraceConfiguration}.
+   *
+   * @since 0.20.0
+   */
+  @AutoValue.Builder
+  public abstract static class Builder {
+
+    Builder() {}
+
+    abstract ElasticsearchTraceConfiguration autoBuild();
+
+    /**
+     * Sets the name of the app used in traces.
+     *
+     * @param appName the name of app to include in traces.
+     * @return this.
+     * @since 0.20.0
+     */
+    public abstract Builder setAppName(String appName);
+
+    /**
+     * Sets the user name for Elasticsearch, if the cluster is protected.
+     *
+     * @param userName of Elasticsearch cluster.
+     * @return this.
+     * @since 0.20.0
+     */
+    public abstract Builder setUserName(String userName);
+
+    /**
+     * Sets the password for Elasticsearch, if the cluster is protected.
+     *
+     * @param password of Elasticsearch cluster.
+     * @return this.
+     * @since 0.20.0
+     */
+    public abstract Builder setPassword(String password);
+
+    /**
+     * Sets the base URL of Elasticsearch.
+     *
+     * @param elasticsearchUrl URL of Elasticsearch.
+     * @return this.
+     * @since 0.20.0
+     */
+    public abstract Builder setElasticsearchUrl(String elasticsearchUrl);
+
+    /**
+     * Sets the data index of Elasticsearch.
+     *
+     * @param elasticsearchIndex the Elasticsearch index.
+     * @return this.
+     * @since 0.20.0
+     */
+    public abstract Builder setElasticsearchIndex(String elasticsearchIndex);
+
+    /**
+     * Sets the Elasticsearch type.
+     *
+     * @param elasticsearchType the Elasticsearch type.
+     * @return this.
+     * @since 0.20.0
+     */
+    public abstract Builder setElasticsearchType(String elasticsearchType);
+
+    /**
+     * Sets the deadline for exporting to Elasticsearch.
+     *
+     * @param deadline the export deadline.
+     * @return this
+     * @since 0.22
+     */
+    public abstract Builder setDeadline(Duration deadline);
+
+    /**
+     * Builds a {@link ElasticsearchTraceConfiguration}.
+     *
+     * @return a {@code ElasticsearchTraceConfiguration}.
+     * @since 0.20.0
+     */
+    public ElasticsearchTraceConfiguration build() {
+      ElasticsearchTraceConfiguration elasticsearchTraceConfiguration = autoBuild();
+      Preconditions.checkArgument(
+          !Strings.isNullOrEmpty(elasticsearchTraceConfiguration.getAppName()),
+          "Invalid app name.");
+      Preconditions.checkArgument(
+          !Strings.isNullOrEmpty(elasticsearchTraceConfiguration.getElasticsearchIndex()),
+          "Invalid Elasticsearch index.");
+      Preconditions.checkArgument(
+          !Strings.isNullOrEmpty(elasticsearchTraceConfiguration.getElasticsearchType()),
+          "Invalid Elasticsearch type.");
+      Preconditions.checkArgument(
+          elasticsearchTraceConfiguration.getDeadline().compareTo(ZERO) > 0,
+          "Deadline must be positive.");
+      return elasticsearchTraceConfiguration;
+    }
+  }
+}
diff --git a/exporters/trace/elasticsearch/src/main/java/io/opencensus/exporter/trace/elasticsearch/ElasticsearchTraceExporter.java b/exporters/trace/elasticsearch/src/main/java/io/opencensus/exporter/trace/elasticsearch/ElasticsearchTraceExporter.java
new file mode 100644
index 0000000..a5c62ba
--- /dev/null
+++ b/exporters/trace/elasticsearch/src/main/java/io/opencensus/exporter/trace/elasticsearch/ElasticsearchTraceExporter.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.elasticsearch;
+
+import com.google.common.base.Preconditions;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.export.SpanExporter;
+import io.opencensus.trace.export.SpanExporter.Handler;
+import java.net.MalformedURLException;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.GuardedBy;
+
+/**
+ * An OpenCensus span exporter implementation which exports data to Elasticsearch.
+ *
+ * <p>Example of usage:
+ *
+ * <pre>{@code
+ * public static void main(String[] args) throws Exception{
+ *
+ *     ElasticsearchTraceConfiguration elasticsearchTraceConfiguration =
+ *     ElasticsearchTraceConfiguration.builder().setAppName("sample-app").setElasticsearchUrl("http://localhost:9200")
+ *     .setElasticsearchIndex("opencensus-index").setElasticsearchType("traces").build();
+ *     ElasticsearchTraceExporter.createAndRegister(elasticsearchTraceConfiguration);
+ *
+ *     // Do work
+ *
+ * }
+ *
+ * }</pre>
+ *
+ * @since 0.20.0
+ */
+public final class ElasticsearchTraceExporter {
+
+  private static final String REGISTERED_TRACE_EXPORTER_NAME =
+      ElasticsearchTraceExporter.class.getName();
+  private static final Object monitor = new Object();
+
+  @GuardedBy("monitor")
+  @Nullable
+  private static Handler handler = null;
+
+  private ElasticsearchTraceExporter() {}
+
+  /**
+   * Creates and registers the ElasticsearchTraceExporter with the OpenCensus library.
+   *
+   * @param elasticsearchTraceConfiguration {@link ElasticsearchTraceConfiguration}
+   * @throws MalformedURLException when the Elasticsearch url is invalid.
+   * @throws IllegalStateException if ElasticsearchTraceExporter is already created.
+   * @throws IllegalArgumentException when mandatory parameters in the configuration are missing.
+   * @since 0.20.0
+   */
+  @SuppressWarnings("nullness")
+  public static void createAndRegister(
+      ElasticsearchTraceConfiguration elasticsearchTraceConfiguration)
+      throws MalformedURLException {
+    synchronized (monitor) {
+      Preconditions.checkState(handler == null, "Elasticsearch exporter already registered.");
+      Preconditions.checkArgument(
+          elasticsearchTraceConfiguration != null, "Elasticsearch configuration not set.");
+      Preconditions.checkArgument(
+          elasticsearchTraceConfiguration.getElasticsearchIndex() != null,
+          "Elasticsearch index not specified");
+      Preconditions.checkArgument(
+          elasticsearchTraceConfiguration.getElasticsearchType() != null,
+          "Elasticsearch type not specified");
+      Preconditions.checkArgument(
+          elasticsearchTraceConfiguration.getElasticsearchUrl() != null,
+          "Elasticsearch URL not specified");
+      handler = new ElasticsearchTraceHandler(elasticsearchTraceConfiguration);
+      register(Tracing.getExportComponent().getSpanExporter(), handler);
+    }
+  }
+
+  /**
+   * Registers the ElasticsearchTraceExporter.
+   *
+   * @param spanExporter instance of the {@link SpanExporter} registered.
+   * @param handler instance of the {@link Handler} registered.
+   */
+  static void register(SpanExporter spanExporter, Handler handler) {
+    spanExporter.registerHandler(REGISTERED_TRACE_EXPORTER_NAME, handler);
+  }
+
+  /**
+   * Unregisters the ElasticsearchTraceExporter from OpenCensus.
+   *
+   * @throws IllegalStateException if ElasticsearchTraceExporter is already unregistered.
+   * @since 0.20.0
+   */
+  public static void unregister() {
+    synchronized (monitor) {
+      Preconditions.checkState(
+          handler != null,
+          "Can't unregister Elasticsearch Trace Exporter which is not registered.");
+      unregister(Tracing.getExportComponent().getSpanExporter());
+      handler = null;
+    }
+  }
+
+  /**
+   * Unregisters the ElasticsearchTraceExporter from OpenCensus.
+   *
+   * @param spanExporter the instance of the {@code SpanExporter} to be unregistered.
+   * @since 0.20.0
+   */
+  static void unregister(SpanExporter spanExporter) {
+    spanExporter.unregisterHandler(REGISTERED_TRACE_EXPORTER_NAME);
+  }
+}
diff --git a/exporters/trace/elasticsearch/src/main/java/io/opencensus/exporter/trace/elasticsearch/ElasticsearchTraceHandler.java b/exporters/trace/elasticsearch/src/main/java/io/opencensus/exporter/trace/elasticsearch/ElasticsearchTraceHandler.java
new file mode 100644
index 0000000..4920fc3
--- /dev/null
+++ b/exporters/trace/elasticsearch/src/main/java/io/opencensus/exporter/trace/elasticsearch/ElasticsearchTraceHandler.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.elasticsearch;
+
+import com.google.common.io.BaseEncoding;
+import io.opencensus.exporter.trace.util.TimeLimitedHandler;
+import io.opencensus.trace.export.SpanData;
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.nio.charset.Charset;
+import java.util.Collection;
+import java.util.List;
+import javax.annotation.Nullable;
+
+final class ElasticsearchTraceHandler extends TimeLimitedHandler {
+
+  private final ElasticsearchTraceConfiguration elasticsearchTraceConfiguration;
+  private final String appName;
+  private final URL indexUrl;
+  private static final String CONTENT_TYPE = "application/json";
+  private static final String REQUEST_METHOD = "POST";
+  private static final int CONNECTION_TIMEOUT_MILLISECONDS = 6000;
+  private static final String EXPORT_SPAN_NAME = "ExportElasticsearchTraces";
+
+  ElasticsearchTraceHandler(ElasticsearchTraceConfiguration elasticsearchTraceConfiguration)
+      throws MalformedURLException {
+    super(elasticsearchTraceConfiguration.getDeadline(), EXPORT_SPAN_NAME);
+    this.elasticsearchTraceConfiguration = elasticsearchTraceConfiguration;
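+    // The index URL has the form <elasticsearchUrl>/<index>/<type>/; span documents are POSTed there.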
+    StringBuilder sb = new StringBuilder();
+    sb.append(elasticsearchTraceConfiguration.getElasticsearchUrl()).append("/");
+    sb.append(elasticsearchTraceConfiguration.getElasticsearchIndex()).append("/");
+    sb.append(elasticsearchTraceConfiguration.getElasticsearchType()).append("/");
+    indexUrl = new URL(sb.toString());
+    appName = elasticsearchTraceConfiguration.getAppName();
+  }
+
+  /**
+   * Handles exporting of traces in {@code ElasticsearchTraceExporter}.
+   *
+   * @param spanDataList Collection of {@code SpanData} to be exported.
+   */
+  @Override
+  public void timeLimitedExport(Collection<SpanData> spanDataList) throws Exception {
+    List<String> jsonList = JsonConversionUtils.convertToJson(appName, spanDataList);
+    if (jsonList.isEmpty()) {
+      return;
+    }
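+    // Index each span's JSON as a separate document with its own HTTP request.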
+    for (String json : jsonList) {
+
+      OutputStream outputStream = null;
+      InputStream inputStream = null;
+
+      try {
+        HttpURLConnection connection = (HttpURLConnection) indexUrl.openConnection();
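+        // When a user name is configured, authenticate with HTTP Basic auth.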
+        if (elasticsearchTraceConfiguration.getUserName() != null) {
+          String parameters =
+              BaseEncoding.base64()
+                  .encode(
+                      (elasticsearchTraceConfiguration.getUserName()
+                              + ":"
+                              + elasticsearchTraceConfiguration.getPassword())
+                          .getBytes("UTF-8"));
+          connection.setRequestProperty("Authorization", "Basic " + parameters);
+        }
+        connection.setRequestMethod(REQUEST_METHOD);
+        connection.setDoOutput(true);
+        connection.setConnectTimeout(CONNECTION_TIMEOUT_MILLISECONDS);
+        connection.setRequestProperty("Content-Type", CONTENT_TYPE);
+        outputStream = connection.getOutputStream();
+        outputStream.write(json.getBytes(Charset.defaultCharset()));
+        outputStream.flush();
+        inputStream = connection.getInputStream();
+        if (connection.getResponseCode() != 200) {
+          throw new Exception("Response " + connection.getResponseCode());
+        }
+      } finally {
+        closeStream(inputStream);
+        closeStream(outputStream);
+      }
+    }
+  }
+
+  // Closes an input or output stream and ignores potential IOException.
+  private static void closeStream(@Nullable Closeable stream) {
+    if (stream != null) {
+      try {
+        stream.close();
+      } catch (IOException e) {
+        // ignore
+      }
+    }
+  }
+}
diff --git a/exporters/trace/elasticsearch/src/main/java/io/opencensus/exporter/trace/elasticsearch/JsonConversionUtils.java b/exporters/trace/elasticsearch/src/main/java/io/opencensus/exporter/trace/elasticsearch/JsonConversionUtils.java
new file mode 100644
index 0000000..db737b0
--- /dev/null
+++ b/exporters/trace/elasticsearch/src/main/java/io/opencensus/exporter/trace/elasticsearch/JsonConversionUtils.java
@@ -0,0 +1,167 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.elasticsearch;
+
+import static java.util.concurrent.TimeUnit.NANOSECONDS;
+import static java.util.concurrent.TimeUnit.SECONDS;
+
+import io.opencensus.common.Duration;
+import io.opencensus.common.Functions;
+import io.opencensus.common.Timestamp;
+import io.opencensus.trace.AttributeValue;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Span.Kind;
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.SpanId;
+import io.opencensus.trace.Status;
+import io.opencensus.trace.TraceId;
+import io.opencensus.trace.export.SpanData;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import javax.annotation.Nullable;
+
+/**
+ * Util to parse {@link SpanData} to json for {@link ElasticsearchTraceExporter}.
+ *
+ * @since 0.20.0
+ */
+final class JsonConversionUtils {
+
+  private static final String ELASTICSEARCH_DATE_PATTERN = "yyyy-MM-dd'T'HH:mm:ss.SSSZZ";
+
+  private JsonConversionUtils() {}
+
+  private static String encodeTraceId(TraceId traceId) {
+    return traceId.toLowerBase16();
+  }
+
+  private static String encodeSpanId(SpanId spanId) {
+    return spanId.toLowerBase16();
+  }
+
+  private static String toSpanName(SpanData spanData) {
+    return spanData.getName();
+  }
+
+  private static long toMillis(Timestamp timestamp) {
+    return SECONDS.toMillis(timestamp.getSeconds()) + NANOSECONDS.toMillis(timestamp.getNanos());
+  }
+
+  private static long toMillis(Timestamp start, Timestamp end) {
+    Duration duration = end.subtractTimestamp(start);
+    return duration.toMillis();
+  }
+
+  private static Date toDate(Timestamp timestamp) {
+    return new Date(toMillis(timestamp));
+  }
+
+  private static String formatDate(Timestamp timestamp) {
+    return new SimpleDateFormat(ELASTICSEARCH_DATE_PATTERN).format(toDate(timestamp));
+  }
+
+  @Nullable
+  @SuppressWarnings("nullness")
+  private static String attributeValueToString(AttributeValue attributeValue) {
+    return attributeValue.match(
+        Functions.returnToString(),
+        Functions.returnToString(),
+        Functions.returnToString(),
+        Functions.returnToString(),
+        Functions.returnConstant(""));
+  }
+
+  private static String toSpanKind(SpanData spanData) {
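+    // SERVER when the span kind is SERVER, or when the kind is unset but the span has a remote
+    // parent; CLIENT when the kind is CLIENT; otherwise an empty string.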
+    if (spanData.getKind() == Kind.SERVER
+        || (spanData.getKind() == null && Boolean.TRUE.equals(spanData.getHasRemoteParent()))) {
+      return Span.Kind.SERVER.name();
+    }
+    if (spanData.getKind() == Kind.CLIENT) {
+      return Span.Kind.CLIENT.name();
+    }
+    return "";
+  }
+
+  /**
+   * Converts a collection of {@link SpanData} to a list of JSON strings.
+   *
+   * @param appName the name of the app to include in traces.
+   * @param spanDataList the collection of {@code SpanData} to be converted to JSON.
+   * @return the list of JSON documents to be indexed.
+   */
+  static List<String> convertToJson(String appName, Collection<SpanData> spanDataList) {
+    List<String> spanJson = new ArrayList<String>();
+    if (spanDataList == null) {
+      return spanJson;
+    }
+    for (final SpanData span : spanDataList) {
+      StringBuilder sb = new StringBuilder();
+      final SpanContext spanContext = span.getContext();
+      final SpanId parentSpanId = span.getParentSpanId();
+      final Timestamp startTimestamp = span.getStartTimestamp();
+      final Timestamp endTimestamp = span.getEndTimestamp();
+      final Status status = span.getStatus();
+      if (endTimestamp == null) {
+        continue;
+      }
+      sb.append('{');
+      sb.append("\"appName\":\"").append(appName).append("\",");
+      sb.append("\"spanId\":\"").append(encodeSpanId(spanContext.getSpanId())).append("\",");
+      sb.append("\"traceId\":\"").append(encodeTraceId(spanContext.getTraceId())).append("\",");
+      if (parentSpanId != null) {
+        sb.append("\"parentId\":\"").append(encodeSpanId(parentSpanId)).append("\",");
+      }
+      sb.append("\"timestamp\":").append(toMillis(startTimestamp)).append(',');
+      sb.append("\"duration\":").append(toMillis(startTimestamp, endTimestamp)).append(',');
+      sb.append("\"name\":\"").append(toSpanName(span)).append("\",");
+      sb.append("\"kind\":\"").append(toSpanKind(span)).append("\",");
+      sb.append("\"dateStarted\":\"").append(formatDate(startTimestamp)).append("\",");
+      sb.append("\"dateEnded\":\"").append(formatDate(endTimestamp)).append('"');
+      if (status == null) {
+        sb.append(",\"status\":").append("\"ok\"");
+      } else if (!status.isOk()) {
+        sb.append(",\"error\":").append("true");
+      }
+      Map<String, AttributeValue> attributeMap = span.getAttributes().getAttributeMap();
+      if (attributeMap.size() > 0) {
+        StringBuilder builder = new StringBuilder();
+        builder.append('{');
+        for (Entry<String, AttributeValue> entry : attributeMap.entrySet()) {
+          if (builder.length() > 1) {
+            builder.append(',');
+          }
+          builder
+              .append("\"")
+              .append(entry.getKey())
+              .append("\":\"")
+              .append(attributeValueToString(entry.getValue()))
+              .append("\"");
+        }
+        builder.append('}');
+        sb.append(",\"data\":").append(builder);
+      }
+      sb.append('}');
+      spanJson.add(sb.toString());
+    }
+    return spanJson;
+  }
+}
diff --git a/exporters/trace/elasticsearch/src/test/java/io/opencensus/exporter/trace/elasticsearch/ElasticsearchTraceExporterTest.java b/exporters/trace/elasticsearch/src/test/java/io/opencensus/exporter/trace/elasticsearch/ElasticsearchTraceExporterTest.java
new file mode 100644
index 0000000..72010c0
--- /dev/null
+++ b/exporters/trace/elasticsearch/src/test/java/io/opencensus/exporter/trace/elasticsearch/ElasticsearchTraceExporterTest.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.elasticsearch;
+
+import static org.mockito.Mockito.verify;
+
+import io.opencensus.trace.export.SpanExporter;
+import io.opencensus.trace.export.SpanExporter.Handler;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+/** Unit test for {@link ElasticsearchTraceExporter}. */
+@RunWith(JUnit4.class)
+public class ElasticsearchTraceExporterTest {
+
+  private static final String REGISTERED_TRACE_EXPORTER_NAME =
+      ElasticsearchTraceExporter.class.getName();
+  private static final String SAMPLE_APP_NAME = "test-app";
+  private static final String ELASTICSEARCH_USERNAME = "username";
+  private static final String ELASTICSEARCH_PASSWORD = "password";
+  private static final String ELASTICSEARCH_URL = "http://localhost:9200";
+  private static final String ELASTICSEARCH_INDEX = "opencensus";
+  private static final String ELASTICSEARCH_TYPE = "type";
+
+  @Mock private SpanExporter spanExporter;
+  private ElasticsearchTraceConfiguration elasticsearchTraceConfiguration;
+
+  @Before
+  public void setUp() {
+    elasticsearchTraceConfiguration =
+        ElasticsearchTraceConfiguration.builder()
+            .setAppName(SAMPLE_APP_NAME)
+            .setUserName(ELASTICSEARCH_USERNAME)
+            .setPassword(ELASTICSEARCH_PASSWORD)
+            .setElasticsearchUrl(ELASTICSEARCH_URL)
+            .setElasticsearchIndex(ELASTICSEARCH_INDEX)
+            .setElasticsearchType(ELASTICSEARCH_TYPE)
+            .build();
+    MockitoAnnotations.initMocks(this);
+  }
+
+  @Test
+  public void testRegisterElasticsearchExporterService() throws Exception {
+    Handler handler = new ElasticsearchTraceHandler(elasticsearchTraceConfiguration);
+    ElasticsearchTraceExporter.register(spanExporter, handler);
+    verify(spanExporter).registerHandler(REGISTERED_TRACE_EXPORTER_NAME, handler);
+  }
+
+  @Test
+  public void testUnregisterElasticsearchExporterService() throws Exception {
+    Handler handler = new ElasticsearchTraceHandler(elasticsearchTraceConfiguration);
+    ElasticsearchTraceExporter.register(spanExporter, handler);
+
+    verify(spanExporter).registerHandler(REGISTERED_TRACE_EXPORTER_NAME, handler);
+
+    ElasticsearchTraceExporter.unregister(spanExporter);
+    verify(spanExporter).unregisterHandler(REGISTERED_TRACE_EXPORTER_NAME);
+  }
+}
diff --git a/exporters/trace/elasticsearch/src/test/java/io/opencensus/exporter/trace/elasticsearch/JsonConversionUtilsTest.java b/exporters/trace/elasticsearch/src/test/java/io/opencensus/exporter/trace/elasticsearch/JsonConversionUtilsTest.java
new file mode 100644
index 0000000..c88c461
--- /dev/null
+++ b/exporters/trace/elasticsearch/src/test/java/io/opencensus/exporter/trace/elasticsearch/JsonConversionUtilsTest.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.elasticsearch;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import io.opencensus.common.Timestamp;
+import io.opencensus.trace.Annotation;
+import io.opencensus.trace.AttributeValue;
+import io.opencensus.trace.Link;
+import io.opencensus.trace.MessageEvent;
+import io.opencensus.trace.MessageEvent.Type;
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.SpanId;
+import io.opencensus.trace.Status;
+import io.opencensus.trace.TraceId;
+import io.opencensus.trace.TraceOptions;
+import io.opencensus.trace.Tracestate;
+import io.opencensus.trace.export.SpanData;
+import io.opencensus.trace.export.SpanData.Attributes;
+import io.opencensus.trace.export.SpanData.Links;
+import io.opencensus.trace.export.SpanData.TimedEvent;
+import io.opencensus.trace.export.SpanData.TimedEvents;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit test for {@link JsonConversionUtils}. */
+@RunWith(JUnit4.class)
+public class JsonConversionUtilsTest {
+
+  private static final String SAMPLE_APP_NAME = "test-app";
+  private static final String SAMPLE_TRACE_ID = "82bbc81f9999543682bbc81f99995436";
+  private static final String SAMPLE_SPAN_ID = "82bbc81f99995436";
+  private static final String SAMPLE_PARENT_SPAN_ID = "82bbc81f99995436";
+  private static final Map<String, AttributeValue> attributes =
+      ImmutableMap.of("data", AttributeValue.stringAttributeValue("d1"));
+  private static final List<TimedEvent<Annotation>> annotations = Collections.emptyList();
+  private static final List<TimedEvent<MessageEvent>> messageEvents =
+      ImmutableList.of(
+          TimedEvent.create(
+              Timestamp.create(155096336, 469887399),
+              MessageEvent.builder(Type.RECEIVED, 0).setCompressedMessageSize(7).build()),
+          TimedEvent.create(
+              Timestamp.create(155096336, 469887399),
+              MessageEvent.builder(Type.SENT, 0).setCompressedMessageSize(13).build()));
+  private static final TraceOptions SAMPLE_TRACE_OPTION =
+      TraceOptions.builder().setIsSampled(true).build();
+  private static final Tracestate SAMPLE_TRACE_STATE = Tracestate.builder().build();
+  private List<SpanData> spanDataList;
+
+  @Before
+  public void setUp() {
+    SpanData spanData =
+        SpanData.create(
+            SpanContext.create(
+                TraceId.fromLowerBase16(SAMPLE_TRACE_ID),
+                SpanId.fromLowerBase16(SAMPLE_SPAN_ID),
+                SAMPLE_TRACE_OPTION,
+                SAMPLE_TRACE_STATE),
+            SpanId.fromLowerBase16(SAMPLE_PARENT_SPAN_ID),
+            true,
+            "SpanName",
+            null,
+            Timestamp.create(155196336, 194009601),
+            Attributes.create(attributes, 0),
+            TimedEvents.create(annotations, 0),
+            TimedEvents.create(messageEvents, 0),
+            Links.create(Collections.<Link>emptyList(), 0),
+            null,
+            Status.OK,
+            Timestamp.create(155296336, 465726528));
+
+    spanDataList = new ArrayList<SpanData>();
+    spanDataList.add(spanData);
+  }
+
+  @Test
+  public void testConvertToJson() {
+    List<String> json = JsonConversionUtils.convertToJson(SAMPLE_APP_NAME, spanDataList);
+    Assert.assertEquals(json.size(), spanDataList.size());
+    Assert.assertTrue(json.get(0).contains("\"appName\":\"" + SAMPLE_APP_NAME + "\""));
+    Assert.assertTrue(json.get(0).contains("\"spanId\":\"" + SAMPLE_SPAN_ID + "\""));
+  }
+}
diff --git a/exporters/trace/instana/README.md b/exporters/trace/instana/README.md
index 22ace22..ce5077d 100644
--- a/exporters/trace/instana/README.md
+++ b/exporters/trace/instana/README.md
@@ -27,17 +27,17 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-api</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-exporter-trace-instana</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-impl</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
     <scope>runtime</scope>
   </dependency>
 </dependencies>
@@ -45,9 +45,9 @@
 
 For Gradle add to your dependencies:
 ```groovy
-compile 'io.opencensus:opencensus-api:0.16.1'
-compile 'io.opencensus:opencensus-exporter-trace-instana:0.16.1'
-runtime 'io.opencensus:opencensus-impl:0.16.1'
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-exporter-trace-instana:0.28.3'
+runtime 'io.opencensus:opencensus-impl:0.28.3'
 ```
 
 #### Register the exporter
diff --git a/exporters/trace/instana/build.gradle b/exporters/trace/instana/build.gradle
index 028bc20..8d5c08b 100644
--- a/exporters/trace/instana/build.gradle
+++ b/exporters/trace/instana/build.gradle
@@ -6,10 +6,11 @@
 }
 
 dependencies {
-    compile project(':opencensus-api'),
-            libraries.guava
+    compileOnly libraries.auto_value
 
-    testCompile project(':opencensus-api')
+    compile project(':opencensus-api'),
+            project(':opencensus-exporter-trace-util'),
+            libraries.guava
 
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
     signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
diff --git a/exporters/trace/instana/src/main/java/io/opencensus/exporter/trace/instana/InstanaExporterConfiguration.java b/exporters/trace/instana/src/main/java/io/opencensus/exporter/trace/instana/InstanaExporterConfiguration.java
new file mode 100644
index 0000000..8336828
--- /dev/null
+++ b/exporters/trace/instana/src/main/java/io/opencensus/exporter/trace/instana/InstanaExporterConfiguration.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.instana;
+
+import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import io.opencensus.common.Duration;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * Configuration for {@link InstanaTraceExporter}.
+ *
+ * @since 0.22
+ */
+@AutoValue
+@Immutable
+public abstract class InstanaExporterConfiguration {
+
+  @VisibleForTesting static final Duration DEFAULT_DEADLINE = Duration.create(10, 0);
+  @VisibleForTesting static final Duration ZERO = Duration.fromMillis(0);
+
+  InstanaExporterConfiguration() {}
+
+  /**
+   * Returns the endpoint of the Instana agent.
+   *
+   * @return the endpoint of the Instana agent.
+   * @since 0.22
+   */
+  public abstract String getAgentEndpoint();
+
+  /**
+   * Returns the deadline for exporting to Instana.
+   *
+   * <p>Default value is 10 seconds.
+   *
+   * @return the export deadline.
+   * @since 0.22
+   */
+  public abstract Duration getDeadline();
+
+  /**
+   * Return a new {@link Builder}.
+   *
+   * @return a {@code Builder}
+   * @since 0.22
+   */
+  public static Builder builder() {
+    return new AutoValue_InstanaExporterConfiguration.Builder().setDeadline(DEFAULT_DEADLINE);
+  }
+
+  /**
+   * Builder for {@link InstanaExporterConfiguration}.
+   *
+   * @since 0.22
+   */
+  @AutoValue.Builder
+  public abstract static class Builder {
+
+    Builder() {}
+
+    /**
+     * Sets the endpoint of the Instana agent to send traces to, e.g.
+     * http://localhost:42699/com.instana.plugin.generic.trace.
+     *
+     * @param agentEndpoint the endpoint of the agent.
+     * @return this.
+     * @since 0.22
+     */
+    public abstract Builder setAgentEndpoint(String agentEndpoint);
+
+    /**
+     * Sets the deadline for exporting to Instana.
+     *
+     * @param deadline the export deadline.
+     * @return this
+     * @since 0.22
+     */
+    public abstract Builder setDeadline(Duration deadline);
+
+    abstract Duration getDeadline();
+
+    abstract InstanaExporterConfiguration autoBuild();
+
+    /**
+     * Builds a {@link InstanaExporterConfiguration}.
+     *
+     * @return a {@code InstanaExporterConfiguration}.
+     * @since 0.22
+     */
+    public InstanaExporterConfiguration build() {
+      Preconditions.checkArgument(getDeadline().compareTo(ZERO) > 0, "Deadline must be positive.");
+      return autoBuild();
+    }
+  }
+}
diff --git a/exporters/trace/instana/src/main/java/io/opencensus/exporter/trace/instana/InstanaExporterHandler.java b/exporters/trace/instana/src/main/java/io/opencensus/exporter/trace/instana/InstanaExporterHandler.java
index 649a026..3c54efb 100644
--- a/exporters/trace/instana/src/main/java/io/opencensus/exporter/trace/instana/InstanaExporterHandler.java
+++ b/exporters/trace/instana/src/main/java/io/opencensus/exporter/trace/instana/InstanaExporterHandler.java
@@ -23,20 +23,16 @@
 import io.opencensus.common.Duration;
 import io.opencensus.common.Function;
 import io.opencensus.common.Functions;
-import io.opencensus.common.Scope;
 import io.opencensus.common.Timestamp;
+import io.opencensus.exporter.trace.util.TimeLimitedHandler;
 import io.opencensus.trace.AttributeValue;
-import io.opencensus.trace.Sampler;
 import io.opencensus.trace.Span.Kind;
 import io.opencensus.trace.SpanContext;
 import io.opencensus.trace.SpanId;
 import io.opencensus.trace.Status;
 import io.opencensus.trace.TraceId;
-import io.opencensus.trace.Tracer;
-import io.opencensus.trace.Tracing;
 import io.opencensus.trace.export.SpanData;
-import io.opencensus.trace.export.SpanExporter;
-import io.opencensus.trace.samplers.Samplers;
+import java.io.Closeable;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -63,13 +59,13 @@
  * Major TODO is the limitation of Instana to only suport 64bit trace ids, which will be resolved.
  * Until then it is crossing fingers and treating it as 50% sampler :).
  */
-final class InstanaExporterHandler extends SpanExporter.Handler {
+final class InstanaExporterHandler extends TimeLimitedHandler {
 
-  private static final Tracer tracer = Tracing.getTracer();
-  private static final Sampler probabilitySpampler = Samplers.probabilitySampler(0.0001);
+  private static final String EXPORT_SPAN_NAME = "ExportInstanaTraces";
   private final URL agentEndpoint;
 
-  InstanaExporterHandler(URL agentEndpoint) {
+  InstanaExporterHandler(URL agentEndpoint, Duration deadline) {
+    super(deadline, EXPORT_SPAN_NAME);
     this.agentEndpoint = agentEndpoint;
   }
 
@@ -180,56 +176,36 @@
   }
 
   @Override
-  public void export(Collection<SpanData> spanDataList) {
-    // Start a new span with explicit 1/10000 sampling probability to avoid the case when user
-    // sets the default sampler to always sample and we get the gRPC span of the instana
-    // export call always sampled and go to an infinite loop.
-    Scope scope =
-        tracer.spanBuilder("ExportInstanaTraces").setSampler(probabilitySpampler).startScopedSpan();
-    try {
-      String json = convertToJson(spanDataList);
+  public void timeLimitedExport(Collection<SpanData> spanDataList) throws Exception {
+    String json = convertToJson(spanDataList);
 
-      OutputStream outputStream = null;
-      InputStream inputStream = null;
-      try {
-        HttpURLConnection connection = (HttpURLConnection) agentEndpoint.openConnection();
-        connection.setRequestMethod("POST");
-        connection.setDoOutput(true);
-        outputStream = connection.getOutputStream();
-        outputStream.write(json.getBytes(Charset.defaultCharset()));
-        outputStream.flush();
-        inputStream = connection.getInputStream();
-        if (connection.getResponseCode() != 200) {
-          tracer
-              .getCurrentSpan()
-              .setStatus(
-                  Status.UNKNOWN.withDescription("Response " + connection.getResponseCode()));
-        }
-      } catch (IOException e) {
-        tracer
-            .getCurrentSpan()
-            .setStatus(
-                Status.UNKNOWN.withDescription(
-                    e.getMessage() == null ? e.getClass().getSimpleName() : e.getMessage()));
-        // dropping span batch
-      } finally {
-        if (inputStream != null) {
-          try {
-            inputStream.close();
-          } catch (IOException e) {
-            // ignore
-          }
-        }
-        if (outputStream != null) {
-          try {
-            outputStream.close();
-          } catch (IOException e) {
-            // ignore
-          }
-        }
+    OutputStream outputStream = null;
+    InputStream inputStream = null;
+    try {
+      HttpURLConnection connection = (HttpURLConnection) agentEndpoint.openConnection();
+      connection.setRequestMethod("POST");
+      connection.setDoOutput(true);
+      outputStream = connection.getOutputStream();
+      outputStream.write(json.getBytes(Charset.defaultCharset()));
+      outputStream.flush();
+      inputStream = connection.getInputStream();
+      if (connection.getResponseCode() != 200) {
+        throw new Exception("Response " + connection.getResponseCode());
       }
     } finally {
-      scope.close();
+      closeStream(inputStream);
+      closeStream(outputStream);
+    }
+  }
+
+  // Closes an input or output stream and ignores potential IOException.
+  private static void closeStream(@javax.annotation.Nullable Closeable stream) {
+    if (stream != null) {
+      try {
+        stream.close();
+      } catch (IOException e) {
+        // ignore
+      }
     }
   }
 }
diff --git a/exporters/trace/instana/src/main/java/io/opencensus/exporter/trace/instana/InstanaTraceExporter.java b/exporters/trace/instana/src/main/java/io/opencensus/exporter/trace/instana/InstanaTraceExporter.java
index da2ce35..dcc46d3 100644
--- a/exporters/trace/instana/src/main/java/io/opencensus/exporter/trace/instana/InstanaTraceExporter.java
+++ b/exporters/trace/instana/src/main/java/io/opencensus/exporter/trace/instana/InstanaTraceExporter.java
@@ -34,7 +34,9 @@
  *
  * <pre>{@code
  * public static void main(String[] args) {
- *   InstanaTraceExporter.createAndRegister("http://localhost:42699/com.instana.plugin.generic.trace");
+ *   String agentEndpoint = "http://localhost:42699/com.instana.plugin.generic.trace";
+ *   InstanaTraceExporter.createAndRegister(
+ *     InstanaExporterConfiguration.builder().setAgentEndpoint(agentEndpoint).build());
  *   ... // Do work.
  * }
  * }</pre>
@@ -56,18 +58,37 @@
    * Creates and registers the Instana Trace exporter to the OpenCensus library. Only one Instana
    * exporter can be registered at any point.
    *
+   * @param configuration Configuration for InstanaTraceExporter.
+   * @throws MalformedURLException if the agentEndpoint is not a valid http url.
+   * @throws IllegalStateException if a Instana exporter is already registered.
+   * @since 0.22
+   */
+  public static void createAndRegister(InstanaExporterConfiguration configuration)
+      throws MalformedURLException {
+    synchronized (monitor) {
+      checkState(handler == null, "Instana exporter is already registered.");
+      Handler newHandler =
+          new InstanaExporterHandler(
+              new URL(configuration.getAgentEndpoint()), configuration.getDeadline());
+      handler = newHandler;
+      register(Tracing.getExportComponent().getSpanExporter(), newHandler);
+    }
+  }
+
+  /**
+   * Creates and registers the Instana Trace exporter to the OpenCensus library. Only one Instana
+   * exporter can be registered at any point.
+   *
    * @param agentEndpoint Ex http://localhost:42699/com.instana.plugin.generic.trace
    * @throws MalformedURLException if the agentEndpoint is not a valid http url.
    * @throws IllegalStateException if a Instana exporter is already registered.
    * @since 0.12
+   * @deprecated in favor of {@link #createAndRegister(InstanaExporterConfiguration)}.
    */
+  @Deprecated
   public static void createAndRegister(String agentEndpoint) throws MalformedURLException {
-    synchronized (monitor) {
-      checkState(handler == null, "Instana exporter is already registered.");
-      Handler newHandler = new InstanaExporterHandler(new URL(agentEndpoint));
-      handler = newHandler;
-      register(Tracing.getExportComponent().getSpanExporter(), newHandler);
-    }
+    createAndRegister(
+        InstanaExporterConfiguration.builder().setAgentEndpoint(agentEndpoint).build());
   }
 
   /**
diff --git a/exporters/trace/instana/src/test/java/io/opencensus/exporter/trace/instana/InstanaExporterHandlerTest.java b/exporters/trace/instana/src/test/java/io/opencensus/exporter/trace/instana/InstanaExporterHandlerTest.java
index 3b5e119..9f00003 100644
--- a/exporters/trace/instana/src/test/java/io/opencensus/exporter/trace/instana/InstanaExporterHandlerTest.java
+++ b/exporters/trace/instana/src/test/java/io/opencensus/exporter/trace/instana/InstanaExporterHandlerTest.java
@@ -175,4 +175,163 @@
                 + "}"
                 + "]");
   }
+
+  @Test
+  public void generateSpan_NullStatus() {
+    SpanData data =
+        SpanData.create(
+            SpanContext.create(
+                TraceId.fromLowerBase16(TRACE_ID),
+                SpanId.fromLowerBase16(SPAN_ID),
+                TraceOptions.builder().setIsSampled(true).build()),
+            SpanId.fromLowerBase16(PARENT_SPAN_ID),
+            true, /* hasRemoteParent */
+            "SpanName", /* name */
+            null, /* kind */
+            Timestamp.create(1505855794, 194009601) /* startTimestamp */,
+            Attributes.create(attributes, 0 /* droppedAttributesCount */),
+            TimedEvents.create(annotations, 0 /* droppedEventsCount */),
+            TimedEvents.create(messageEvents, 0 /* droppedEventsCount */),
+            Links.create(Collections.<Link>emptyList(), 0 /* droppedLinksCount */),
+            null, /* childSpanCount */
+            null, /* status */
+            Timestamp.create(1505855799, 465726528) /* endTimestamp */);
+
+    assertThat(InstanaExporterHandler.convertToJson(Collections.singletonList(data)))
+        .isEqualTo("[]");
+  }
+
+  @Test
+  public void generateSpan_ErrorStatus() {
+    SpanData data =
+        SpanData.create(
+            SpanContext.create(
+                TraceId.fromLowerBase16(TRACE_ID),
+                SpanId.fromLowerBase16(SPAN_ID),
+                TraceOptions.builder().setIsSampled(true).build()),
+            SpanId.fromLowerBase16(PARENT_SPAN_ID),
+            true, /* hasRemoteParent */
+            "SpanName", /* name */
+            Kind.CLIENT, /* kind */
+            Timestamp.create(1505855794, 194009601) /* startTimestamp */,
+            Attributes.create(attributes, 0 /* droppedAttributesCount */),
+            TimedEvents.create(annotations, 0 /* droppedEventsCount */),
+            TimedEvents.create(messageEvents, 0 /* droppedEventsCount */),
+            Links.create(Collections.<Link>emptyList(), 0 /* droppedLinksCount */),
+            null, /* childSpanCount */
+            Status.OUT_OF_RANGE, /* status, any but OK */
+            Timestamp.create(1505855799, 465726528) /* endTimestamp */);
+
+    assertThat(InstanaExporterHandler.convertToJson(Collections.singletonList(data)))
+        .isEqualTo(
+            "["
+                + "{"
+                + "\"spanId\":\"9cc1e3049173be09\","
+                + "\"traceId\":\"d239036e7d5cec11\","
+                + "\"parentId\":\"8b03ab423da481c5\","
+                + "\"timestamp\":1505855794194,"
+                + "\"duration\":5271,"
+                + "\"name\":\"SpanName\","
+                + "\"type\":\"EXIT\","
+                + "\"error\":true,"
+                + "\"data\":"
+                + "{\"http.url\":\"http://localhost/foo\"}"
+                + "}"
+                + "]");
+  }
+
+  @Test
+  public void generateSpan_MultipleSpans() {
+    SpanData data =
+        SpanData.create(
+            SpanContext.create(
+                TraceId.fromLowerBase16(TRACE_ID),
+                SpanId.fromLowerBase16(SPAN_ID),
+                TraceOptions.builder().setIsSampled(true).build()),
+            SpanId.fromLowerBase16(PARENT_SPAN_ID),
+            true, /* hasRemoteParent */
+            "SpanName", /* name */
+            Kind.CLIENT, /* kind */
+            Timestamp.create(1505855794, 194009601) /* startTimestamp */,
+            Attributes.create(attributes, 0 /* droppedAttributesCount */),
+            TimedEvents.create(annotations, 0 /* droppedEventsCount */),
+            TimedEvents.create(messageEvents, 0 /* droppedEventsCount */),
+            Links.create(Collections.<Link>emptyList(), 0 /* droppedLinksCount */),
+            null, /* childSpanCount */
+            Status.OK,
+            Timestamp.create(1505855799, 465726528) /* endTimestamp */);
+
+    assertThat(InstanaExporterHandler.convertToJson(Collections.nCopies(2, data)))
+        .isEqualTo(
+            "["
+                + "{"
+                + "\"spanId\":\"9cc1e3049173be09\","
+                + "\"traceId\":\"d239036e7d5cec11\","
+                + "\"parentId\":\"8b03ab423da481c5\","
+                + "\"timestamp\":1505855794194,"
+                + "\"duration\":5271,"
+                + "\"name\":\"SpanName\","
+                + "\"type\":\"EXIT\","
+                + "\"data\":"
+                + "{\"http.url\":\"http://localhost/foo\"}"
+                + "},"
+                + "{"
+                + "\"spanId\":\"9cc1e3049173be09\","
+                + "\"traceId\":\"d239036e7d5cec11\","
+                + "\"parentId\":\"8b03ab423da481c5\","
+                + "\"timestamp\":1505855794194,"
+                + "\"duration\":5271,"
+                + "\"name\":\"SpanName\","
+                + "\"type\":\"EXIT\","
+                + "\"data\":"
+                + "{\"http.url\":\"http://localhost/foo\"}"
+                + "}"
+                + "]");
+  }
+
+  @Test
+  public void generateSpan_MultipleAttributes() {
+    Map<String, AttributeValue> multipleAttributes =
+        ImmutableMap.of(
+            "http.url", AttributeValue.stringAttributeValue("http://localhost/foo"),
+            "http.method", AttributeValue.stringAttributeValue("GET"));
+
+    SpanData data =
+        SpanData.create(
+            SpanContext.create(
+                TraceId.fromLowerBase16(TRACE_ID),
+                SpanId.fromLowerBase16(SPAN_ID),
+                TraceOptions.builder().setIsSampled(true).build()),
+            SpanId.fromLowerBase16(PARENT_SPAN_ID),
+            true, /* hasRemoteParent */
+            "SpanName", /* name */
+            Kind.CLIENT, /* kind */
+            Timestamp.create(1505855794, 194009601) /* startTimestamp */,
+            Attributes.create(multipleAttributes, 0 /* droppedAttributesCount */),
+            TimedEvents.create(annotations, 0 /* droppedEventsCount */),
+            TimedEvents.create(messageEvents, 0 /* droppedEventsCount */),
+            Links.create(Collections.<Link>emptyList(), 0 /* droppedLinksCount */),
+            null, /* childSpanCount */
+            Status.OK,
+            Timestamp.create(1505855799, 465726528) /* endTimestamp */);
+
+    assertThat(InstanaExporterHandler.convertToJson(Collections.singletonList(data)))
+        .isEqualTo(
+            "["
+                + "{"
+                + "\"spanId\":\"9cc1e3049173be09\","
+                + "\"traceId\":\"d239036e7d5cec11\","
+                + "\"parentId\":\"8b03ab423da481c5\","
+                + "\"timestamp\":1505855794194,"
+                + "\"duration\":5271,"
+                + "\"name\":\"SpanName\","
+                + "\"type\":\"EXIT\","
+                + "\"data\":"
+                + "{"
+                + "\"http.url\":\"http://localhost/foo\","
+                + "\"http.method\":\"GET\""
+                + "}"
+                + "}"
+                + "]");
+  }
 }
diff --git a/exporters/trace/instana/src/test/java/io/opencensus/exporter/trace/instana/InstanaTraceExporterTest.java b/exporters/trace/instana/src/test/java/io/opencensus/exporter/trace/instana/InstanaTraceExporterTest.java
index a4d03df..026da70 100644
--- a/exporters/trace/instana/src/test/java/io/opencensus/exporter/trace/instana/InstanaTraceExporterTest.java
+++ b/exporters/trace/instana/src/test/java/io/opencensus/exporter/trace/instana/InstanaTraceExporterTest.java
@@ -16,8 +16,8 @@
 
 package io.opencensus.exporter.trace.instana;
 
-import static org.mockito.Matchers.eq;
-import static org.mockito.Matchers.same;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.ArgumentMatchers.same;
 import static org.mockito.Mockito.verify;
 
 import io.opencensus.trace.export.SpanExporter;
diff --git a/exporters/trace/jaeger/README.md b/exporters/trace/jaeger/README.md
index 7a5b68e..b72d97b 100644
--- a/exporters/trace/jaeger/README.md
+++ b/exporters/trace/jaeger/README.md
@@ -40,17 +40,17 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-api</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-exporter-trace-jaeger</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-impl</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
     <scope>runtime</scope>
   </dependency>
 </dependencies>
@@ -58,9 +58,9 @@
 
 For Gradle add to your dependencies:
 ```groovy
-compile 'io.opencensus:opencensus-api:0.16.1'
-compile 'io.opencensus:opencensus-exporter-trace-jaeger:0.16.1'
-runtime 'io.opencensus:opencensus-impl:0.16.1'
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-exporter-trace-jaeger:0.28.3'
+runtime 'io.opencensus:opencensus-impl:0.28.3'
 ```
 
 #### Register the exporter
diff --git a/exporters/trace/jaeger/build.gradle b/exporters/trace/jaeger/build.gradle
index 04829aa..005c762 100644
--- a/exporters/trace/jaeger/build.gradle
+++ b/exporters/trace/jaeger/build.gradle
@@ -5,19 +5,11 @@
     it.targetCompatibility = 1.6
 }
 
-// Docker tests require JDK 8+
-sourceSets {
-    test {
-        java {
-            if (!JavaVersion.current().isJava8Compatible()) {
-                exclude '**/JaegerExporterHandlerIntegrationTest.java'
-            }
-        }
-    }
-}
-
 dependencies {
+    compileOnly libraries.auto_value
+
     compile project(':opencensus-api'),
+            project(':opencensus-exporter-trace-util'),
             libraries.guava
 
     compile(libraries.jaeger_reporter) {
@@ -25,8 +17,7 @@
         exclude group: 'com.google.guava', module: 'guava'
     }
 
-    testCompile project(':opencensus-api'),
-            'org.testcontainers:testcontainers:1.7.0',
+    testCompile 'org.testcontainers:testcontainers:1.7.0',
             'com.google.http-client:google-http-client-gson:1.23.0'
 
     // Unless linked to impl, spans will be blank and not exported during integration tests.
diff --git a/exporters/trace/jaeger/src/main/java/io/opencensus/exporter/trace/jaeger/JaegerExporterConfiguration.java b/exporters/trace/jaeger/src/main/java/io/opencensus/exporter/trace/jaeger/JaegerExporterConfiguration.java
new file mode 100644
index 0000000..e33d3d3
--- /dev/null
+++ b/exporters/trace/jaeger/src/main/java/io/opencensus/exporter/trace/jaeger/JaegerExporterConfiguration.java
@@ -0,0 +1,166 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.jaeger;
+
+import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import io.jaegertracing.thrift.internal.senders.ThriftSender;
+import io.opencensus.common.Duration;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * Configurations for {@link JaegerTraceExporter}.
+ *
+ * @since 0.22
+ */
+@AutoValue
+@Immutable
+public abstract class JaegerExporterConfiguration {
+
+  @VisibleForTesting static final Duration DEFAULT_DEADLINE = Duration.create(10, 0);
+
+  JaegerExporterConfiguration() {}
+
+  /**
+   * Returns the service name.
+   *
+   * @return the service name.
+   * @since 0.22
+   */
+  public abstract String getServiceName();
+
+  /**
+   * Returns the Thrift endpoint of your Jaeger instance.
+   *
+   * @return the Thrift endpoint.
+   * @since 0.22
+   */
+  public abstract String getThriftEndpoint();
+
+  /**
+   * Returns the Thrift sender.
+   *
+   * @return the Thrift sender.
+   * @since 0.22
+   */
+  @Nullable
+  public abstract ThriftSender getThriftSender();
+
+  /**
+   * Returns the deadline for exporting to Jaeger.
+   *
+   * <p>Default value is 10 seconds.
+   *
+   * @return the export deadline.
+   * @since 0.22
+   */
+  public abstract Duration getDeadline();
+
+  /**
+   * Returns a new {@link Builder}.
+   *
+   * @return a {@code Builder}.
+   * @since 0.22
+   */
+  public static Builder builder() {
+    return new AutoValue_JaegerExporterConfiguration.Builder()
+        .setThriftEndpoint("")
+        .setDeadline(DEFAULT_DEADLINE);
+  }
+
+  /**
+   * Builder for {@link JaegerExporterConfiguration}.
+   *
+   * @since 0.22
+   */
+  @AutoValue.Builder
+  public abstract static class Builder {
+
+    @VisibleForTesting static final Duration ZERO = Duration.fromMillis(0);
+
+    Builder() {}
+
+    /**
+     * Sets the service name.
+     *
+     * @param serviceName the service name.
+     * @return this.
+     * @since 0.22
+     */
+    public abstract Builder setServiceName(String serviceName);
+
+    /**
+     * Sets the Thrift endpoint of your Jaeger instance. e.g.: "http://127.0.0.1:14268/api/traces".
+     *
+     * <p>At least one of {@code ThriftEndpoint} and {@code ThriftSender} needs to be specified. If
+     * both {@code ThriftEndpoint} and {@code ThriftSender} are set, {@code ThriftSender} takes
+     * precedence.
+     *
+     * @param thriftEndpoint the Thrift endpoint.
+     * @return this.
+     * @since 0.22
+     */
+    public abstract Builder setThriftEndpoint(String thriftEndpoint);
+
+    /**
+     * Sets the Thrift sender.
+     *
+     * <p>At least one of {@code ThriftEndpoint} and {@code ThriftSender} needs to be specified. If
+     * both {@code ThriftEndpoint} and {@code ThriftSender} are set, {@code ThriftSender} takes
+     * precedence.
+     *
+     * @param sender the Thrift sender.
+     * @return this.
+     * @since 0.22
+     */
+    public abstract Builder setThriftSender(ThriftSender sender);
+
+    /**
+     * Sets the deadline for exporting to Jaeger.
+     *
+     * @param deadline the export deadline.
+     * @return this
+     * @since 0.22
+     */
+    public abstract Builder setDeadline(Duration deadline);
+
+    abstract Duration getDeadline();
+
+    abstract String getThriftEndpoint();
+
+    @Nullable
+    abstract ThriftSender getThriftSender();
+
+    abstract JaegerExporterConfiguration autoBuild();
+
+    /**
+     * Builds a {@link JaegerExporterConfiguration}.
+     *
+     * @return a {@code JaegerExporterConfiguration}.
+     * @since 0.22
+     */
+    public JaegerExporterConfiguration build() {
+      Preconditions.checkArgument(getDeadline().compareTo(ZERO) > 0, "Deadline must be positive.");
+      Preconditions.checkArgument(
+          !getThriftEndpoint().isEmpty() || getThriftSender() != null,
+          "Neither Thrift endpoint nor Thrift sender is specified.");
+      return autoBuild();
+    }
+  }
+}
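
A hedged usage sketch for the builder above (the class name and values are illustrative): a pre-built ThriftSender takes precedence over the Thrift endpoint, and build() rejects a non-positive deadline as well as a configuration with neither endpoint nor sender.

```java
import io.jaegertracing.thrift.internal.senders.HttpSender;
import io.opencensus.common.Duration;
import io.opencensus.exporter.trace.jaeger.JaegerExporterConfiguration;

public final class JaegerConfigSketch {
  public static void main(String[] args) {
    JaegerExporterConfiguration config =
        JaegerExporterConfiguration.builder()
            .setServiceName("my-service")
            // A sender, if set, wins over any Thrift endpoint also set on the builder.
            .setThriftSender(
                new HttpSender.Builder("http://127.0.0.1:14268/api/traces").build())
            .setDeadline(Duration.create(30, 0)) // export deadline; defaults to 10 seconds
            .build();
    System.out.println(config.getServiceName());

    // Both of these fail validation in build() with IllegalArgumentException:
    //   builder().setServiceName("svc").build();                    // no endpoint or sender
    //   builder().setServiceName("svc").setThriftEndpoint("e")
    //            .setDeadline(Duration.create(0, 0)).build();       // non-positive deadline
  }
}
```
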
diff --git a/exporters/trace/jaeger/src/main/java/io/opencensus/exporter/trace/jaeger/JaegerExporterHandler.java b/exporters/trace/jaeger/src/main/java/io/opencensus/exporter/trace/jaeger/JaegerExporterHandler.java
index e0a1629..3ef7ee7 100644
--- a/exporters/trace/jaeger/src/main/java/io/opencensus/exporter/trace/jaeger/JaegerExporterHandler.java
+++ b/exporters/trace/jaeger/src/main/java/io/opencensus/exporter/trace/jaeger/JaegerExporterHandler.java
@@ -21,59 +21,57 @@
 import static java.util.concurrent.TimeUnit.NANOSECONDS;
 import static java.util.concurrent.TimeUnit.SECONDS;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Lists;
 import com.google.common.primitives.Ints;
 import com.google.common.primitives.Longs;
-import com.google.errorprone.annotations.MustBeClosed;
-import com.uber.jaeger.exceptions.SenderException;
-import com.uber.jaeger.senders.HttpSender;
-import com.uber.jaeger.thriftjava.Log;
-import com.uber.jaeger.thriftjava.Process;
-import com.uber.jaeger.thriftjava.Span;
-import com.uber.jaeger.thriftjava.SpanRef;
-import com.uber.jaeger.thriftjava.SpanRefType;
-import com.uber.jaeger.thriftjava.Tag;
-import com.uber.jaeger.thriftjava.TagType;
+import io.jaegertracing.internal.exceptions.SenderException;
+import io.jaegertracing.thrift.internal.senders.ThriftSender;
+import io.jaegertracing.thriftjava.Log;
+import io.jaegertracing.thriftjava.Process;
+import io.jaegertracing.thriftjava.Span;
+import io.jaegertracing.thriftjava.SpanRef;
+import io.jaegertracing.thriftjava.SpanRefType;
+import io.jaegertracing.thriftjava.Tag;
+import io.jaegertracing.thriftjava.TagType;
+import io.opencensus.common.Duration;
 import io.opencensus.common.Function;
-import io.opencensus.common.Scope;
 import io.opencensus.common.Timestamp;
+import io.opencensus.exporter.trace.util.TimeLimitedHandler;
 import io.opencensus.trace.Annotation;
 import io.opencensus.trace.AttributeValue;
 import io.opencensus.trace.Link;
-import io.opencensus.trace.Sampler;
+import io.opencensus.trace.MessageEvent;
+import io.opencensus.trace.MessageEvent.Type;
 import io.opencensus.trace.SpanContext;
 import io.opencensus.trace.SpanId;
 import io.opencensus.trace.Status;
 import io.opencensus.trace.TraceId;
 import io.opencensus.trace.TraceOptions;
-import io.opencensus.trace.Tracer;
-import io.opencensus.trace.Tracing;
 import io.opencensus.trace.export.SpanData;
-import io.opencensus.trace.export.SpanExporter;
-import io.opencensus.trace.samplers.Samplers;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 import javax.annotation.Nullable;
 import javax.annotation.concurrent.NotThreadSafe;
 
 @NotThreadSafe
-final class JaegerExporterHandler extends SpanExporter.Handler {
+final class JaegerExporterHandler extends TimeLimitedHandler {
   private static final String EXPORT_SPAN_NAME = "ExportJaegerTraces";
-  private static final String DESCRIPTION = "description";
-
-  private static final Logger logger = Logger.getLogger(JaegerExporterHandler.class.getName());
-
-  /**
-   * Sampler with low probability used during the export in order to avoid the case when user sets
-   * the default sampler to always sample and we get the Thrift span of the Jaeger export call
-   * always sampled and go to an infinite loop.
-   */
-  private static final Sampler lowProbabilitySampler = Samplers.probabilitySampler(0.0001);
-
-  private static final Tracer tracer = Tracing.getTracer();
+  @VisibleForTesting static final String SPAN_KIND = "span.kind";
+  private static final Tag SERVER_KIND_TAG = new Tag(SPAN_KIND, TagType.STRING).setVStr("server");
+  private static final Tag CLIENT_KIND_TAG = new Tag(SPAN_KIND, TagType.STRING).setVStr("client");
+  private static final String DESCRIPTION = "message";
+  private static final Tag RECEIVED_MESSAGE_EVENT_TAG =
+      new Tag(DESCRIPTION, TagType.STRING).setVStr("received message");
+  private static final Tag SENT_MESSAGE_EVENT_TAG =
+      new Tag(DESCRIPTION, TagType.STRING).setVStr("sent message");
+  private static final String MESSAGE_EVENT_ID = "id";
+  private static final String MESSAGE_EVENT_COMPRESSED_SIZE = "compressed_size";
+  private static final String MESSAGE_EVENT_UNCOMPRESSED_SIZE = "uncompressed_size";
+  @VisibleForTesting static final String STATUS_CODE = "status.code";
+  @VisibleForTesting static final String STATUS_MESSAGE = "status.message";
 
   private static final Function<? super String, Tag> stringAttributeConverter =
       new Function<String, Tag>() {
@@ -137,46 +135,21 @@
   private final byte[] traceIdBuffer = new byte[TraceId.SIZE];
   private final byte[] optionsBuffer = new byte[Integer.SIZE / Byte.SIZE];
 
-  private final HttpSender sender;
+  private final ThriftSender sender;
   private final Process process;
 
-  JaegerExporterHandler(final HttpSender sender, final Process process) {
+  JaegerExporterHandler(final ThriftSender sender, final Process process, Duration deadline) {
+    super(deadline, EXPORT_SPAN_NAME);
     this.sender = checkNotNull(sender, "Jaeger sender must NOT be null.");
     this.process = checkNotNull(process, "Process sending traces must NOT be null.");
   }
 
   @Override
-  public void export(final Collection<SpanData> spanDataList) {
-    final Scope exportScope = newExportScope();
-    try {
-      doExport(spanDataList);
-    } catch (SenderException e) {
-      tracer
-          .getCurrentSpan() // exportScope above.
-          .setStatus(Status.UNKNOWN.withDescription(getMessageOrDefault(e)));
-      logger.log(Level.WARNING, "Failed to export traces to Jaeger: " + e);
-    } finally {
-      exportScope.close();
-    }
-  }
-
-  @MustBeClosed
-  private static Scope newExportScope() {
-    // Start a new span with explicit sampler (with low probability) to avoid the case when user
-    // sets the default sampler to always sample and we get the Thrift span of the Jaeger
-    // export call always sampled and go to an infinite loop.
-    return tracer.spanBuilder(EXPORT_SPAN_NAME).setSampler(lowProbabilitySampler).startScopedSpan();
-  }
-
-  private void doExport(final Collection<SpanData> spanDataList) throws SenderException {
+  public void timeLimitedExport(final Collection<SpanData> spanDataList) throws SenderException {
     final List<Span> spans = spanDataToJaegerThriftSpans(spanDataList);
     sender.send(process, spans);
   }
 
-  private static String getMessageOrDefault(final SenderException e) {
-    return e.getMessage() == null ? e.getClass().getSimpleName() : e.getMessage();
-  }
-
   private List<Span> spanDataToJaegerThriftSpans(final Collection<SpanData> spanDataList) {
     final List<Span> spans = Lists.newArrayListWithExpectedSize(spanDataList.size());
     for (final SpanData spanData : spanDataList) {
@@ -192,7 +165,12 @@
     final SpanContext context = spanData.getContext();
     copyToBuffer(context.getTraceId());
 
-    return new com.uber.jaeger.thriftjava.Span(
+    List<Tag> tags =
+        attributesToTags(
+            spanData.getAttributes().getAttributeMap(), spanKindToTag(spanData.getKind()));
+    addStatusTags(tags, spanData.getStatus());
+
+    return new io.jaegertracing.thriftjava.Span(
             traceIdLow(),
             traceIdHigh(),
             spanIdToLong(context.getSpanId()),
@@ -202,8 +180,10 @@
             startTimeInMicros,
             endTimeInMicros - startTimeInMicros)
         .setReferences(linksToReferences(spanData.getLinks().getLinks()))
-        .setTags(attributesToTags(spanData.getAttributes().getAttributeMap()))
-        .setLogs(annotationEventsToLogs(spanData.getAnnotations().getEvents()));
+        .setTags(tags)
+        .setLogs(
+            timedEventsToLogs(
+                spanData.getAnnotations().getEvents(), spanData.getMessageEvents().getEvents()));
   }
 
   private void copyToBuffer(final TraceId traceId) {
@@ -282,8 +262,9 @@
         format("Failed to convert link type [%s] to a Jaeger SpanRefType.", type));
   }
 
-  private static List<Tag> attributesToTags(final Map<String, AttributeValue> attributes) {
-    final List<Tag> tags = Lists.newArrayListWithExpectedSize(attributes.size());
+  private static List<Tag> attributesToTags(
+      final Map<String, AttributeValue> attributes, @Nullable final Tag extraTag) {
+    final List<Tag> tags = Lists.newArrayListWithExpectedSize(attributes.size() + 1);
     for (final Map.Entry<String, AttributeValue> entry : attributes.entrySet()) {
       final Tag tag =
           entry
@@ -297,18 +278,46 @@
       tag.setKey(entry.getKey());
       tags.add(tag);
     }
+    if (extraTag != null) {
+      tags.add(extraTag);
+    }
     return tags;
   }
 
-  private static List<Log> annotationEventsToLogs(
-      final List<SpanData.TimedEvent<Annotation>> events) {
-    final List<Log> logs = Lists.newArrayListWithExpectedSize(events.size());
-    for (final SpanData.TimedEvent<Annotation> event : events) {
+  private static List<Log> timedEventsToLogs(
+      final List<SpanData.TimedEvent<Annotation>> annotations,
+      final List<SpanData.TimedEvent<MessageEvent>> messageEvents) {
+    final List<Log> logs =
+        Lists.newArrayListWithExpectedSize(annotations.size() + messageEvents.size());
+    for (final SpanData.TimedEvent<Annotation> event : annotations) {
       final long timestampsInMicros = timestampToMicros(event.getTimestamp());
-      final List<Tag> tags = attributesToTags(event.getEvent().getAttributes());
-      tags.add(descriptionToTag(event.getEvent().getDescription()));
-      final Log log = new Log(timestampsInMicros, tags);
-      logs.add(log);
+      logs.add(
+          new Log(
+              timestampsInMicros,
+              attributesToTags(
+                  event.getEvent().getAttributes(),
+                  descriptionToTag(event.getEvent().getDescription()))));
+    }
+    for (final SpanData.TimedEvent<MessageEvent> event : messageEvents) {
+      final long timestampsInMicros = timestampToMicros(event.getTimestamp());
+      final Tag tagMessageId =
+          new Tag(MESSAGE_EVENT_ID, TagType.LONG).setVLong(event.getEvent().getMessageId());
+      final Tag tagCompressedSize =
+          new Tag(MESSAGE_EVENT_COMPRESSED_SIZE, TagType.LONG)
+              .setVLong(event.getEvent().getCompressedMessageSize());
+      final Tag tagUncompressedSize =
+          new Tag(MESSAGE_EVENT_UNCOMPRESSED_SIZE, TagType.LONG)
+              .setVLong(event.getEvent().getUncompressedMessageSize());
+      logs.add(
+          new Log(
+              timestampsInMicros,
+              Arrays.asList(
+                  event.getEvent().getType() == Type.RECEIVED
+                      ? RECEIVED_MESSAGE_EVENT_TAG
+                      : SENT_MESSAGE_EVENT_TAG,
+                  tagMessageId,
+                  tagCompressedSize,
+                  tagUncompressedSize)));
     }
     return logs;
   }
@@ -318,4 +327,30 @@
     tag.setVStr(description);
     return tag;
   }
+
+  @Nullable
+  private static Tag spanKindToTag(@Nullable final io.opencensus.trace.Span.Kind kind) {
+    if (kind == null) {
+      return null;
+    }
+
+    switch (kind) {
+      case CLIENT:
+        return CLIENT_KIND_TAG;
+      case SERVER:
+        return SERVER_KIND_TAG;
+    }
+    return null;
+  }
+
+  private static void addStatusTags(List<Tag> tags, @Nullable Status status) {
+    if (status == null) {
+      return;
+    }
+    Tag statusTag = new Tag(STATUS_CODE, TagType.LONG).setVLong(status.getCanonicalCode().value());
+    tags.add(statusTag);
+    if (status.getDescription() != null) {
+      tags.add(new Tag(STATUS_MESSAGE, TagType.STRING).setVStr(status.getDescription()));
+    }
+  }
 }
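
As an illustration of the tag mapping introduced above (a standalone sketch, not code from this patch): the span kind becomes a `span.kind` string tag, and the span status becomes `status.code`/`status.message` tags.

```java
import io.jaegertracing.thriftjava.Tag;
import io.jaegertracing.thriftjava.TagType;
import io.opencensus.trace.Span.Kind;
import io.opencensus.trace.Status;
import java.util.ArrayList;
import java.util.List;

public final class JaegerTagMappingSketch {
  public static void main(String[] args) {
    Kind kind = Kind.SERVER;
    Status status = Status.DEADLINE_EXCEEDED.withDescription("timeout");

    List<Tag> tags = new ArrayList<Tag>();
    // Span kind maps to a string tag with value "server" or "client".
    tags.add(new Tag("span.kind", TagType.STRING)
        .setVStr(kind == Kind.SERVER ? "server" : "client"));
    // The canonical status code is exported as a long (DEADLINE_EXCEEDED is 4).
    tags.add(new Tag("status.code", TagType.LONG)
        .setVLong(status.getCanonicalCode().value()));
    // The description, when present, is exported as "status.message".
    if (status.getDescription() != null) {
      tags.add(new Tag("status.message", TagType.STRING).setVStr(status.getDescription()));
    }
    System.out.println(tags);
  }
}
```
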
diff --git a/exporters/trace/jaeger/src/main/java/io/opencensus/exporter/trace/jaeger/JaegerTraceExporter.java b/exporters/trace/jaeger/src/main/java/io/opencensus/exporter/trace/jaeger/JaegerTraceExporter.java
index 4890f01..e9af69b 100644
--- a/exporters/trace/jaeger/src/main/java/io/opencensus/exporter/trace/jaeger/JaegerTraceExporter.java
+++ b/exporters/trace/jaeger/src/main/java/io/opencensus/exporter/trace/jaeger/JaegerTraceExporter.java
@@ -19,8 +19,9 @@
 import static com.google.common.base.Preconditions.checkState;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.uber.jaeger.senders.HttpSender;
-import com.uber.jaeger.thriftjava.Process;
+import io.jaegertracing.thrift.internal.senders.HttpSender;
+import io.jaegertracing.thrift.internal.senders.ThriftSender;
+import io.jaegertracing.thriftjava.Process;
 import io.opencensus.trace.Tracing;
 import io.opencensus.trace.export.SpanExporter;
 import javax.annotation.Nullable;
@@ -31,7 +32,11 @@
  *
  * <pre>{@code
  * public static void main(String[] args) {
- *   JaegerTraceExporter.createAndRegister("http://127.0.0.1:14268/api/traces", "myservicename");
+ *   JaegerTraceExporter.createAndRegister(
+ *     JaegerExporterConfiguration.builder()
+ *       .setThriftEndpoint("http://127.0.0.1:14268/api/traces")
+ *       .setServiceName("myservicename")
+ *       .build());
  *     ... // Do work.
  *   }
  * }</pre>
@@ -50,6 +55,29 @@
   private JaegerTraceExporter() {}
 
   /**
+   * Creates and registers the Jaeger Trace exporter to the OpenCensus library using the provided
+   * configurations.
+   *
+   * @param configuration configurations for this exporter.
+   * @throws IllegalStateException if a Jaeger exporter is already registered.
+   * @since 0.22
+   */
+  public static void createAndRegister(JaegerExporterConfiguration configuration) {
+    synchronized (monitor) {
+      checkState(handler == null, "Jaeger exporter is already registered.");
+      ThriftSender sender = configuration.getThriftSender();
+      if (sender == null) {
+        sender = new HttpSender.Builder(configuration.getThriftEndpoint()).build();
+      }
+      Process process = new Process(configuration.getServiceName());
+      SpanExporter.Handler newHandler =
+          new JaegerExporterHandler(sender, process, configuration.getDeadline());
+      JaegerTraceExporter.handler = newHandler;
+      register(Tracing.getExportComponent().getSpanExporter(), newHandler);
+    }
+  }
+
+  /**
    * Creates and registers the Jaeger Trace exporter to the OpenCensus library. Only one Jaeger
    * exporter can be registered at any point.
    *
@@ -58,45 +86,34 @@
    * @param serviceName the local service name of the process.
    * @throws IllegalStateException if a Jaeger exporter is already registered.
    * @since 0.13
+   * @deprecated in favor of {@link #createAndRegister(JaegerExporterConfiguration)}.
    */
+  @Deprecated
   public static void createAndRegister(final String thriftEndpoint, final String serviceName) {
-    synchronized (monitor) {
-      checkState(handler == null, "Jaeger exporter is already registered.");
-      final SpanExporter.Handler newHandler = newHandler(thriftEndpoint, serviceName);
-      JaegerTraceExporter.handler = newHandler;
-      register(Tracing.getExportComponent().getSpanExporter(), newHandler);
-    }
+    createAndRegister(
+        JaegerExporterConfiguration.builder()
+            .setThriftEndpoint(thriftEndpoint)
+            .setServiceName(serviceName)
+            .build());
   }
 
   /**
    * Creates and registers the Jaeger Trace exporter to the OpenCensus library using the provided
    * HttpSender. Only one Jaeger exporter can be registered at any point.
    *
-   * @param httpSender the pre-configured HttpSender to use with the exporter
+   * @param sender the pre-configured ThriftSender to use with the exporter
    * @param serviceName the local service name of the process.
    * @throws IllegalStateException if a Jaeger exporter is already registered.
    * @since 0.17
+   * @deprecated in favor of {@link #createAndRegister(JaegerExporterConfiguration)}.
    */
-  public static void createWithSender(final HttpSender httpSender, final String serviceName) {
-    synchronized (monitor) {
-      checkState(handler == null, "Jaeger exporter is already registered.");
-      final SpanExporter.Handler newHandler = newHandlerWithSender(httpSender, serviceName);
-      JaegerTraceExporter.handler = newHandler;
-      register(Tracing.getExportComponent().getSpanExporter(), newHandler);
-    }
-  }
-
-  private static SpanExporter.Handler newHandler(
-      final String thriftEndpoint, final String serviceName) {
-    final HttpSender sender = new HttpSender(thriftEndpoint);
-    final Process process = new Process(serviceName);
-    return new JaegerExporterHandler(sender, process);
-  }
-
-  private static SpanExporter.Handler newHandlerWithSender(
-      final HttpSender sender, final String serviceName) {
-    final Process process = new Process(serviceName);
-    return new JaegerExporterHandler(sender, process);
+  @Deprecated
+  public static void createWithSender(final ThriftSender sender, final String serviceName) {
+    createAndRegister(
+        JaegerExporterConfiguration.builder()
+            .setThriftSender(sender)
+            .setServiceName(serviceName)
+            .build());
   }
 
   /**
diff --git a/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerExporterConfigurationTest.java b/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerExporterConfigurationTest.java
new file mode 100644
index 0000000..4a99bb5
--- /dev/null
+++ b/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerExporterConfigurationTest.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.jaeger;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.jaegertracing.thrift.internal.senders.ThriftSender;
+import io.opencensus.common.Duration;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+
+/** Unit tests for {@link JaegerExporterConfiguration}. */
+@RunWith(JUnit4.class)
+public class JaegerExporterConfigurationTest {
+
+  private static final String SERVICE = "service";
+  private static final String END_POINT = "endpoint";
+  private static final Duration ONE_MIN = Duration.create(60, 0);
+  private static final Duration NEG_ONE_MIN = Duration.create(-60, 0);
+
+  @Mock private static final ThriftSender mockSender = Mockito.mock(ThriftSender.class);
+
+  @Rule public final ExpectedException thrown = ExpectedException.none();
+
+  @Test
+  public void updateConfigs() {
+    JaegerExporterConfiguration configuration =
+        JaegerExporterConfiguration.builder()
+            .setServiceName(SERVICE)
+            .setDeadline(ONE_MIN)
+            .setThriftSender(mockSender)
+            .setThriftEndpoint(END_POINT)
+            .build();
+    assertThat(configuration.getServiceName()).isEqualTo(SERVICE);
+    assertThat(configuration.getDeadline()).isEqualTo(ONE_MIN);
+    assertThat(configuration.getThriftEndpoint()).isEqualTo(END_POINT);
+    assertThat(configuration.getThriftSender()).isEqualTo(mockSender);
+  }
+
+  @Test
+  public void needEitherThriftEndpointOrSender() {
+    JaegerExporterConfiguration.Builder builder =
+        JaegerExporterConfiguration.builder().setServiceName(SERVICE);
+    thrown.expect(IllegalArgumentException.class);
+    builder.build();
+  }
+
+  @Test
+  public void disallowZeroDuration() {
+    JaegerExporterConfiguration.Builder builder =
+        JaegerExporterConfiguration.builder().setServiceName(SERVICE);
+    builder.setDeadline(JaegerExporterConfiguration.Builder.ZERO);
+    thrown.expect(IllegalArgumentException.class);
+    builder.build();
+  }
+
+  @Test
+  public void disallowNegativeDuration() {
+    JaegerExporterConfiguration.Builder builder =
+        JaegerExporterConfiguration.builder().setServiceName(SERVICE);
+    builder.setDeadline(NEG_ONE_MIN);
+    thrown.expect(IllegalArgumentException.class);
+    builder.build();
+  }
+}
diff --git a/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerExporterHandlerIntegrationTest.java b/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerExporterHandlerIntegrationTest.java
index 9d6a797..151d155 100644
--- a/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerExporterHandlerIntegrationTest.java
+++ b/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerExporterHandlerIntegrationTest.java
@@ -21,6 +21,7 @@
 import static java.lang.String.format;
 import static java.lang.System.currentTimeMillis;
 import static java.util.concurrent.TimeUnit.MILLISECONDS;
+import static java.util.concurrent.TimeUnit.NANOSECONDS;
 
 import com.google.api.client.http.GenericUrl;
 import com.google.api.client.http.HttpRequest;
@@ -52,7 +53,7 @@
 import org.testcontainers.containers.wait.strategy.HttpWaitStrategy;
 
 public class JaegerExporterHandlerIntegrationTest {
-  private static final String JAEGER_IMAGE = "jaegertracing/all-in-one:1.3";
+  private static final String JAEGER_IMAGE = "jaegertracing/all-in-one:1.6";
   private static final int JAEGER_HTTP_PORT = 16686;
   private static final int JAEGER_HTTP_PORT_THRIFT = 14268;
   private static final String SERVICE_NAME = "test";
@@ -96,9 +97,10 @@
   }
 
   @Test
-  public void exportToJaeger() throws InterruptedException, IOException {
+  public void exportToJaeger() throws IOException, InterruptedException {
     Tracer tracer = Tracing.getTracer();
-    final long startTimeInMillis = currentTimeMillis();
+    final long startTimeInMicros = MILLISECONDS.toMicros(currentTimeMillis());
+    final long startNanoTime = System.nanoTime();
 
     SpanBuilder spanBuilder =
         tracer.spanBuilder(SPAN_NAME).setRecordEvents(true).setSampler(Samplers.alwaysSample());
@@ -118,12 +120,17 @@
       scopedSpan.close();
     }
 
-    logger.info("Wait longer than the reporting duration...");
-    // Wait for a duration longer than reporting duration (5s) to ensure spans are exported.
-    long timeWaitingForSpansToBeExportedInMillis = 5100L;
-    Thread.sleep(timeWaitingForSpansToBeExportedInMillis);
+    final long durationInMicros = NANOSECONDS.toMicros(System.nanoTime() - startNanoTime);
+    final long endTimeInMicros = startTimeInMicros + durationInMicros;
+
+    // Shutdown the export component to force a flush. This will cause problems if multiple tests
+    // are added in this class, but this is not the case for the moment.
+    Tracing.getExportComponent().shutdown();
     JaegerTraceExporter.unregister();
-    final long endTimeInMillis = currentTimeMillis();
+
+    logger.info("Wait for Jaeger to process the span...");
+    long timeWaitingForSpansToBeExportedInMillis = 1100L;
+    Thread.sleep(timeWaitingForSpansToBeExportedInMillis);
 
     // Get traces recorded by Jaeger:
     HttpRequest request =
@@ -159,45 +166,46 @@
     assertThat(span.get("flags").getAsInt()).isEqualTo(1);
     assertThat(span.get("operationName").getAsString()).isEqualTo(SPAN_NAME);
     assertThat(span.get("references").getAsJsonArray()).isEmpty();
-    assertThat(span.get("startTime").getAsLong())
-        .isAtLeast(MILLISECONDS.toMicros(startTimeInMillis));
-    assertThat(span.get("startTime").getAsLong()).isAtMost(MILLISECONDS.toMicros(endTimeInMillis));
+    assertThat(span.get("startTime").getAsLong()).isAtLeast(startTimeInMicros);
+    assertThat(span.get("startTime").getAsLong()).isAtMost(endTimeInMicros);
     assertThat(span.get("duration").getAsLong())
         .isAtLeast(MILLISECONDS.toMicros(spanDurationInMillis));
-    assertThat(span.get("duration").getAsLong())
-        .isAtMost(
-            MILLISECONDS.toMicros(spanDurationInMillis + timeWaitingForSpansToBeExportedInMillis));
+    assertThat(span.get("duration").getAsLong()).isAtMost(durationInMicros);
 
     JsonArray tags = span.get("tags").getAsJsonArray();
-    assertThat(tags.size()).isEqualTo(1);
+    assertThat(tags.size()).isEqualTo(2);
     JsonObject tag = tags.get(0).getAsJsonObject();
     assertThat(tag.get("key").getAsString()).isEqualTo("foo");
     assertThat(tag.get("type").getAsString()).isEqualTo("string");
     assertThat(tag.get("value").getAsString()).isEqualTo("bar");
+    JsonObject statusTag = tags.get(1).getAsJsonObject();
+    assertThat(statusTag.get("key").getAsString()).isEqualTo(JaegerExporterHandler.STATUS_CODE);
+    assertThat(statusTag.get("type").getAsString()).isEqualTo("int64");
+    assertThat(statusTag.get("value").getAsLong()).isEqualTo(0);
 
     JsonArray logs = span.get("logs").getAsJsonArray();
     assertThat(logs.size()).isEqualTo(2);
 
     JsonObject log1 = logs.get(0).getAsJsonObject();
     long ts1 = log1.get("timestamp").getAsLong();
-    assertThat(ts1).isAtLeast(MILLISECONDS.toMicros(startTimeInMillis));
-    assertThat(ts1).isAtMost(MILLISECONDS.toMicros(endTimeInMillis));
+    assertThat(ts1).isAtLeast(startTimeInMicros);
+    assertThat(ts1).isAtMost(endTimeInMicros);
     JsonArray fields1 = log1.get("fields").getAsJsonArray();
     assertThat(fields1.size()).isEqualTo(1);
     JsonObject field1 = fields1.get(0).getAsJsonObject();
-    assertThat(field1.get("key").getAsString()).isEqualTo("description");
+    assertThat(field1.get("key").getAsString()).isEqualTo("message");
     assertThat(field1.get("type").getAsString()).isEqualTo("string");
     assertThat(field1.get("value").getAsString()).isEqualTo(START_PROCESSING_VIDEO);
 
     JsonObject log2 = logs.get(1).getAsJsonObject();
     long ts2 = log2.get("timestamp").getAsLong();
-    assertThat(ts2).isAtLeast(MILLISECONDS.toMicros(startTimeInMillis));
-    assertThat(ts2).isAtMost(MILLISECONDS.toMicros(endTimeInMillis));
+    assertThat(ts2).isAtLeast(startTimeInMicros);
+    assertThat(ts2).isAtMost(endTimeInMicros);
     assertThat(ts2).isAtLeast(ts1);
     JsonArray fields2 = log2.get("fields").getAsJsonArray();
     assertThat(fields2.size()).isEqualTo(1);
     JsonObject field2 = fields2.get(0).getAsJsonObject();
-    assertThat(field2.get("key").getAsString()).isEqualTo("description");
+    assertThat(field2.get("key").getAsString()).isEqualTo("message");
     assertThat(field2.get("type").getAsString()).isEqualTo("string");
     assertThat(field2.get("value").getAsString()).isEqualTo(FINISHED_PROCESSING_VIDEO);
 
diff --git a/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerExporterHandlerTest.java b/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerExporterHandlerTest.java
index f918f01..c4f71fd 100644
--- a/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerExporterHandlerTest.java
+++ b/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerExporterHandlerTest.java
@@ -17,40 +17,45 @@
 package io.opencensus.exporter.trace.jaeger;
 
 import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.exporter.trace.jaeger.JaegerExporterConfiguration.DEFAULT_DEADLINE;
 import static java.util.Collections.singletonList;
 import static java.util.concurrent.TimeUnit.MILLISECONDS;
-import static org.mockito.Matchers.eq;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
-import com.uber.jaeger.exceptions.SenderException;
-import com.uber.jaeger.senders.HttpSender;
-import com.uber.jaeger.thriftjava.Log;
-import com.uber.jaeger.thriftjava.Process;
-import com.uber.jaeger.thriftjava.Span;
-import com.uber.jaeger.thriftjava.SpanRef;
-import com.uber.jaeger.thriftjava.SpanRefType;
-import com.uber.jaeger.thriftjava.Tag;
-import com.uber.jaeger.thriftjava.TagType;
+import io.jaegertracing.internal.exceptions.SenderException;
+import io.jaegertracing.thrift.internal.senders.HttpSender;
+import io.jaegertracing.thriftjava.Log;
+import io.jaegertracing.thriftjava.Process;
+import io.jaegertracing.thriftjava.Span;
+import io.jaegertracing.thriftjava.SpanRef;
+import io.jaegertracing.thriftjava.SpanRefType;
+import io.jaegertracing.thriftjava.Tag;
+import io.jaegertracing.thriftjava.TagType;
 import io.opencensus.common.Timestamp;
 import io.opencensus.trace.Annotation;
 import io.opencensus.trace.AttributeValue;
 import io.opencensus.trace.Link;
 import io.opencensus.trace.MessageEvent;
+import io.opencensus.trace.Span.Kind;
 import io.opencensus.trace.SpanContext;
 import io.opencensus.trace.SpanId;
 import io.opencensus.trace.Status;
 import io.opencensus.trace.TraceId;
 import io.opencensus.trace.TraceOptions;
+import io.opencensus.trace.Tracestate;
 import io.opencensus.trace.export.SpanData;
+import io.opencensus.trace.export.SpanData.TimedEvent;
+import java.util.Collections;
 import java.util.List;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Captor;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
 
 @RunWith(MockitoJUnitRunner.class)
 public class JaegerExporterHandlerTest {
@@ -58,7 +63,8 @@
 
   private final HttpSender mockSender = mock(HttpSender.class);
   private final Process process = new Process("test");
-  private final JaegerExporterHandler handler = new JaegerExporterHandler(mockSender, process);
+  private final JaegerExporterHandler handler =
+      new JaegerExporterHandler(mockSender, process, DEFAULT_DEADLINE);
 
   @Captor private ArgumentCaptor<List<Span>> captor;
 
@@ -72,6 +78,7 @@
             SpanId.fromBytes(new byte[] {(byte) 0x7F, FF, FF, FF, FF, FF, FF, FF}),
             true,
             "test",
+            Kind.SERVER,
             Timestamp.fromMillis(startTime),
             SpanData.Attributes.create(sampleAttributes(), 0),
             SpanData.TimedEvents.create(singletonList(sampleAnnotation()), 0),
@@ -98,26 +105,36 @@
     assertThat(span.startTime).isEqualTo(MILLISECONDS.toMicros(startTime));
     assertThat(span.duration).isEqualTo(MILLISECONDS.toMicros(endTime - startTime));
 
-    assertThat(span.tags.size()).isEqualTo(3);
+    assertThat(span.tags.size()).isEqualTo(5);
     assertThat(span.tags)
         .containsExactly(
             new Tag("BOOL", TagType.BOOL).setVBool(false),
             new Tag("LONG", TagType.LONG).setVLong(Long.MAX_VALUE),
+            new Tag(JaegerExporterHandler.SPAN_KIND, TagType.STRING).setVStr("server"),
             new Tag("STRING", TagType.STRING)
-                .setVStr(
-                    "Judge of a man by his questions rather than by his answers. -- Voltaire"));
+                .setVStr("Judge of a man by his questions rather than by his answers. -- Voltaire"),
+            new Tag(JaegerExporterHandler.STATUS_CODE, TagType.LONG).setVLong(0));
 
-    assertThat(span.logs.size()).isEqualTo(1);
+    assertThat(span.logs.size()).isEqualTo(2);
     Log log = span.logs.get(0);
     assertThat(log.timestamp).isEqualTo(1519629872987654L);
     assertThat(log.fields.size()).isEqualTo(4);
     assertThat(log.fields)
         .containsExactly(
-            new Tag("description", TagType.STRING).setVStr("annotation #1"),
+            new Tag("message", TagType.STRING).setVStr("annotation #1"),
             new Tag("bool", TagType.BOOL).setVBool(true),
             new Tag("long", TagType.LONG).setVLong(1337L),
             new Tag("string", TagType.STRING)
                 .setVStr("Kind words do not cost much. Yet they accomplish much. -- Pascal"));
+    log = span.logs.get(1);
+    assertThat(log.timestamp).isEqualTo(1519629871123456L);
+    assertThat(log.fields.size()).isEqualTo(4);
+    assertThat(log.fields)
+        .containsExactly(
+            new Tag("message", TagType.STRING).setVStr("sent message"),
+            new Tag("id", TagType.LONG).setVLong(42L),
+            new Tag("compressed_size", TagType.LONG).setVLong(69),
+            new Tag("uncompressed_size", TagType.LONG).setVLong(96));
 
     assertThat(span.references.size()).isEqualTo(1);
     SpanRef reference = span.references.get(0);
@@ -127,11 +144,49 @@
     assertThat(reference.refType).isEqualTo(SpanRefType.CHILD_OF);
   }
 
+  @Test
+  public void convertErrorSpanDataToJaegerThriftSpan() throws SenderException {
+    long startTime = 1519629870001L;
+    long endTime = 1519630148002L;
+    String statusMessage = "timeout";
+    SpanData spanData =
+        SpanData.create(
+            sampleSpanContext(),
+            SpanId.fromBytes(new byte[] {(byte) 0x7F, FF, FF, FF, FF, FF, FF, FF}),
+            true,
+            "test",
+            Kind.SERVER,
+            Timestamp.fromMillis(startTime),
+            SpanData.Attributes.create(Collections.<String, AttributeValue>emptyMap(), 0),
+            SpanData.TimedEvents.create(Collections.<TimedEvent<Annotation>>emptyList(), 0),
+            SpanData.TimedEvents.create(Collections.<TimedEvent<MessageEvent>>emptyList(), 0),
+            SpanData.Links.create(Collections.<Link>emptyList(), 0),
+            0,
+            Status.DEADLINE_EXCEEDED.withDescription(statusMessage),
+            Timestamp.fromMillis(endTime));
+
+    handler.export(singletonList(spanData));
+
+    verify(mockSender).send(eq(process), captor.capture());
+    List<Span> spans = captor.getValue();
+
+    assertThat(spans.size()).isEqualTo(1);
+    Span span = spans.get(0);
+
+    assertThat(span.tags.size()).isEqualTo(3);
+    assertThat(span.tags)
+        .containsExactly(
+            new Tag(JaegerExporterHandler.SPAN_KIND, TagType.STRING).setVStr("server"),
+            new Tag(JaegerExporterHandler.STATUS_CODE, TagType.LONG).setVLong(4),
+            new Tag(JaegerExporterHandler.STATUS_MESSAGE, TagType.STRING).setVStr(statusMessage));
+  }
+
   private static SpanContext sampleSpanContext() {
     return SpanContext.create(
         TraceId.fromBytes(new byte[] {FF, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1}),
         SpanId.fromBytes(new byte[] {0, 0, 0, 0, 0, 0, 1, 0}),
-        TraceOptions.builder().setIsSampled(true).build());
+        TraceOptions.builder().setIsSampled(true).build(),
+        Tracestate.builder().build());
   }
 
   private static ImmutableMap<String, AttributeValue> sampleAttributes() {
@@ -159,7 +214,10 @@
   private static SpanData.TimedEvent<MessageEvent> sampleMessageEvent() {
     return SpanData.TimedEvent.create(
         Timestamp.create(1519629871L, 123456789),
-        MessageEvent.builder(MessageEvent.Type.SENT, 42L).build());
+        MessageEvent.builder(MessageEvent.Type.SENT, 42L)
+            .setCompressedMessageSize(69)
+            .setUncompressedMessageSize(96)
+            .build());
   }
 
   private static List<Link> sampleLinks() {
@@ -169,7 +227,8 @@
                 TraceId.fromBytes(
                     new byte[] {FF, FF, FF, FF, FF, FF, FF, FF, FF, FF, FF, FF, FF, FF, FF, 0}),
                 SpanId.fromBytes(new byte[] {0, 0, 0, 0, 0, 0, 2, 0}),
-                TraceOptions.builder().setIsSampled(false).build()),
+                TraceOptions.builder().setIsSampled(false).build(),
+                Tracestate.builder().build()),
             Link.Type.CHILD_LINKED_SPAN,
             ImmutableMap.of(
                 "Bool", AttributeValue.booleanAttributeValue(true),
diff --git a/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerTraceExporterTest.java b/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerTraceExporterTest.java
index c00b013..b8b18b2 100644
--- a/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerTraceExporterTest.java
+++ b/exporters/trace/jaeger/src/test/java/io/opencensus/exporter/trace/jaeger/JaegerTraceExporterTest.java
@@ -16,8 +16,8 @@
 
 package io.opencensus.exporter.trace.jaeger;
 
-import static org.mockito.Matchers.eq;
-import static org.mockito.Matchers.same;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.ArgumentMatchers.same;
 import static org.mockito.Mockito.verify;
 
 import io.opencensus.trace.export.SpanExporter;
diff --git a/exporters/trace/logging/README.md b/exporters/trace/logging/README.md
index 51f2566..d5b162c 100644
--- a/exporters/trace/logging/README.md
+++ b/exporters/trace/logging/README.md
@@ -15,27 +15,27 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-api</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-exporter-trace-logging</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-impl</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
     <scope>runtime</scope>
   </dependency>
 </dependencies>
 ```
 
 For Gradle add to your dependencies:
-```gradle
-compile 'io.opencensus:opencensus-api:0.16.1'
-compile 'io.opencensus:opencensus-exporter-trace-logging:0.16.1'
-runtime 'io.opencensus:opencensus-impl:0.16.1'
+```groovy
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-exporter-trace-logging:0.28.3'
+runtime 'io.opencensus:opencensus-impl:0.28.3'
 ```
 
 ### Register the exporter
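
Under the heading above, a minimal registration sketch; the `register()`/`unregister()` entry points are assumed to be the exporter's existing API, which this patch does not change:

```java
import io.opencensus.exporter.trace.logging.LoggingTraceExporter;

public final class RegisterLoggingSketch {
  public static void main(String[] args) {
    // Registers the logging handler with the global SpanExporter; sampled spans
    // are written to java.util.logging as they are exported.
    LoggingTraceExporter.register();
    // ... do traced work ...
    LoggingTraceExporter.unregister();
  }
}
```
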
diff --git a/exporters/trace/logging/build.gradle b/exporters/trace/logging/build.gradle
index a7fb0ff..95f100e 100644
--- a/exporters/trace/logging/build.gradle
+++ b/exporters/trace/logging/build.gradle
@@ -4,8 +4,6 @@
     compile project(':opencensus-api'),
             libraries.guava
 
-    testCompile project(':opencensus-api')
-
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
     signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
 }
\ No newline at end of file
diff --git a/exporters/trace/logging/src/test/java/io/opencensus/exporter/trace/logging/LoggingTraceExporterTest.java b/exporters/trace/logging/src/test/java/io/opencensus/exporter/trace/logging/LoggingTraceExporterTest.java
index c2b77e4..df64180 100644
--- a/exporters/trace/logging/src/test/java/io/opencensus/exporter/trace/logging/LoggingTraceExporterTest.java
+++ b/exporters/trace/logging/src/test/java/io/opencensus/exporter/trace/logging/LoggingTraceExporterTest.java
@@ -16,8 +16,8 @@
 
 package io.opencensus.exporter.trace.logging;
 
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.eq;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.verify;
 
 import io.opencensus.exporter.trace.logging.LoggingTraceExporter.LoggingExporterHandler;
diff --git a/exporters/trace/ocagent/README.md b/exporters/trace/ocagent/README.md
index 4f25bd6..49cd698 100644
--- a/exporters/trace/ocagent/README.md
+++ b/exporters/trace/ocagent/README.md
@@ -13,27 +13,34 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-api</artifactId>
-    <version>0.17.0</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-exporter-trace-ocagent</artifactId>
-    <version>0.17.0</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-impl</artifactId>
-    <version>0.17.0</version>
+    <version>0.28.3</version>
+    <scope>runtime</scope>
+  </dependency>
+  <dependency>
+    <groupId>io.netty</groupId>
+    <artifactId>netty-tcnative-boringssl-static</artifactId>
+    <version>2.0.20.Final</version>
     <scope>runtime</scope>
   </dependency>
 </dependencies>
 ```
 
 For Gradle add to your dependencies:
-```gradle
-compile 'io.opencensus:opencensus-api:0.17.0'
-compile 'io.opencensus:opencensus-exporter-trace-ocagent:0.17.0'
-runtime 'io.opencensus:opencensus-impl:0.17.0'
+```groovy
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-exporter-trace-ocagent:0.28.3'
+runtime 'io.opencensus:opencensus-impl:0.28.3'
+runtime 'io.netty:netty-tcnative-boringssl-static:2.0.20.Final'
 ```
 
 ### Register the exporter
@@ -46,3 +53,11 @@
   }
 }
 ```
+
+## FAQ
+
+### Why is `netty-tcnative-boringssl-static` needed? Which version should I use?
+
+This artifact depends on `grpc-netty`, which requires a runtime dependency on `netty-tcnative-boringssl-static`
+to work. For more details and a table of known working version combinations, see
+https://github.com/grpc/grpc-java/blob/master/SECURITY.md#netty.
diff --git a/exporters/trace/ocagent/build.gradle b/exporters/trace/ocagent/build.gradle
index 777c08d..6740368 100644
--- a/exporters/trace/ocagent/build.gradle
+++ b/exporters/trace/ocagent/build.gradle
@@ -9,13 +9,31 @@
     compileOnly libraries.auto_value
 
     compile project(':opencensus-api'),
-            project(':opencensus-contrib-monitored-resource-util'),
-            libraries.grpc_core,
-            libraries.grpc_netty,
-            libraries.grpc_stub,
-            libraries.opencensus_proto
+            project(':opencensus-contrib-resource-util'),
+            project(':opencensus-exporter-trace-util')
 
-    testCompile project(':opencensus-api')
+    compile (libraries.grpc_core) {
+        // We will always be more up to date.
+        exclude group: 'io.opencensus', module: 'opencensus-api'
+    }
+
+    compile (libraries.grpc_stub) {
+        // We will always be more up to date.
+        exclude group: 'io.opencensus', module: 'opencensus-api'
+    }
+
+    compile (libraries.grpc_netty) {
+        // We will always be more up to date.
+        exclude group: 'io.opencensus', module: 'opencensus-api'
+    }
+
+    compile (libraries.opencensus_proto) {
+        // We will always be more up to date.
+        exclude group: 'io.opencensus', module: 'opencensus-api'
+    }
+
+    testRuntimeOnly project(':opencensus-impl'),
+                    project(':opencensus-impl-core')
 
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
 }
diff --git a/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentNodeUtils.java b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentNodeUtils.java
index 6572980..f4b028a 100644
--- a/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentNodeUtils.java
+++ b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentNodeUtils.java
@@ -19,23 +19,18 @@
 import com.google.common.annotations.VisibleForTesting;
 import io.opencensus.common.OpenCensusLibraryInformation;
 import io.opencensus.common.Timestamp;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.AwsEc2InstanceMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGceInstanceMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGkeContainerMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResourceUtils;
+import io.opencensus.contrib.resource.util.ResourceUtils;
 import io.opencensus.proto.agent.common.v1.LibraryInfo;
 import io.opencensus.proto.agent.common.v1.LibraryInfo.Language;
 import io.opencensus.proto.agent.common.v1.Node;
 import io.opencensus.proto.agent.common.v1.ProcessIdentifier;
 import io.opencensus.proto.agent.common.v1.ServiceInfo;
+import io.opencensus.proto.resource.v1.Resource;
 import java.lang.management.ManagementFactory;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.security.SecureRandom;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
+import java.util.Map.Entry;
 import javax.annotation.Nullable;
 
 /** Utilities for detecting and creating {@link Node}. */
@@ -43,13 +38,11 @@
 
   // The current version of the OpenCensus OC-Agent Exporter.
   @VisibleForTesting
-  static final String OC_AGENT_EXPORTER_VERSION = "0.17.0-SNAPSHOT"; // CURRENT_OPENCENSUS_VERSION
-
-  @VisibleForTesting static final String RESOURCE_TYPE_ATTRIBUTE_KEY = "OPENCENSUS_SOURCE_TYPE";
-  @VisibleForTesting static final String RESOURCE_LABEL_ATTRIBUTE_KEY = "OPENCENSUS_SOURCE_LABELS";
+  static final String OC_AGENT_EXPORTER_VERSION = "0.32.0-SNAPSHOT"; // CURRENT_OPENCENSUS_VERSION
 
   @Nullable
-  private static final MonitoredResource RESOURCE = MonitoredResourceUtils.getDefaultResource();
+  private static final io.opencensus.resource.Resource AUTO_DETECTED_RESOURCE =
+      ResourceUtils.detectResource();
 
   // Creates a Node with information from the OpenCensus library and environment variables.
   static Node getNodeInfo(String serviceName) {
@@ -59,7 +52,6 @@
         .setIdentifier(getProcessIdentifier(jvmName, censusTimestamp))
         .setLibraryInfo(getLibraryInfo(OpenCensusLibraryInformation.VERSION))
         .setServiceInfo(getServiceInfo(serviceName))
-        .putAllAttributes(getAttributeMap(RESOURCE))
         .build();
   }
 
@@ -112,73 +104,26 @@
     return ServiceInfo.newBuilder().setName(serviceName).build();
   }
 
-  /*
-   * Creates an attribute map with the given MonitoredResource.
-   * If the given resource is not null, the attribute map contains exactly two entries:
-   *
-   * OPENCENSUS_SOURCE_TYPE:
-   *   A string that describes the type of the resource prefixed by a domain namespace,
-   *   e.g. “kubernetes.io/container”.
-   * OPENCENSUS_SOURCE_LABELS:
-   *   A comma-separated list of labels describing the source in more detail,
-   *   e.g. “key1=val1,key2=val2”. The allowed character set is appropriately constrained.
-   */
-  // TODO: update the resource attributes once we have an agreement on the resource specs:
-  // https://github.com/census-instrumentation/opencensus-specs/pull/162.
+  @Nullable
+  static Resource getAutoDetectedResourceProto() {
+    return toResourceProto(AUTO_DETECTED_RESOURCE);
+  }
+
+  // Converts a Java Resource object to a Resource proto.
+  @Nullable
   @VisibleForTesting
-  static Map<String, String> getAttributeMap(@Nullable MonitoredResource resource) {
-    if (resource == null) {
-      return Collections.emptyMap();
+  static Resource toResourceProto(@Nullable io.opencensus.resource.Resource resource) {
+    if (resource == null || resource.getType() == null) {
+      return null;
     } else {
-      Map<String, String> resourceAttributes = new HashMap<String, String>();
-      resourceAttributes.put(RESOURCE_TYPE_ATTRIBUTE_KEY, resource.getResourceType().name());
-      resourceAttributes.put(RESOURCE_LABEL_ATTRIBUTE_KEY, getConcatenatedResourceLabels(resource));
-      return resourceAttributes;
+      Resource.Builder resourceProtoBuilder = Resource.newBuilder();
+      resourceProtoBuilder.setType(resource.getType());
+      for (Entry<String, String> keyValuePairs : resource.getLabels().entrySet()) {
+        resourceProtoBuilder.putLabels(keyValuePairs.getKey(), keyValuePairs.getValue());
+      }
+      return resourceProtoBuilder.build();
     }
   }
 
-  // Encodes the attributes of MonitoredResource into a comma-separated list of labels.
-  // For example "aws_account=account1,instance_id=instance1,region=us-east-2".
-  private static String getConcatenatedResourceLabels(MonitoredResource resource) {
-    StringBuilder resourceLabels = new StringBuilder();
-    if (resource instanceof AwsEc2InstanceMonitoredResource) {
-      AwsEc2InstanceMonitoredResource awsEc2Resource = (AwsEc2InstanceMonitoredResource) resource;
-      putIntoBuilderIfHasValue(resourceLabels, "aws_account", awsEc2Resource.getAccount());
-      putIntoBuilderIfHasValue(resourceLabels, "instance_id", awsEc2Resource.getInstanceId());
-      putIntoBuilderIfHasValue(resourceLabels, "region", awsEc2Resource.getRegion());
-    } else if (resource instanceof GcpGceInstanceMonitoredResource) {
-      GcpGceInstanceMonitoredResource gceResource = (GcpGceInstanceMonitoredResource) resource;
-      putIntoBuilderIfHasValue(resourceLabels, "gcp_account", gceResource.getAccount());
-      putIntoBuilderIfHasValue(resourceLabels, "instance_id", gceResource.getInstanceId());
-      putIntoBuilderIfHasValue(resourceLabels, "zone", gceResource.getZone());
-    } else if (resource instanceof GcpGkeContainerMonitoredResource) {
-      GcpGkeContainerMonitoredResource gkeResource = (GcpGkeContainerMonitoredResource) resource;
-      putIntoBuilderIfHasValue(resourceLabels, "gcp_account", gkeResource.getAccount());
-      putIntoBuilderIfHasValue(resourceLabels, "instance_id", gkeResource.getInstanceId());
-      putIntoBuilderIfHasValue(resourceLabels, "location", gkeResource.getZone());
-      putIntoBuilderIfHasValue(resourceLabels, "namespace_name", gkeResource.getNamespaceId());
-      putIntoBuilderIfHasValue(resourceLabels, "cluster_name", gkeResource.getClusterName());
-      putIntoBuilderIfHasValue(resourceLabels, "container_name", gkeResource.getContainerName());
-      putIntoBuilderIfHasValue(resourceLabels, "pod_name", gkeResource.getPodId());
-    }
-    return resourceLabels.toString();
-  }
-
-  // If the given resourceValue is not empty, encodes resourceKey and resourceValue as
-  // "resourceKey:resourceValue" and puts it into the given StringBuilder. Otherwise skip the value.
-  private static void putIntoBuilderIfHasValue(
-      StringBuilder builder, String resourceKey, String resourceValue) {
-    if (resourceValue.isEmpty()) {
-      return;
-    }
-    if (!(builder.length() == 0)) {
-      // Appends the comma separator to the front, if the StringBuilder already has entries.
-      builder.append(',');
-    }
-    builder.append(resourceKey);
-    builder.append('=');
-    builder.append(resourceValue);
-  }
-
   private OcAgentNodeUtils() {}
 }
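
A hedged sketch mirroring the new resource path above (the class name is illustrative; the detected type and labels depend on the runtime environment, e.g. GCE, GKE, AWS EC2, or none):

```java
import io.opencensus.contrib.resource.util.ResourceUtils;
import io.opencensus.proto.resource.v1.Resource;
import java.util.Map;

public final class ResourceDetectionSketch {
  public static void main(String[] args) {
    io.opencensus.resource.Resource detected = ResourceUtils.detectResource();
    if (detected == null || detected.getType() == null) {
      // Nothing detected (e.g. a plain host): no Resource proto gets attached.
      System.out.println("no resource detected");
      return;
    }
    Resource.Builder proto = Resource.newBuilder().setType(detected.getType());
    for (Map.Entry<String, String> label : detected.getLabels().entrySet()) {
      proto.putLabels(label.getKey(), label.getValue());
    }
    System.out.println(proto.build());
  }
}
```
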
diff --git a/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporter.java b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporter.java
index 5c468de..0df00bd 100644
--- a/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporter.java
+++ b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporter.java
@@ -38,7 +38,7 @@
  * }
  * }</pre>
  *
- * @since 0.17
+ * @since 0.20
  */
 @ThreadSafe
 public final class OcAgentTraceExporter {
@@ -56,14 +56,10 @@
    * Creates a {@code OcAgentTraceExporterHandler} with default configurations and registers it to
    * the OpenCensus library.
    *
-   * @since 0.17
+   * @since 0.20
    */
   public static void createAndRegister() {
-    synchronized (monitor) {
-      checkState(handler == null, "OC-Agent exporter is already registered.");
-      OcAgentTraceExporterHandler newHandler = new OcAgentTraceExporterHandler();
-      registerInternal(newHandler);
-    }
+    createAndRegister(OcAgentTraceExporterConfiguration.builder().build());
   }
 
   /**
@@ -71,7 +67,7 @@
    * the OpenCensus library.
    *
    * @param configuration the {@code OcAgentTraceExporterConfiguration}.
-   * @since 0.17
+   * @since 0.20
    */
   public static void createAndRegister(OcAgentTraceExporterConfiguration configuration) {
     synchronized (monitor) {
@@ -81,8 +77,10 @@
               configuration.getEndPoint(),
               configuration.getServiceName(),
               configuration.getUseInsecure(),
+              configuration.getSslContext(),
               configuration.getRetryInterval(),
-              configuration.getEnableConfig());
+              configuration.getEnableConfig(),
+              configuration.getDeadline());
       registerInternal(newHandler);
     }
   }
@@ -107,10 +105,13 @@
   /**
    * Unregisters the OC-Agent exporter from the OpenCensus library.
    *
-   * @since 0.17
+   * @since 0.20
    */
   public static void unregister() {
-    unregister(Tracing.getExportComponent().getSpanExporter());
+    synchronized (monitor) {
+      unregister(Tracing.getExportComponent().getSpanExporter());
+      handler = null;
+    }
   }
 
   /**
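
For reference, a minimal sketch of how a caller drives the registration API touched above: register once (with the default or an explicit configuration) and unregister on shutdown. The surrounding class and the "my-service" name are hypothetical; the exporter calls are the ones in this file.

    import io.opencensus.exporter.trace.ocagent.OcAgentTraceExporter;
    import io.opencensus.exporter.trace.ocagent.OcAgentTraceExporterConfiguration;

    public final class ExporterLifecycleSketch {
      public static void main(String[] args) {
        // Registers a handler built from the default configuration; a second registration
        // without an unregister() in between is expected to fail the already-registered check.
        OcAgentTraceExporter.createAndRegister();

        // ... application code records spans here ...

        // unregister() now also clears the cached handler under the monitor, so a later
        // registration with an explicit configuration is allowed again.
        OcAgentTraceExporter.unregister();
        OcAgentTraceExporter.createAndRegister(
            OcAgentTraceExporterConfiguration.builder().setServiceName("my-service").build());
      }
    }
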
diff --git a/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterConfiguration.java b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterConfiguration.java
index c7bf1e9..3e2ed86 100644
--- a/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterConfiguration.java
+++ b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterConfiguration.java
@@ -17,6 +17,9 @@
 package io.opencensus.exporter.trace.ocagent;
 
 import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import io.netty.handler.ssl.SslContext;
 import io.opencensus.common.Duration;
 import javax.annotation.Nullable;
 import javax.annotation.concurrent.Immutable;
@@ -24,72 +27,111 @@
 /**
  * Configurations for {@link OcAgentTraceExporter}.
  *
- * @since 0.17
+ * @since 0.20
  */
 @AutoValue
 @Immutable
 public abstract class OcAgentTraceExporterConfiguration {
 
+  @VisibleForTesting static final String DEFAULT_END_POINT = "localhost:55678";
+  @VisibleForTesting static final String DEFAULT_SERVICE_NAME = "OpenCensus";
+  @VisibleForTesting static final Duration DEFAULT_RETRY_INTERVAL = Duration.create(300, 0);
+  @VisibleForTesting static final Duration DEFAULT_DEADLINE = Duration.create(10, 0);
+  @VisibleForTesting static final Duration ZERO = Duration.create(0, 0);
+
   OcAgentTraceExporterConfiguration() {}
 
   /**
    * Returns the end point of OC-Agent. The end point can be dns, ip:port, etc.
    *
+   * <p>Default value is "localhost:55678" if not set.
+   *
    * @return the end point of OC-Agent.
-   * @since 0.17
+   * @since 0.20
    */
-  @Nullable
   public abstract String getEndPoint();
 
   /**
    * Returns whether to disable client transport security for the exporter's gRPC connection or not.
    *
+   * <p>Default value is true if not set.
+   *
    * @return whether to disable client transport security for the exporter's gRPC connection or not.
-   * @since 0.17
+   * @since 0.20
+   */
+  public abstract Boolean getUseInsecure();
+
+  /**
+   * Returns the {@link SslContext} for secure TLS gRPC connection.
+   *
+   * <p>If not set, the OcAgent exporter will use an insecure connection by default.
+   *
+   * @return the {@code SslContext}.
+   * @since 0.20
    */
   @Nullable
-  public abstract Boolean getUseInsecure();
+  public abstract SslContext getSslContext();
 
   /**
    * Returns the service name to be used for this {@link OcAgentTraceExporter}.
    *
+   * <p>Default value is "OpenCensus" if not set.
+   *
    * @return the service name.
-   * @since 0.17
+   * @since 0.20
    */
-  @Nullable
   public abstract String getServiceName();
 
   /**
    * Returns the retry time interval when trying to connect to Agent.
    *
+   * <p>Default value is 5 minutes.
+   *
    * @return the retry time interval.
-   * @since 0.17
+   * @since 0.20
    */
-  @Nullable
   public abstract Duration getRetryInterval();
 
   /**
    * Returns whether the {@link OcAgentTraceExporter} should handle the config streams.
    *
+   * <p>Config service is enabled by default.
+   *
    * @return whether the {@code OcAgentTraceExporter} should handle the config streams.
-   * @since 0.17
+   * @since 0.20
    */
   public abstract boolean getEnableConfig();
 
   /**
+   * Returns the deadline for exporting to Agent/Collector.
+   *
+   * <p>Default value is 10 seconds.
+   *
+   * @return the export deadline.
+   * @since 0.22
+   */
+  public abstract Duration getDeadline();
+
+  /**
    * Returns a new {@link Builder}.
    *
    * @return a {@code Builder}.
-   * @since 0.17
+   * @since 0.20
    */
   public static Builder builder() {
-    return new AutoValue_OcAgentTraceExporterConfiguration.Builder().setEnableConfig(true);
+    return new AutoValue_OcAgentTraceExporterConfiguration.Builder()
+        .setEndPoint(DEFAULT_END_POINT)
+        .setServiceName(DEFAULT_SERVICE_NAME)
+        .setEnableConfig(true)
+        .setUseInsecure(true)
+        .setRetryInterval(DEFAULT_RETRY_INTERVAL)
+        .setDeadline(DEFAULT_DEADLINE);
   }
 
   /**
    * Builder for {@link OcAgentTraceExporterConfiguration}.
    *
-   * @since 0.17
+   * @since 0.20
    */
   @AutoValue.Builder
   public abstract static class Builder {
@@ -101,7 +143,7 @@
      *
      * @param endPoint the end point of OC-Agent.
      * @return this.
-     * @since 0.17
+     * @since 0.20
      */
     public abstract Builder setEndPoint(String endPoint);
 
@@ -111,16 +153,25 @@
      * @param useInsecure whether disable client transport security for the exporter's gRPC
      *     connection.
      * @return this.
-     * @since 0.17
+     * @since 0.20
      */
     public abstract Builder setUseInsecure(Boolean useInsecure);
 
     /**
+     * Sets the {@link SslContext} for secure TLS gRPC connection.
+     *
+     * @param sslContext the {@code SslContext}.
+     * @return this.
+     * @since 0.20
+     */
+    public abstract Builder setSslContext(SslContext sslContext);
+
+    /**
      * Sets the service name to be used for this {@link OcAgentTraceExporter}.
      *
      * @param serviceName the service name.
      * @return this.
-     * @since 0.17
+     * @since 0.20
      */
     public abstract Builder setServiceName(String serviceName);
 
@@ -129,7 +180,7 @@
      *
      * @param retryInterval the retry time interval.
      * @return this.
-     * @since 0.17
+     * @since 0.20
      */
     public abstract Builder setRetryInterval(Duration retryInterval);
 
@@ -138,18 +189,38 @@
      *
      * @param enableConfig whether {@code OcAgentTraceExporter} should handle the config streams.
      * @return this.
-     * @since 0.17
+     * @since 0.20
      */
     public abstract Builder setEnableConfig(boolean enableConfig);
 
+    /**
+     * Sets the deadline for exporting to Agent/Collector.
+     *
+     * @param deadline the export deadline.
+     * @return this.
+     * @since 0.22
+     */
+    public abstract Builder setDeadline(Duration deadline);
+
     // TODO(songya): add an option that controls whether to always keep the RPC connection alive.
 
+    abstract Duration getRetryInterval();
+
+    abstract OcAgentTraceExporterConfiguration autoBuild();
+
+    abstract Duration getDeadline();
+
     /**
      * Builds a {@link OcAgentTraceExporterConfiguration}.
      *
      * @return a {@code OcAgentTraceExporterConfiguration}.
-     * @since 0.17
+     * @since 0.20
      */
-    public abstract OcAgentTraceExporterConfiguration build();
+    public OcAgentTraceExporterConfiguration build() {
+      Preconditions.checkArgument(getDeadline().compareTo(ZERO) > 0, "Deadline must be positive.");
+      Preconditions.checkArgument(
+          getRetryInterval().compareTo(ZERO) > 0, "Retry interval must be positive.");
+      return autoBuild();
+    }
   }
 }
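
Since the builder now fills in every default and validates on build(), configuring a TLS connection reduces to the sketch below. It mirrors the SslContextBuilder usage in the configuration test later in this diff; the wrapper class, endpoint, and service name are hypothetical.

    import io.netty.handler.ssl.SslContext;
    import io.netty.handler.ssl.SslContextBuilder;
    import io.opencensus.common.Duration;
    import io.opencensus.exporter.trace.ocagent.OcAgentTraceExporterConfiguration;
    import javax.net.ssl.SSLException;

    public final class TlsConfigurationSketch {
      static OcAgentTraceExporterConfiguration tlsConfiguration() throws SSLException {
        // A bare client SslContext; a real deployment would load its trust material here.
        SslContext sslContext = SslContextBuilder.forClient().build();
        return OcAgentTraceExporterConfiguration.builder()
            .setEndPoint("agent.example.com:55678") // hypothetical endpoint
            .setServiceName("my-service")
            .setUseInsecure(false)
            .setSslContext(sslContext)
            .setRetryInterval(Duration.create(60, 0))
            .setDeadline(Duration.create(10, 0))
            // build() rejects a non-positive deadline or retry interval with
            // IllegalArgumentException.
            .build();
      }
    }
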
diff --git a/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterHandler.java b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterHandler.java
index 5edc06d..edc5e47 100644
--- a/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterHandler.java
+++ b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterHandler.java
@@ -16,47 +16,105 @@
 
 package io.opencensus.exporter.trace.ocagent;
 
+import io.grpc.ManagedChannel;
+import io.grpc.ManagedChannelBuilder;
+import io.grpc.netty.NegotiationType;
+import io.grpc.netty.NettyChannelBuilder;
+import io.netty.handler.ssl.SslContext;
 import io.opencensus.common.Duration;
+import io.opencensus.exporter.trace.util.TimeLimitedHandler;
+import io.opencensus.proto.agent.common.v1.Node;
+import io.opencensus.proto.agent.trace.v1.ExportTraceServiceRequest;
+import io.opencensus.proto.agent.trace.v1.TraceServiceGrpc;
 import io.opencensus.trace.export.SpanData;
-import io.opencensus.trace.export.SpanExporter.Handler;
 import java.util.Collection;
+import java.util.logging.Logger;
 import javax.annotation.Nullable;
 
 /** Exporting handler for OC-Agent Tracing. */
-final class OcAgentTraceExporterHandler extends Handler {
+final class OcAgentTraceExporterHandler extends TimeLimitedHandler {
 
-  private static final String DEFAULT_END_POINT = "localhost:55678";
-  private static final String DEFAULT_SERVICE_NAME = "OpenCensus";
-  private static final Duration DEFAULT_RETRY_INTERVAL = Duration.create(300, 0); // 5 minutes
+  private static final Logger logger =
+      Logger.getLogger(OcAgentTraceExporterHandler.class.getName());
+  private static final String EXPORT_SPAN_NAME = "ExportOpenCensusProtoSpans";
 
-  OcAgentTraceExporterHandler() {
-    this(null, null, null, null, /* enableConfig= */ true);
-  }
+  private final String endPoint;
+  private final Node node;
+  private final Boolean useInsecure;
+  @Nullable private final SslContext sslContext;
+
+  @javax.annotation.Nullable
+  private OcAgentTraceServiceExportRpcHandler exportRpcHandler; // Thread-safe
 
   OcAgentTraceExporterHandler(
-      @Nullable String endPoint,
-      @Nullable String serviceName,
-      @Nullable Boolean useInsecure,
-      @Nullable Duration retryInterval,
-      boolean enableConfig) {
-    // if (endPoint == null) {
-    //   endPoint = DEFAULT_END_POINT;
-    // }
-    // if (serviceName == null) {
-    //   serviceName = DEFAULT_SERVICE_NAME;
-    // }
-    // if (useInsecure == null) {
-    //   useInsecure = false;
-    // }
-    // if (retryInterval == null) {
-    //   retryInterval = DEFAULT_RETRY_INTERVAL;
-    // }
-    // OcAgentTraceServiceClients.startAttemptsToConnectToAgent(
-    //     endPoint, useInsecure, serviceName, retryInterval.toMillis(), enableConfig);
+      String endPoint,
+      String serviceName,
+      boolean useInsecure,
+      @Nullable SslContext sslContext,
+      Duration retryInterval,
+      boolean enableConfig,
+      Duration deadline) {
+    super(deadline, EXPORT_SPAN_NAME);
+    this.endPoint = endPoint;
+    this.node = OcAgentNodeUtils.getNodeInfo(serviceName);
+    this.useInsecure = useInsecure;
+    this.sslContext = sslContext;
   }
 
   @Override
-  public void export(Collection<SpanData> spanDataList) {
-    // OcAgentTraceServiceClients.onExport(spanDataList);
+  public void timeLimitedExport(Collection<SpanData> spanDataList) {
+    if (exportRpcHandler == null || exportRpcHandler.isCompleted()) {
+      // If not connected, try to initiate a new connection when a new batch of spans arrives.
+      // Export RPC doesn't respect the retry interval.
+      TraceServiceGrpc.TraceServiceStub stub =
+          getTraceServiceStub(endPoint, useInsecure, sslContext);
+      exportRpcHandler = createExportRpcHandlerAndConnect(stub, node);
+    }
+
+    if (exportRpcHandler == null || exportRpcHandler.isCompleted()) { // Failed to connect to Agent.
+      logger.info("Export RPC disconnected, dropping " + spanDataList.size() + " spans.");
+      exportRpcHandler = null;
+    } else { // Connection succeeded, send export request.
+      ExportTraceServiceRequest.Builder requestBuilder = ExportTraceServiceRequest.newBuilder();
+      for (SpanData spanData : spanDataList) {
+        requestBuilder.addSpans(TraceProtoUtils.toSpanProto(spanData));
+      }
+      exportRpcHandler.onExport(requestBuilder.build());
+    }
+  }
+
+  @Nullable
+  private static OcAgentTraceServiceExportRpcHandler createExportRpcHandlerAndConnect(
+      TraceServiceGrpc.TraceServiceStub stub, Node node) {
+    @Nullable OcAgentTraceServiceExportRpcHandler exportRpcHandler = null;
+    try {
+      exportRpcHandler = OcAgentTraceServiceExportRpcHandler.create(stub);
+      // First message must have Node set.
+      ExportTraceServiceRequest firstExportReq =
+          ExportTraceServiceRequest.newBuilder().setNode(node).build();
+      exportRpcHandler.onExport(firstExportReq);
+    } catch (RuntimeException e) {
+      if (exportRpcHandler != null) {
+        exportRpcHandler.onComplete(e);
+      }
+    }
+    return exportRpcHandler;
+  }
+
+  // Creates a TraceServiceStub with the given parameters.
+  // One stub can be used for both Export RPC and Config RPC.
+  private static TraceServiceGrpc.TraceServiceStub getTraceServiceStub(
+      String endPoint, Boolean useInsecure, SslContext sslContext) {
+    ManagedChannelBuilder<?> channelBuilder;
+    if (useInsecure) {
+      channelBuilder = ManagedChannelBuilder.forTarget(endPoint).usePlaintext();
+    } else {
+      channelBuilder =
+          NettyChannelBuilder.forTarget(endPoint)
+              .negotiationType(NegotiationType.TLS)
+              .sslContext(sslContext);
+    }
+    ManagedChannel channel = channelBuilder.build();
+    return TraceServiceGrpc.newStub(channel);
   }
 }
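
The handler above chooses between a plaintext channel and a TLS Netty channel based on useInsecure. A standalone sketch of the same channel construction, using only the gRPC/Netty calls that appear in this file (the endpoint and wrapper class are hypothetical):

    import io.grpc.ManagedChannel;
    import io.grpc.ManagedChannelBuilder;
    import io.grpc.netty.NegotiationType;
    import io.grpc.netty.NettyChannelBuilder;
    import io.netty.handler.ssl.SslContext;
    import io.netty.handler.ssl.SslContextBuilder;
    import javax.net.ssl.SSLException;

    public final class ChannelSketch {
      // Plaintext channel when useInsecure is true, otherwise a TLS channel with the
      // supplied SslContext, mirroring getTraceServiceStub above.
      static ManagedChannel buildChannel(
          String endPoint, boolean useInsecure, SslContext sslContext) {
        if (useInsecure) {
          return ManagedChannelBuilder.forTarget(endPoint).usePlaintext().build();
        }
        return NettyChannelBuilder.forTarget(endPoint)
            .negotiationType(NegotiationType.TLS)
            .sslContext(sslContext)
            .build();
      }

      public static void main(String[] args) throws SSLException {
        ManagedChannel insecure = buildChannel("localhost:55678", true, null);
        ManagedChannel secure =
            buildChannel("localhost:55678", false, SslContextBuilder.forClient().build());
        insecure.shutdown();
        secure.shutdown();
      }
    }
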
diff --git a/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceServiceConfigRpcHandler.java b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceServiceConfigRpcHandler.java
new file mode 100644
index 0000000..0bc0cd9
--- /dev/null
+++ b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceServiceConfigRpcHandler.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.ocagent;
+
+import com.google.common.annotations.VisibleForTesting;
+import io.grpc.Status;
+import io.grpc.StatusRuntimeException;
+import io.grpc.stub.StreamObserver;
+import io.opencensus.proto.agent.common.v1.Node;
+import io.opencensus.proto.agent.trace.v1.CurrentLibraryConfig;
+import io.opencensus.proto.agent.trace.v1.TraceServiceGrpc.TraceServiceStub;
+import io.opencensus.proto.agent.trace.v1.UpdatedLibraryConfig;
+import io.opencensus.trace.config.TraceConfig;
+import io.opencensus.trace.config.TraceParams;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.GuardedBy;
+import javax.annotation.concurrent.ThreadSafe;
+
+/** Handler of config service RPC. */
+@ThreadSafe
+final class OcAgentTraceServiceConfigRpcHandler {
+
+  private static final Logger logger =
+      Logger.getLogger(OcAgentTraceServiceConfigRpcHandler.class.getName());
+
+  // A reference to the currentConfigObserver returned from stub.
+  @GuardedBy("this")
+  @Nullable
+  private StreamObserver<CurrentLibraryConfig> currentConfigObserver;
+
+  // The RPC status when this stream finishes/disconnects. Null if the stream is still connected.
+  @GuardedBy("this")
+  @Nullable
+  private Status terminateStatus;
+
+  private final TraceConfig traceConfig; // Inject a mock TraceConfig in unit tests.
+
+  private OcAgentTraceServiceConfigRpcHandler(TraceConfig traceConfig) {
+    this.traceConfig = traceConfig;
+  }
+
+  private synchronized void setCurrentConfigObserver(
+      StreamObserver<CurrentLibraryConfig> currentConfigObserver) {
+    this.currentConfigObserver = currentConfigObserver;
+  }
+
+  // Creates an OcAgentTraceServiceConfigRpcHandler. Tries to initiate the config stream with the
+  // given TraceServiceStub.
+  static OcAgentTraceServiceConfigRpcHandler create(
+      TraceServiceStub stub, TraceConfig traceConfig) {
+    OcAgentTraceServiceConfigRpcHandler configRpcHandler =
+        new OcAgentTraceServiceConfigRpcHandler(traceConfig);
+    UpdatedLibraryConfigObserver updatedLibraryConfigObserver =
+        new UpdatedLibraryConfigObserver(traceConfig, configRpcHandler);
+    try {
+      StreamObserver<CurrentLibraryConfig> currentConfigObserver =
+          stub.config(updatedLibraryConfigObserver);
+      configRpcHandler.setCurrentConfigObserver(currentConfigObserver);
+    } catch (StatusRuntimeException e) {
+      configRpcHandler.onComplete(e);
+    }
+    return configRpcHandler;
+  }
+
+  // Sends the initial config message with Node to Agent.
+  // Once the initial config message is sent, the current thread will be blocked watching for
+  // subsequent updated library configs, unless the stream is interrupted.
+  synchronized void sendInitialMessage(Node node) {
+    io.opencensus.proto.trace.v1.TraceConfig currentTraceConfigProto =
+        TraceProtoUtils.getCurrentTraceConfig(traceConfig);
+    // First config must have Node set.
+    CurrentLibraryConfig firstConfig =
+        CurrentLibraryConfig.newBuilder().setNode(node).setConfig(currentTraceConfigProto).build();
+    sendCurrentConfig(firstConfig);
+  }
+
+  // Follow up after applying the updated library config.
+  private synchronized void sendCurrentConfig() {
+    // Bouncing back CurrentLibraryConfig to Agent.
+    io.opencensus.proto.trace.v1.TraceConfig currentTraceConfigProto =
+        TraceProtoUtils.getCurrentTraceConfig(traceConfig);
+    CurrentLibraryConfig currentLibraryConfig =
+        CurrentLibraryConfig.newBuilder().setConfig(currentTraceConfigProto).build();
+    sendCurrentConfig(currentLibraryConfig);
+  }
+
+  // Sends the current config to Agent if the stream is still connected, otherwise does nothing.
+  private synchronized void sendCurrentConfig(CurrentLibraryConfig currentLibraryConfig) {
+    if (isCompleted() || currentConfigObserver == null) {
+      return;
+    }
+    try {
+      currentConfigObserver.onNext(currentLibraryConfig);
+    } catch (Exception e) { // Catch client side exceptions.
+      onComplete(e);
+    }
+  }
+
+  // Marks this config stream as completed with an optional error.
+  // Once onComplete is called, this OcAgentTraceServiceConfigRpcHandler instance can be discarded
+  // and GC'ed in the worker thread.
+  synchronized void onComplete(@javax.annotation.Nullable Throwable error) {
+    if (isCompleted()) {
+      return;
+    }
+    currentConfigObserver = null;
+    // TODO(songya): add Runnable
+    Status status;
+    if (error == null) {
+      status = Status.OK;
+    } else if (error instanceof StatusRuntimeException) {
+      status = ((StatusRuntimeException) error).getStatus();
+    } else {
+      status = Status.UNKNOWN;
+    }
+    terminateStatus = status;
+  }
+
+  synchronized boolean isCompleted() {
+    return terminateStatus != null;
+  }
+
+  @VisibleForTesting
+  @Nullable
+  synchronized Status getTerminateStatus() {
+    return terminateStatus;
+  }
+
+  @VisibleForTesting
+  static class UpdatedLibraryConfigObserver implements StreamObserver<UpdatedLibraryConfig> {
+
+    private final TraceConfig traceConfig;
+    private final OcAgentTraceServiceConfigRpcHandler configRpcHandler;
+
+    @VisibleForTesting
+    UpdatedLibraryConfigObserver(
+        TraceConfig traceConfig, OcAgentTraceServiceConfigRpcHandler configRpcHandler) {
+      this.traceConfig = traceConfig;
+      this.configRpcHandler = configRpcHandler;
+    }
+
+    @Override
+    public void onNext(UpdatedLibraryConfig value) {
+      // First, apply the incoming updated config.
+      TraceParams updatedTraceParams = TraceProtoUtils.getUpdatedTraceParams(value, traceConfig);
+      traceConfig.updateActiveTraceParams(updatedTraceParams);
+
+      // Then echo back current config.
+      configRpcHandler.sendCurrentConfig();
+    }
+
+    @Override
+    public void onError(Throwable t) {
+      logger.log(Level.WARNING, "Config stream is disconnected.", t);
+      configRpcHandler.onComplete(t);
+    }
+
+    @Override
+    public void onCompleted() {
+      configRpcHandler.onComplete(null);
+    }
+  }
+}
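
A hedged sketch of how the exporter side wires up this config handler: build a stub on an existing channel, open the stream, and send the first CurrentLibraryConfig carrying the Node. The wrapper class and endpoint are hypothetical; the handler, OcAgentNodeUtils, and Tracing calls are the ones in this diff. Because the handler is package-private, the sketch lives in the same package.

    package io.opencensus.exporter.trace.ocagent;

    import io.grpc.ManagedChannel;
    import io.grpc.ManagedChannelBuilder;
    import io.opencensus.proto.agent.common.v1.Node;
    import io.opencensus.proto.agent.trace.v1.TraceServiceGrpc;
    import io.opencensus.trace.Tracing;

    final class ConfigRpcWiringSketch {
      static OcAgentTraceServiceConfigRpcHandler openConfigStream(String endPoint) {
        // One stub can serve both the Export RPC and the Config RPC.
        ManagedChannel channel = ManagedChannelBuilder.forTarget(endPoint).usePlaintext().build();
        TraceServiceGrpc.TraceServiceStub stub = TraceServiceGrpc.newStub(channel);

        OcAgentTraceServiceConfigRpcHandler configRpcHandler =
            OcAgentTraceServiceConfigRpcHandler.create(stub, Tracing.getTraceConfig());
        if (!configRpcHandler.isCompleted()) {
          // The first CurrentLibraryConfig must carry the Node identifying this process.
          Node node = OcAgentNodeUtils.getNodeInfo("my-service");
          configRpcHandler.sendInitialMessage(node);
        }
        return configRpcHandler;
      }
    }
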
diff --git a/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceServiceConnectionWorker.java b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceServiceConnectionWorker.java
new file mode 100644
index 0000000..6be9ec3
--- /dev/null
+++ b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceServiceConnectionWorker.java
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.ocagent;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * Daemon worker thread that tries to connect to OC-Agent at a configured time interval
+ * (retryIntervalMillis). Unless interrupted, this thread will be running forever.
+ */
+final class OcAgentTraceServiceConnectionWorker extends Thread {
+
+  /*
+   * We should use ManagedChannel.getState() to determine the state of current connection.
+   *
+   * 0. For export RPC we have ExportRpcHandler which holds references to request and response
+   * observers. For config RPC we have ConfigRpcHandler.
+   *
+   * Think of the whole process as a Finite State Machine.
+   *
+   * Note: "Exporter" = Agent Exporter and "Agent" = OC-Agent in the sections below.
+   *
+   * STATE 1: Unconnected/Disconnected.
+   *
+   * 1. First, Exporter will try to establish the streams in a daemon thread.
+   * This is done when ExportRpcHandler and ConfigRpcHandler are created,
+   * by calling TraceServiceStub.export and TraceServiceStub.config.
+   *
+   *   1-1. If the initial attempt succeeded, Exporter should receive messages from the Agent, and
+   *   ExportRpcHandler and ConfigRpcHandler should be created successfully.
+   *
+   *   1-2. If the attempt failed, TraceServiceStub.export or TraceServiceStub.config will throw
+   *   an exception. We should catch the exceptions and keep retrying.
+   *   ExportRpcHandler and ConfigRpcHandler will store the exiting RPC status and pass it to a
+   *   Runnable defined by the daemon worker.
+   *
+   *   1-3. After each attempt, we should check if the connection is established with
+   *   ManagedChannel.getState(). (This is done in the daemon thread.)
+   *   If connection succeeded, we can move forward and start sending/receiving streams
+   *   (move to STATE 2). Otherwise Exporter should retry.
+   *
+   * STATE 2: Already connected.
+   *
+   * 2. Once streams are open, they should be kept alive. The daemon worker should be blocked and
+   * watching UpdatedLibraryConfig messages from Agent.
+   *
+   *   2-1. If for some reason the connection is interrupted or ended, the errors (if any) will be
+   *   caught by ExportRpcHandler and ConfigRpcHandler. They will store the exiting RPC status and
+   *   pass it to a Runnable defined by the daemon worker. At that time the ExportRpcHandler and
+   *   ConfigRpcHandler will be considered completed.
+   *
+   *   Then we will create a new channel and stub, try to connect to Agent, and try to create new
+   *   ExportRpcHandler and ConfigRpcHandler. (Back to STATE 1.)
+   *
+   * FYI the method signatures on both sides:
+   *
+   * Agent has:
+   *
+   * public abstract static class TraceServiceImplBase {
+   *
+   *   public abstract StreamObserver<CurrentLibraryConfig> config(
+   *     StreamObserver<UpdatedLibraryConfig> responseObserver);
+   *
+   *   public abstract StreamObserver<ExportTraceServiceRequest> export(
+   *     StreamObserver<ExportTraceServiceResponse> responseObserver);
+   * }
+   *
+   * Exporter has:
+   *
+   * public static final class TraceServiceStub {
+   *
+   *   public StreamObserver<CurrentLibraryConfig> config(
+   *     StreamObserver<UpdatedLibraryConfig> responseObserver) {
+   *     // implementation
+   *   }
+   *
+   *   public StreamObserver<ExportTraceServiceRequest> export(
+   *     StreamObserver<ExportTraceServiceResponse> responseObserver) {
+   *     // implementation
+   *   }
+   *
+   * }
+   */
+
+  // private final String endPoint;
+  // private final boolean useInsecure;
+  // private final Node node;
+  private final long retryIntervalMillis;
+  // private final boolean enableConfig;
+
+  @VisibleForTesting
+  OcAgentTraceServiceConnectionWorker(
+      String endPoint,
+      boolean useInsecure,
+      String serviceName,
+      long retryIntervalMillis,
+      boolean enableConfig) {
+    // this.endPoint = endPoint;
+    // this.useInsecure = useInsecure;
+    // this.node = OcAgentNodeUtils.getNodeInfo(serviceName);
+    this.retryIntervalMillis = retryIntervalMillis;
+    // this.enableConfig = enableConfig;
+    setDaemon(true);
+    setName("OcAgentTraceServiceConnectionWorker");
+  }
+
+  static void startThread(
+      String endPoint,
+      boolean useInsecure,
+      String serviceName,
+      long retryIntervalMillis,
+      boolean enableConfig) {
+    new OcAgentTraceServiceConnectionWorker(
+            endPoint, useInsecure, serviceName, retryIntervalMillis, enableConfig)
+        .start();
+  }
+
+  @Override
+  public void run() {
+    try {
+      // Infinite outer loop to keep this thread alive.
+      // This thread should never exit unless interrupted.
+      while (true) {
+
+        // TODO(songya): implement this.
+
+        // Retry connection after the configured time interval.
+        Thread.sleep(retryIntervalMillis);
+      }
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+    }
+  }
+}
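
The comment above describes a retry state machine built around ManagedChannel.getState(), but the loop body is still a TODO in this change. As a rough illustration only, a poll-and-back-off loop could look like the sketch below; everything besides the gRPC ConnectivityState API is hypothetical.

    import io.grpc.ConnectivityState;
    import io.grpc.ManagedChannel;

    final class RetryLoopSketch {
      // Polls the channel state and sleeps between attempts, roughly matching the
      // "retry until connected" behavior described above.
      static void awaitConnection(ManagedChannel channel, long retryIntervalMillis)
          throws InterruptedException {
        while (true) {
          // Passing true asks the channel to connect if it is currently IDLE.
          ConnectivityState state = channel.getState(/* requestConnection= */ true);
          if (state == ConnectivityState.READY) {
            return; // Connected; streams can be opened (STATE 2 above).
          }
          Thread.sleep(retryIntervalMillis); // Back off before the next attempt (STATE 1).
        }
      }
    }
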
diff --git a/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceServiceExportRpcHandler.java b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceServiceExportRpcHandler.java
new file mode 100644
index 0000000..fcb3c4a
--- /dev/null
+++ b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceServiceExportRpcHandler.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.ocagent;
+
+import com.google.common.annotations.VisibleForTesting;
+import io.grpc.Status;
+import io.grpc.StatusRuntimeException;
+import io.grpc.stub.StreamObserver;
+import io.opencensus.proto.agent.trace.v1.ExportTraceServiceRequest;
+import io.opencensus.proto.agent.trace.v1.ExportTraceServiceResponse;
+import io.opencensus.proto.agent.trace.v1.TraceServiceGrpc.TraceServiceStub;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.GuardedBy;
+import javax.annotation.concurrent.ThreadSafe;
+
+/** Handler of export service RPC. */
+@ThreadSafe
+final class OcAgentTraceServiceExportRpcHandler {
+
+  private static final Logger logger =
+      Logger.getLogger(OcAgentTraceServiceExportRpcHandler.class.getName());
+
+  // A reference to the exportRequestObserver returned from stub.
+  @GuardedBy("this")
+  @Nullable
+  private StreamObserver<ExportTraceServiceRequest> exportRequestObserver;
+
+  // The RPC status when this stream finishes/disconnects. Null if the stream is still connected.
+  @GuardedBy("this")
+  @Nullable
+  private Status terminateStatus;
+
+  private OcAgentTraceServiceExportRpcHandler() {}
+
+  private synchronized void setExportRequestObserver(
+      StreamObserver<ExportTraceServiceRequest> exportRequestObserver) {
+    this.exportRequestObserver = exportRequestObserver;
+  }
+
+  // Creates an OcAgentTraceServiceExportRpcHandler. Tries to initiate the export stream with the
+  // given TraceServiceStub.
+  static OcAgentTraceServiceExportRpcHandler create(TraceServiceStub stub) {
+    OcAgentTraceServiceExportRpcHandler exportRpcHandler =
+        new OcAgentTraceServiceExportRpcHandler();
+    ExportResponseObserver exportResponseObserver = new ExportResponseObserver(exportRpcHandler);
+    try {
+      StreamObserver<ExportTraceServiceRequest> exportRequestObserver =
+          stub.export(exportResponseObserver);
+      exportRpcHandler.setExportRequestObserver(exportRequestObserver);
+    } catch (StatusRuntimeException e) {
+      exportRpcHandler.onComplete(e);
+    }
+    return exportRpcHandler;
+  }
+
+  // Sends the export request to Agent if the stream is still connected, otherwise does nothing.
+  synchronized void onExport(ExportTraceServiceRequest request) {
+    if (isCompleted() || exportRequestObserver == null) {
+      return;
+    }
+    try {
+      exportRequestObserver.onNext(request);
+    } catch (Exception e) { // Catch client side exceptions.
+      onComplete(e);
+    }
+  }
+
+  // Marks this export stream as completed with an optional error.
+  // Once onComplete is called, this OcAgentTraceServiceExportRpcHandler instance can be discarded
+  // and GC'ed in the worker thread.
+  synchronized void onComplete(@javax.annotation.Nullable Throwable error) {
+    if (isCompleted()) {
+      return;
+    }
+    // TODO(songya): add Runnable
+    Status status;
+    if (error == null) {
+      status = Status.OK;
+    } else if (error instanceof StatusRuntimeException) {
+      status = ((StatusRuntimeException) error).getStatus();
+    } else {
+      status = Status.UNKNOWN;
+    }
+    terminateStatus = status;
+  }
+
+  synchronized boolean isCompleted() {
+    return terminateStatus != null;
+  }
+
+  @VisibleForTesting
+  @Nullable
+  synchronized Status getTerminateStatus() {
+    return terminateStatus;
+  }
+
+  @VisibleForTesting
+  static class ExportResponseObserver implements StreamObserver<ExportTraceServiceResponse> {
+
+    private final OcAgentTraceServiceExportRpcHandler exportRpcHandler;
+
+    ExportResponseObserver(OcAgentTraceServiceExportRpcHandler exportRpcHandler) {
+      this.exportRpcHandler = exportRpcHandler;
+    }
+
+    @Override
+    public void onNext(ExportTraceServiceResponse value) {
+      // Do nothing since ExportTraceServiceResponse is an empty message.
+    }
+
+    @Override
+    public void onError(Throwable t) {
+      logger.log(Level.WARNING, "Export stream is disconnected.", t);
+      exportRpcHandler.onComplete(t);
+    }
+
+    @Override
+    public void onCompleted() {
+      exportRpcHandler.onComplete(null);
+    }
+  }
+}
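
The export handler follows the same pattern as the config handler: open the stream via the stub, send a first request carrying the Node, then push span batches until the stream completes. A small sketch of that call sequence, kept in the same package because the handler is package-private (the stub setup is assumed to exist elsewhere):

    package io.opencensus.exporter.trace.ocagent;

    import io.opencensus.proto.agent.common.v1.Node;
    import io.opencensus.proto.agent.trace.v1.ExportTraceServiceRequest;
    import io.opencensus.proto.agent.trace.v1.TraceServiceGrpc.TraceServiceStub;

    final class ExportRpcWiringSketch {
      static void exportOnce(TraceServiceStub stub, Node node, ExportTraceServiceRequest batch) {
        OcAgentTraceServiceExportRpcHandler exportRpcHandler =
            OcAgentTraceServiceExportRpcHandler.create(stub);

        // The first request on a new stream must carry the Node, as done in
        // OcAgentTraceExporterHandler.createExportRpcHandlerAndConnect above.
        exportRpcHandler.onExport(ExportTraceServiceRequest.newBuilder().setNode(node).build());

        if (!exportRpcHandler.isCompleted()) {
          exportRpcHandler.onExport(batch); // Subsequent requests carry only spans.
        }
      }
    }
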
diff --git a/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/TraceProtoUtils.java b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/TraceProtoUtils.java
index ec778ba..ac17be2 100644
--- a/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/TraceProtoUtils.java
+++ b/exporters/trace/ocagent/src/main/java/io/opencensus/exporter/trace/ocagent/TraceProtoUtils.java
@@ -26,6 +26,7 @@
 import io.opencensus.proto.agent.trace.v1.UpdatedLibraryConfig;
 import io.opencensus.proto.trace.v1.AttributeValue;
 import io.opencensus.proto.trace.v1.ConstantSampler;
+import io.opencensus.proto.trace.v1.ConstantSampler.ConstantDecision;
 import io.opencensus.proto.trace.v1.ProbabilitySampler;
 import io.opencensus.proto.trace.v1.Span;
 import io.opencensus.proto.trace.v1.Span.Attributes;
@@ -323,10 +324,10 @@
 
     if (Samplers.alwaysSample().equals(librarySampler)) {
       traceConfigProtoBuilder.setConstantSampler(
-          ConstantSampler.newBuilder().setDecision(true).build());
+          ConstantSampler.newBuilder().setDecision(ConstantDecision.ALWAYS_ON).build());
     } else if (Samplers.neverSample().equals(librarySampler)) {
       traceConfigProtoBuilder.setConstantSampler(
-          ConstantSampler.newBuilder().setDecision(false).build());
+          ConstantSampler.newBuilder().setDecision(ConstantDecision.ALWAYS_OFF).build());
     } else {
       // TODO: consider exposing the sampling probability of ProbabilitySampler.
       double samplingProbability = parseSamplingProbability(librarySampler);
@@ -358,11 +359,15 @@
     TraceParams.Builder builder = currentTraceParams.toBuilder();
     if (traceConfigProto.hasConstantSampler()) {
       ConstantSampler constantSampler = traceConfigProto.getConstantSampler();
-      if (Boolean.TRUE.equals(constantSampler.getDecision())) {
+      ConstantDecision decision = constantSampler.getDecision();
+      if (ConstantDecision.ALWAYS_ON.equals(decision)) {
         builder.setSampler(Samplers.alwaysSample());
-      } else {
+      } else if (ConstantDecision.ALWAYS_OFF.equals(decision)) {
         builder.setSampler(Samplers.neverSample());
-      }
+      } // else if (ConstantDecision.ALWAYS_PARENT.equals(decision)) {
+      // For ALWAYS_PARENT, don't need to update configs since in Java by default parent sampling
+      // decision always takes precedence.
+      // }
     } else if (traceConfigProto.hasProbabilitySampler()) {
       builder.setSampler(
           Samplers.probabilitySampler(
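
The sampler translation above now maps constant samplers to the three-valued ConstantDecision enum instead of a boolean. A compact sketch of the mapping in both directions, using only types that appear in this diff (the wrapper class is hypothetical):

    import io.opencensus.proto.trace.v1.ConstantSampler;
    import io.opencensus.proto.trace.v1.ConstantSampler.ConstantDecision;
    import io.opencensus.trace.Sampler;
    import io.opencensus.trace.samplers.Samplers;

    final class ConstantDecisionSketch {
      // Library constant sampler -> proto decision. (The real getCurrentTraceConfig falls back
      // to a ProbabilitySampler proto for any non-constant sampler.)
      static ConstantSampler toProto(Sampler sampler) {
        ConstantDecision decision =
            Samplers.alwaysSample().equals(sampler)
                ? ConstantDecision.ALWAYS_ON
                : ConstantDecision.ALWAYS_OFF;
        return ConstantSampler.newBuilder().setDecision(decision).build();
      }

      // Proto decision -> library sampler, as in getUpdatedTraceParams above. ALWAYS_PARENT
      // (and anything unrecognized) leaves the current sampler untouched, since the parent
      // sampling decision already takes precedence in Java.
      static Sampler fromProto(ConstantSampler constantSampler, Sampler current) {
        switch (constantSampler.getDecision()) {
          case ALWAYS_ON:
            return Samplers.alwaysSample();
          case ALWAYS_OFF:
            return Samplers.neverSample();
          default:
            return current;
        }
      }
    }
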
diff --git a/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/FakeOcAgentTraceServiceGrpcImpl.java b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/FakeOcAgentTraceServiceGrpcImpl.java
index fbdb35e..2a05232 100644
--- a/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/FakeOcAgentTraceServiceGrpcImpl.java
+++ b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/FakeOcAgentTraceServiceGrpcImpl.java
@@ -16,10 +16,7 @@
 
 package io.opencensus.exporter.trace.ocagent;
 
-import com.google.common.util.concurrent.MoreExecutors;
-import io.grpc.Server;
-import io.grpc.ServerBuilder;
-import io.grpc.netty.NettyServerBuilder;
+import com.google.common.annotations.VisibleForTesting;
 import io.grpc.stub.StreamObserver;
 import io.opencensus.proto.agent.trace.v1.CurrentLibraryConfig;
 import io.opencensus.proto.agent.trace.v1.ExportTraceServiceRequest;
@@ -27,65 +24,80 @@
 import io.opencensus.proto.agent.trace.v1.TraceServiceGrpc;
 import io.opencensus.proto.agent.trace.v1.UpdatedLibraryConfig;
 import io.opencensus.proto.trace.v1.ConstantSampler;
+import io.opencensus.proto.trace.v1.ConstantSampler.ConstantDecision;
 import io.opencensus.proto.trace.v1.TraceConfig;
-import java.io.IOException;
-import java.net.InetSocketAddress;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.concurrent.Executor;
+import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicReference;
+import java.util.logging.Level;
 import java.util.logging.Logger;
 import javax.annotation.Nullable;
+import javax.annotation.concurrent.GuardedBy;
+import javax.annotation.concurrent.ThreadSafe;
 
 /** Fake implementation of {@link TraceServiceGrpc}. */
+@ThreadSafe
 final class FakeOcAgentTraceServiceGrpcImpl extends TraceServiceGrpc.TraceServiceImplBase {
 
   private static final Logger logger =
       Logger.getLogger(FakeOcAgentTraceServiceGrpcImpl.class.getName());
 
   // Default updatedLibraryConfig uses an always sampler.
+  @GuardedBy("this")
   private UpdatedLibraryConfig updatedLibraryConfig =
       UpdatedLibraryConfig.newBuilder()
           .setConfig(
               TraceConfig.newBuilder()
-                  .setConstantSampler(ConstantSampler.newBuilder().setDecision(true).build())
+                  .setConstantSampler(
+                      ConstantSampler.newBuilder().setDecision(ConstantDecision.ALWAYS_ON).build())
                   .build())
           .build();
 
+  @GuardedBy("this")
   private final List<CurrentLibraryConfig> currentLibraryConfigs = new ArrayList<>();
+
+  @GuardedBy("this")
   private final List<ExportTraceServiceRequest> exportTraceServiceRequests = new ArrayList<>();
 
-  private final AtomicReference<StreamObserver<UpdatedLibraryConfig>> updatedConfigObserverRef =
+  @GuardedBy("this")
+  private final AtomicReference<StreamObserver<UpdatedLibraryConfig>> configRequestObserverRef =
       new AtomicReference<>();
 
-  private final StreamObserver<CurrentLibraryConfig> currentConfigObserver =
+  @GuardedBy("this")
+  private final StreamObserver<CurrentLibraryConfig> configResponseObserver =
       new StreamObserver<CurrentLibraryConfig>() {
         @Override
         public void onNext(CurrentLibraryConfig value) {
-          currentLibraryConfigs.add(value);
-          @Nullable
-          StreamObserver<UpdatedLibraryConfig> updatedConfigObserver =
-              updatedConfigObserverRef.get();
-          if (updatedConfigObserver != null) {
-            updatedConfigObserver.onNext(updatedLibraryConfig);
+          addCurrentLibraryConfig(value);
+          try {
+            // Do not send UpdatedLibraryConfigs too frequently.
+            Thread.sleep(1000);
+          } catch (InterruptedException e) {
+            logger.log(Level.WARNING, "Thread interrupted.", e);
           }
+          sendUpdatedLibraryConfig();
         }
 
         @Override
         public void onError(Throwable t) {
           logger.warning("Exception thrown for config stream: " + t);
+          resetConfigRequestObserverRef();
         }
 
         @Override
-        public void onCompleted() {}
+        public void onCompleted() {
+          resetConfigRequestObserverRef();
+        }
       };
 
+  @GuardedBy("this")
   private final StreamObserver<ExportTraceServiceRequest> exportRequestObserver =
       new StreamObserver<ExportTraceServiceRequest>() {
         @Override
         public void onNext(ExportTraceServiceRequest value) {
-          exportTraceServiceRequests.add(value);
+          addExportRequest(value);
         }
 
         @Override
@@ -97,73 +109,75 @@
         public void onCompleted() {}
       };
 
+  @GuardedBy("this")
+  private CountDownLatch countDownLatch;
+
   @Override
-  public StreamObserver<CurrentLibraryConfig> config(
+  public synchronized StreamObserver<CurrentLibraryConfig> config(
       StreamObserver<UpdatedLibraryConfig> updatedLibraryConfigStreamObserver) {
-    updatedConfigObserverRef.set(updatedLibraryConfigStreamObserver);
-    return currentConfigObserver;
+    configRequestObserverRef.set(updatedLibraryConfigStreamObserver);
+    return configResponseObserver;
   }
 
   @Override
-  public StreamObserver<ExportTraceServiceRequest> export(
+  public synchronized StreamObserver<ExportTraceServiceRequest> export(
       StreamObserver<ExportTraceServiceResponse> exportTraceServiceResponseStreamObserver) {
     return exportRequestObserver;
   }
 
+  private synchronized void addCurrentLibraryConfig(CurrentLibraryConfig currentLibraryConfig) {
+    if (countDownLatch != null && countDownLatch.getCount() == 0) {
+      return;
+    }
+    currentLibraryConfigs.add(currentLibraryConfig);
+  }
+
+  private synchronized void addExportRequest(ExportTraceServiceRequest request) {
+    exportTraceServiceRequests.add(request);
+  }
+
   // Returns the stored CurrentLibraryConfigs.
-  List<CurrentLibraryConfig> getCurrentLibraryConfigs() {
+  synchronized List<CurrentLibraryConfig> getCurrentLibraryConfigs() {
     return Collections.unmodifiableList(currentLibraryConfigs);
   }
 
   // Returns the stored ExportTraceServiceRequests.
-  List<ExportTraceServiceRequest> getExportTraceServiceRequests() {
+  synchronized List<ExportTraceServiceRequest> getExportTraceServiceRequests() {
     return Collections.unmodifiableList(exportTraceServiceRequests);
   }
 
   // Sets the UpdatedLibraryConfig that will be passed to client.
-  void setUpdatedLibraryConfig(UpdatedLibraryConfig updatedLibraryConfig) {
+  synchronized void setUpdatedLibraryConfig(UpdatedLibraryConfig updatedLibraryConfig) {
     this.updatedLibraryConfig = updatedLibraryConfig;
   }
 
   // Gets the UpdatedLibraryConfig that will be passed to client.
-  UpdatedLibraryConfig getUpdatedLibraryConfig() {
+  synchronized UpdatedLibraryConfig getUpdatedLibraryConfig() {
     return updatedLibraryConfig;
   }
 
-  static void startServer(String endPoint) throws IOException {
-    ServerBuilder<?> builder = NettyServerBuilder.forAddress(parseEndpoint(endPoint));
-    Executor executor = MoreExecutors.directExecutor();
-    builder.executor(executor);
-    final Server server = builder.addService(new FakeOcAgentTraceServiceGrpcImpl()).build();
-    server.start();
-    logger.info("Server started at " + endPoint);
-
-    Runtime.getRuntime()
-        .addShutdownHook(
-            new Thread() {
-              @Override
-              public void run() {
-                server.shutdown();
-              }
-            });
-
-    try {
-      server.awaitTermination();
-    } catch (InterruptedException e) {
-      logger.warning("Thread interrupted: " + e.getMessage());
-      Thread.currentThread().interrupt();
+  private synchronized void sendUpdatedLibraryConfig() {
+    @Nullable
+    StreamObserver<UpdatedLibraryConfig> configRequestObserver = configRequestObserverRef.get();
+    if (configRequestObserver != null) {
+      configRequestObserver.onNext(updatedLibraryConfig);
+    }
+    if (countDownLatch != null) {
+      countDownLatch.countDown();
     }
   }
 
-  private static InetSocketAddress parseEndpoint(String endPoint) {
-    try {
-      int colonIndex = endPoint.indexOf(":");
-      String host = endPoint.substring(0, colonIndex);
-      int port = Integer.parseInt(endPoint.substring(colonIndex + 1));
-      return new InetSocketAddress(host, port);
-    } catch (RuntimeException e) {
-      logger.warning("Unexpected format of end point: " + endPoint + ", use default end point.");
-      return new InetSocketAddress("localhost", 55678);
-    }
+  // Closes config stream and resets the reference to configRequestObserver.
+  synchronized void closeConfigStream() {
+    configResponseObserver.onCompleted();
+  }
+
+  private synchronized void resetConfigRequestObserverRef() {
+    configRequestObserverRef.set(null);
+  }
+
+  @VisibleForTesting
+  synchronized void setCountDownLatch(CountDownLatch countDownLatch) {
+    this.countDownLatch = countDownLatch;
   }
 }
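
With the in-process server bootstrap removed from the fake service, tests now stand up their own gRPC server around it. A hedged sketch using the Netty server API this diff already relies on (the wrapper class and fixed local address are hypothetical; the integration test below builds an equivalent server and calls start() when the "Agent" should come up):

    package io.opencensus.exporter.trace.ocagent;

    import io.grpc.Server;
    import io.grpc.netty.NettyServerBuilder;
    import java.net.InetSocketAddress;

    final class FakeAgentServerSketch {
      // Builds (but does not start) a server that exposes the fake TraceService implementation.
      static Server bindFakeAgent(FakeOcAgentTraceServiceGrpcImpl fakeService) {
        return NettyServerBuilder.forAddress(new InetSocketAddress("localhost", 55678))
            .addService(fakeService)
            .build();
      }
    }
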
diff --git a/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/FakeOcAgentTraceServiceGrpcImplTest.java b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/FakeOcAgentTraceServiceGrpcImplTest.java
index f619021..44b0a24 100644
--- a/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/FakeOcAgentTraceServiceGrpcImplTest.java
+++ b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/FakeOcAgentTraceServiceGrpcImplTest.java
@@ -24,6 +24,7 @@
 import io.opencensus.proto.agent.trace.v1.ExportTraceServiceResponse;
 import io.opencensus.proto.agent.trace.v1.UpdatedLibraryConfig;
 import io.opencensus.proto.trace.v1.ConstantSampler;
+import io.opencensus.proto.trace.v1.ConstantSampler.ConstantDecision;
 import io.opencensus.proto.trace.v1.TraceConfig;
 import java.util.ArrayList;
 import java.util.List;
@@ -68,7 +69,8 @@
       UpdatedLibraryConfig.newBuilder()
           .setConfig(
               TraceConfig.newBuilder()
-                  .setConstantSampler(ConstantSampler.newBuilder().setDecision(false).build())
+                  .setConstantSampler(
+                      ConstantSampler.newBuilder().setDecision(ConstantDecision.ALWAYS_OFF).build())
                   .build())
           .build();
 
diff --git a/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentNodeUtilsTest.java b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentNodeUtilsTest.java
index 813066b..a718b49 100644
--- a/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentNodeUtilsTest.java
+++ b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentNodeUtilsTest.java
@@ -18,18 +18,14 @@
 
 import static com.google.common.truth.Truth.assertThat;
 import static io.opencensus.exporter.trace.ocagent.OcAgentNodeUtils.OC_AGENT_EXPORTER_VERSION;
-import static io.opencensus.exporter.trace.ocagent.OcAgentNodeUtils.RESOURCE_LABEL_ATTRIBUTE_KEY;
-import static io.opencensus.exporter.trace.ocagent.OcAgentNodeUtils.RESOURCE_TYPE_ATTRIBUTE_KEY;
 
 import io.opencensus.common.Timestamp;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.AwsEc2InstanceMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGceInstanceMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGkeContainerMonitoredResource;
 import io.opencensus.proto.agent.common.v1.LibraryInfo;
 import io.opencensus.proto.agent.common.v1.LibraryInfo.Language;
 import io.opencensus.proto.agent.common.v1.ProcessIdentifier;
 import io.opencensus.proto.agent.common.v1.ServiceInfo;
-import java.util.Map;
+import io.opencensus.proto.resource.v1.Resource;
+import java.util.Collections;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
@@ -38,20 +34,9 @@
 @RunWith(JUnit4.class)
 public class OcAgentNodeUtilsTest {
 
-  private static final AwsEc2InstanceMonitoredResource AWS_RESOURCE =
-      AwsEc2InstanceMonitoredResource.create("account1", "instance1", "us-east-2");
-  private static final GcpGceInstanceMonitoredResource GCE_RESOURCE =
-      GcpGceInstanceMonitoredResource.create("account2", "instance2", "us-west2");
-  private static final GcpGkeContainerMonitoredResource GKE_RESOURCE =
-      GcpGkeContainerMonitoredResource.create(
-          "account3", "cluster", "container", "", "instance3", "", "us-west4");
-
-  @Test
-  public void testConstants() {
-    assertThat(OC_AGENT_EXPORTER_VERSION).isEqualTo("0.17.0-SNAPSHOT");
-    assertThat(RESOURCE_TYPE_ATTRIBUTE_KEY).isEqualTo("OPENCENSUS_SOURCE_TYPE");
-    assertThat(RESOURCE_LABEL_ATTRIBUTE_KEY).isEqualTo("OPENCENSUS_SOURCE_LABELS");
-  }
+  private static final io.opencensus.resource.Resource CUSTOM_RESOURCE =
+      io.opencensus.resource.Resource.create(
+          "some environment", Collections.singletonMap("k1", "v1"));
 
   @Test
   public void getProcessIdentifier() {
@@ -81,42 +66,15 @@
   }
 
   @Test
-  public void getAttributeMap_Null() {
-    Map<String, String> attributeMap = OcAgentNodeUtils.getAttributeMap(null);
-    assertThat(attributeMap).isEmpty();
+  public void toResourceProto_Null() {
+    Resource resourceProto = OcAgentNodeUtils.toResourceProto(null);
+    assertThat(resourceProto).isNull();
   }
 
   @Test
-  public void getAttributeMap_AwsEc2Resource() {
-    Map<String, String> attributeMap = OcAgentNodeUtils.getAttributeMap(AWS_RESOURCE);
-    assertThat(attributeMap)
-        .containsExactly(
-            RESOURCE_TYPE_ATTRIBUTE_KEY,
-            "AWS_EC2_INSTANCE",
-            RESOURCE_LABEL_ATTRIBUTE_KEY,
-            "aws_account=account1,instance_id=instance1,region=us-east-2");
-  }
-
-  @Test
-  public void getAttributeMap_GceResource() {
-    Map<String, String> attributeMap = OcAgentNodeUtils.getAttributeMap(GCE_RESOURCE);
-    assertThat(attributeMap)
-        .containsExactly(
-            RESOURCE_TYPE_ATTRIBUTE_KEY,
-            "GCP_GCE_INSTANCE",
-            RESOURCE_LABEL_ATTRIBUTE_KEY,
-            "gcp_account=account2,instance_id=instance2,zone=us-west2");
-  }
-
-  @Test
-  public void getAttributeMap_GkeResource() {
-    Map<String, String> attributeMap = OcAgentNodeUtils.getAttributeMap(GKE_RESOURCE);
-    assertThat(attributeMap)
-        .containsExactly(
-            RESOURCE_TYPE_ATTRIBUTE_KEY,
-            "GCP_GKE_CONTAINER",
-            RESOURCE_LABEL_ATTRIBUTE_KEY,
-            "gcp_account=account3,instance_id=instance3,location=us-west4,"
-                + "cluster_name=cluster,container_name=container");
+  public void toResourceProto() {
+    Resource resourceProto = OcAgentNodeUtils.toResourceProto(CUSTOM_RESOURCE);
+    assertThat(resourceProto.getType()).isEqualTo("some environment");
+    assertThat(resourceProto.getLabelsMap()).containsExactly("k1", "v1");
   }
 }
diff --git a/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterConfigurationTest.java b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterConfigurationTest.java
index 81bc5c6..daca516 100644
--- a/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterConfigurationTest.java
+++ b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterConfigurationTest.java
@@ -18,7 +18,10 @@
 
 import static com.google.common.truth.Truth.assertThat;
 
+import io.netty.handler.ssl.SslContext;
+import io.netty.handler.ssl.SslContextBuilder;
 import io.opencensus.common.Duration;
+import javax.net.ssl.SSLException;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
@@ -31,28 +34,39 @@
   public void defaultConfiguration() {
     OcAgentTraceExporterConfiguration configuration =
         OcAgentTraceExporterConfiguration.builder().build();
-    assertThat(configuration.getEndPoint()).isNull();
-    assertThat(configuration.getServiceName()).isNull();
-    assertThat(configuration.getUseInsecure()).isNull();
-    assertThat(configuration.getRetryInterval()).isNull();
+    assertThat(configuration.getEndPoint())
+        .isEqualTo(OcAgentTraceExporterConfiguration.DEFAULT_END_POINT);
+    assertThat(configuration.getServiceName())
+        .isEqualTo(OcAgentTraceExporterConfiguration.DEFAULT_SERVICE_NAME);
+    assertThat(configuration.getUseInsecure()).isTrue();
+    assertThat(configuration.getSslContext()).isNull();
+    assertThat(configuration.getRetryInterval())
+        .isEqualTo(OcAgentTraceExporterConfiguration.DEFAULT_RETRY_INTERVAL);
     assertThat(configuration.getEnableConfig()).isTrue();
+    assertThat(configuration.getDeadline())
+        .isEqualTo(OcAgentTraceExporterConfiguration.DEFAULT_DEADLINE);
   }
 
   @Test
-  public void setAndGet() {
+  public void setAndGet() throws SSLException {
     Duration oneMinute = Duration.create(60, 0);
+    SslContext sslContext = SslContextBuilder.forClient().build();
     OcAgentTraceExporterConfiguration configuration =
         OcAgentTraceExporterConfiguration.builder()
             .setEndPoint("192.168.0.1:50051")
             .setServiceName("service")
-            .setUseInsecure(true)
+            .setUseInsecure(false)
+            .setSslContext(sslContext)
             .setRetryInterval(oneMinute)
             .setEnableConfig(false)
+            .setDeadline(oneMinute)
             .build();
     assertThat(configuration.getEndPoint()).isEqualTo("192.168.0.1:50051");
     assertThat(configuration.getServiceName()).isEqualTo("service");
-    assertThat(configuration.getUseInsecure()).isTrue();
+    assertThat(configuration.getUseInsecure()).isFalse();
+    assertThat(configuration.getSslContext()).isEqualTo(sslContext);
     assertThat(configuration.getRetryInterval()).isEqualTo(oneMinute);
     assertThat(configuration.getEnableConfig()).isFalse();
+    assertThat(configuration.getDeadline()).isEqualTo(oneMinute);
   }
 }
diff --git a/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterIntegrationTest.java b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterIntegrationTest.java
new file mode 100644
index 0000000..6f7e29c
--- /dev/null
+++ b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterIntegrationTest.java
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.ocagent;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import com.google.common.util.concurrent.MoreExecutors;
+import com.google.protobuf.ByteString;
+import io.grpc.BindableService;
+import io.grpc.Server;
+import io.grpc.ServerBuilder;
+import io.grpc.netty.NettyServerBuilder;
+import io.opencensus.common.Scope;
+import io.opencensus.proto.agent.common.v1.Node;
+import io.opencensus.proto.agent.trace.v1.ExportTraceServiceRequest;
+import io.opencensus.trace.AttributeValue;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Status;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.config.TraceConfig;
+import io.opencensus.trace.config.TraceParams;
+import io.opencensus.trace.samplers.Samplers;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.Executor;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** End-to-end integration test for {@link OcAgentTraceExporter}. */
+@RunWith(JUnit4.class)
+public class OcAgentTraceExporterIntegrationTest {
+
+  private Server agent;
+  private FakeOcAgentTraceServiceGrpcImpl fakeOcAgentTraceServiceGrpc;
+  private final Tracer tracer = Tracing.getTracer();
+
+  private static final String SERVICE_NAME = "integration-test";
+
+  @Before
+  public void setUp() throws IOException {
+    fakeOcAgentTraceServiceGrpc = new FakeOcAgentTraceServiceGrpcImpl();
+    agent =
+        getServer(OcAgentTraceExporterConfiguration.DEFAULT_END_POINT, fakeOcAgentTraceServiceGrpc);
+  }
+
+  @After
+  public void tearDown() {
+    OcAgentTraceExporter.unregister();
+    agent.shutdown();
+    Tracing.getTraceConfig().updateActiveTraceParams(TraceParams.DEFAULT);
+  }
+
+  @Test
+  public void testExportSpans() throws InterruptedException, IOException {
+    // Mock a real-life scenario in production, where Agent is not enabled at first, then enabled
+    // after an outage. Users should be able to see traces shortly after Agent is up.
+
+    // Configure to be always-sampled.
+    TraceConfig traceConfig = Tracing.getTraceConfig();
+    TraceParams activeTraceParams = traceConfig.getActiveTraceParams();
+    traceConfig.updateActiveTraceParams(
+        activeTraceParams.toBuilder().setSampler(Samplers.alwaysSample()).build());
+
+    // Register the OcAgent Exporter first.
+    // Agent is not yet up and running so Exporter will just retry connection.
+    OcAgentTraceExporter.createAndRegister(
+        OcAgentTraceExporterConfiguration.builder()
+            .setServiceName(SERVICE_NAME)
+            .setUseInsecure(true)
+            .setEnableConfig(false)
+            .build());
+
+    // Create one root span and 5 children.
+    try (Scope scope = tracer.spanBuilder("root").startScopedSpan()) {
+      for (int i = 0; i < 5; i++) {
+        // Fake work
+        doWork("first-iteration-child-" + i, i);
+      }
+    }
+
+    // Wait 5s so that SpanExporter exports all spans.
+    Thread.sleep(5000);
+
+    // No interaction with Agent so far.
+    assertThat(fakeOcAgentTraceServiceGrpc.getExportTraceServiceRequests()).isEmpty();
+
+    // Imagine an outage happened; now start Agent. Exporter should be able to connect to Agent
+    // when the next batch of SpanData arrives.
+    agent.start();
+
+    // Create one root span and 8 children.
+    try (Scope scope = tracer.spanBuilder("root2").startScopedSpan()) {
+      for (int i = 0; i < 8; i++) {
+        // Fake work
+        doWork("second-iteration-child-" + i, i);
+      }
+    }
+
+    // Wait 5s so that SpanExporter exports all spans.
+    Thread.sleep(5000);
+
+    List<ExportTraceServiceRequest> exportRequests =
+        fakeOcAgentTraceServiceGrpc.getExportTraceServiceRequests();
+    assertThat(exportRequests.size()).isAtLeast(2);
+
+    ExportTraceServiceRequest firstRequest = exportRequests.get(0);
+    Node expectedNode = OcAgentNodeUtils.getNodeInfo(SERVICE_NAME);
+    Node actualNode = firstRequest.getNode();
+    assertThat(actualNode.getIdentifier().getHostName())
+        .isEqualTo(expectedNode.getIdentifier().getHostName());
+    assertThat(actualNode.getIdentifier().getPid())
+        .isEqualTo(expectedNode.getIdentifier().getPid());
+    assertThat(actualNode.getLibraryInfo()).isEqualTo(expectedNode.getLibraryInfo());
+    assertThat(actualNode.getServiceInfo()).isEqualTo(expectedNode.getServiceInfo());
+
+    List<io.opencensus.proto.trace.v1.Span> spanProtos = new ArrayList<>();
+    for (int i = 1; i < exportRequests.size(); i++) {
+      spanProtos.addAll(exportRequests.get(i).getSpansList());
+    }
+
+    // On some platforms (e.g. Windows) SpanData will never be dropped, so spans from the first batch
+    // may also be exported after Agent is up.
+    assertThat(spanProtos.size()).isAtLeast(9);
+
+    Set<String> exportedSpanNames = new HashSet<>();
+    for (io.opencensus.proto.trace.v1.Span spanProto : spanProtos) {
+      if ("root2".equals(spanProto.getName().getValue())) {
+        assertThat(spanProto.getChildSpanCount().getValue()).isEqualTo(8);
+        assertThat(spanProto.getParentSpanId()).isEqualTo(ByteString.EMPTY);
+      } else if ("root".equals(spanProto.getName().getValue())) {
+        // This won't happen on Linux but does happen on Windows.
+        assertThat(spanProto.getChildSpanCount().getValue()).isEqualTo(5);
+        assertThat(spanProto.getParentSpanId()).isEqualTo(ByteString.EMPTY);
+      }
+      exportedSpanNames.add(spanProto.getName().getValue());
+    }
+
+    // The second batch of spans should be exported no matter what.
+    assertThat(exportedSpanNames).contains("root2");
+    for (int i = 0; i < 8; i++) {
+      assertThat(exportedSpanNames).contains("second-iteration-child-" + i);
+    }
+  }
+
+  @Test
+  public void testConfig() {
+    //    OcAgentTraceExporter.createAndRegister(
+    //        OcAgentTraceExporterConfiguration.builder()
+    //            .setServiceName(SERVICE_NAME)
+    //            .setUseInsecure(true)
+    //            .setEnableConfig(false)
+    //            .build());
+
+    // TODO(songya): complete this test once Config is fully implemented.
+  }
+
+  private void doWork(String spanName, int i) {
+    try (Scope scope = tracer.spanBuilder(spanName).startScopedSpan()) {
+      // Simulate some work.
+      Span span = tracer.getCurrentSpan();
+
+      try {
+        Thread.sleep(10L);
+      } catch (InterruptedException e) {
+        span.setStatus(Status.INTERNAL.withDescription(e.toString()));
+      }
+
+      Map<String, AttributeValue> attributes = new HashMap<String, AttributeValue>();
+      attributes.put("inner work iteration number", AttributeValue.longAttributeValue(i));
+      span.addAnnotation("Invoking doWork", attributes);
+    }
+  }
+
+  private static Server getServer(String endPoint, BindableService service) throws IOException {
+    ServerBuilder<?> builder = NettyServerBuilder.forAddress(parseEndpoint(endPoint));
+    Executor executor = MoreExecutors.directExecutor();
+    builder.executor(executor);
+    return builder.addService(service).build();
+  }
+
+  private static InetSocketAddress parseEndpoint(String endPoint) {
+    try {
+      int colonIndex = endPoint.indexOf(":");
+      String host = endPoint.substring(0, colonIndex);
+      int port = Integer.parseInt(endPoint.substring(colonIndex + 1));
+      return new InetSocketAddress(host, port);
+    } catch (RuntimeException e) {
+      return new InetSocketAddress("localhost", 55678);
+    }
+  }
+}
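
For reference, a minimal application-side sketch of the registration flow this test exercises. Class and service names are illustrative; the agent endpoint defaults to localhost:55678 as seen in parseEndpoint() above.

```java
import io.opencensus.common.Scope;
import io.opencensus.exporter.trace.ocagent.OcAgentTraceExporter;
import io.opencensus.exporter.trace.ocagent.OcAgentTraceExporterConfiguration;
import io.opencensus.trace.Tracer;
import io.opencensus.trace.Tracing;

/** Hypothetical application bootstrap mirroring the test's registration flow. */
public final class OcAgentExporterUsageSketch {
  public static void main(String[] args) {
    // Register once at startup; the exporter keeps retrying until the agent is reachable.
    OcAgentTraceExporter.createAndRegister(
        OcAgentTraceExporterConfiguration.builder()
            .setServiceName("my-service")
            .setUseInsecure(true)
            .setEnableConfig(false)
            .build());

    Tracer tracer = Tracing.getTracer();
    try (Scope scope = tracer.spanBuilder("startup-work").startScopedSpan()) {
      // Spans recorded here are batched and sent to the agent by the SpanExporter worker.
    }

    // Unregister on shutdown so no further batches are sent.
    OcAgentTraceExporter.unregister();
  }
}
```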
diff --git a/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterTest.java b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterTest.java
index c58acdb..5cf3532 100644
--- a/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterTest.java
+++ b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceExporterTest.java
@@ -16,8 +16,8 @@
 
 package io.opencensus.exporter.trace.ocagent;
 
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.eq;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.verify;
 
 import io.opencensus.trace.export.SpanExporter;
@@ -45,8 +45,7 @@
     OcAgentTraceExporter.register(spanExporter, handler);
     verify(spanExporter)
         .registerHandler(
-            eq("io.opencensus.exporter.trace.ocagent.OcAgentTraceExporter"),
-            any(OcAgentTraceExporterHandler.class));
+            eq("io.opencensus.exporter.trace.ocagent.OcAgentTraceExporter"), any(Handler.class));
     OcAgentTraceExporter.unregister(spanExporter);
     verify(spanExporter)
         .unregisterHandler(eq("io.opencensus.exporter.trace.ocagent.OcAgentTraceExporter"));
diff --git a/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceServiceRpcHandlersTest.java b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceServiceRpcHandlersTest.java
new file mode 100644
index 0000000..af7ed11
--- /dev/null
+++ b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/OcAgentTraceServiceRpcHandlersTest.java
@@ -0,0 +1,186 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.ocagent;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import io.grpc.ManagedChannel;
+import io.grpc.Server;
+import io.grpc.Status;
+import io.grpc.inprocess.InProcessChannelBuilder;
+import io.grpc.inprocess.InProcessServerBuilder;
+import io.opencensus.proto.agent.common.v1.LibraryInfo;
+import io.opencensus.proto.agent.common.v1.LibraryInfo.Language;
+import io.opencensus.proto.agent.common.v1.Node;
+import io.opencensus.proto.agent.trace.v1.CurrentLibraryConfig;
+import io.opencensus.proto.agent.trace.v1.ExportTraceServiceRequest;
+import io.opencensus.proto.agent.trace.v1.TraceServiceGrpc;
+import io.opencensus.proto.agent.trace.v1.TraceServiceGrpc.TraceServiceStub;
+import io.opencensus.trace.config.TraceConfig;
+import io.opencensus.trace.config.TraceParams;
+import java.io.IOException;
+import java.util.concurrent.CountDownLatch;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+
+/**
+ * Unit tests for {@link OcAgentTraceServiceConfigRpcHandler} and {@link
+ * OcAgentTraceServiceExportRpcHandler}.
+ */
+@RunWith(JUnit4.class)
+public class OcAgentTraceServiceRpcHandlersTest {
+
+  @Mock private TraceConfig mockTraceConfig;
+
+  private Server server;
+  private FakeOcAgentTraceServiceGrpcImpl traceServiceGrpc;
+  private String serverName;
+
+  private static final Node NODE =
+      Node.newBuilder()
+          .setLibraryInfo(LibraryInfo.newBuilder().setLanguage(Language.JAVA).build())
+          .build();
+  private static final io.opencensus.proto.trace.v1.TraceConfig TRACE_CONFIG_DEFAULT_PROTO =
+      TraceProtoUtils.toTraceConfigProto(TraceParams.DEFAULT);
+
+  @Before
+  public void setUp() throws IOException {
+    MockitoAnnotations.initMocks(this);
+    Mockito.doReturn(TraceParams.DEFAULT).when(mockTraceConfig).getActiveTraceParams();
+    Mockito.doNothing().when(mockTraceConfig).updateActiveTraceParams(any(TraceParams.class));
+
+    traceServiceGrpc = new FakeOcAgentTraceServiceGrpcImpl();
+    serverName = InProcessServerBuilder.generateName();
+    server =
+        InProcessServerBuilder.forName(serverName)
+            .directExecutor() // directExecutor is fine for unit tests
+            .addService(traceServiceGrpc)
+            .build()
+            .start();
+  }
+
+  @After
+  public void tearDown() {
+    if (server != null && !server.isTerminated()) {
+      server.shutdown();
+    }
+  }
+
+  @Test
+  public void config_CreateAndSend() throws InterruptedException {
+    CountDownLatch countDownLatch = new CountDownLatch(1);
+    traceServiceGrpc.setCountDownLatch(countDownLatch);
+
+    // Config RPC handler needs to be running in another thread.
+    Runnable configRunnable =
+        new Runnable() {
+          @Override
+          public void run() {
+            TraceServiceStub stub = getStub(serverName);
+            OcAgentTraceServiceConfigRpcHandler configRpcHandler =
+                OcAgentTraceServiceConfigRpcHandler.create(stub, mockTraceConfig);
+            assertThat(configRpcHandler.isCompleted()).isFalse(); // connection should succeed
+            configRpcHandler.sendInitialMessage(NODE); // this will block this thread
+          }
+        };
+
+    Thread configThread = new Thread(configRunnable);
+    configThread.setDaemon(true);
+    configThread.setName("TestConfigRpcHandlerThread");
+    configThread.start();
+
+    // Wait until fake agent received the first message.
+    countDownLatch.await();
+    traceServiceGrpc.closeConfigStream();
+
+    // Verify fake Agent (server) received the expected CurrentLibraryConfig.
+    CurrentLibraryConfig expectedCurrentConfig =
+        CurrentLibraryConfig.newBuilder()
+            .setNode(NODE)
+            .setConfig(TRACE_CONFIG_DEFAULT_PROTO)
+            .build();
+    assertThat(traceServiceGrpc.getCurrentLibraryConfigs()).containsExactly(expectedCurrentConfig);
+
+    // Verify ConfigRpcHandler (client) received the expected UpdatedLibraryConfig.
+    TraceParams expectedParams =
+        TraceProtoUtils.getUpdatedTraceParams(
+            traceServiceGrpc.getUpdatedLibraryConfig(), mockTraceConfig);
+    verify(mockTraceConfig, times(1)).updateActiveTraceParams(expectedParams);
+  }
+
+  @Test
+  public void config_Create_ConnectionFailed() {
+    String nonExistingServer = "unknown";
+    OcAgentTraceServiceConfigRpcHandler configRpcHandler =
+        OcAgentTraceServiceConfigRpcHandler.create(getStub(nonExistingServer), mockTraceConfig);
+    verify(mockTraceConfig, times(0)).getActiveTraceParams();
+    assertThat(configRpcHandler.isCompleted()).isTrue();
+    assertThat(configRpcHandler.getTerminateStatus().getCode()).isEqualTo(Status.Code.UNAVAILABLE);
+  }
+
+  @Test
+  public void config_Complete_Interrupted() {
+    OcAgentTraceServiceConfigRpcHandler configRpcHandler =
+        OcAgentTraceServiceConfigRpcHandler.create(getStub(serverName), mockTraceConfig);
+    assertThat(configRpcHandler.isCompleted()).isFalse();
+    configRpcHandler.onComplete(new InterruptedException());
+    assertThat(configRpcHandler.isCompleted()).isTrue();
+    assertThat(configRpcHandler.getTerminateStatus()).isEqualTo(Status.UNKNOWN);
+  }
+
+  @Test
+  public void export_createAndExport() {
+    OcAgentTraceServiceExportRpcHandler exportRpcHandler =
+        OcAgentTraceServiceExportRpcHandler.create(getStub(serverName));
+    ExportTraceServiceRequest request = ExportTraceServiceRequest.newBuilder().build();
+    exportRpcHandler.onExport(request);
+    assertThat(traceServiceGrpc.getExportTraceServiceRequests()).containsExactly(request);
+  }
+
+  @Test
+  public void export_Create_ConnectionFailed() {
+    String nonExistingServer = "unknown";
+    OcAgentTraceServiceExportRpcHandler exportRpcHandler =
+        OcAgentTraceServiceExportRpcHandler.create(getStub(nonExistingServer));
+    assertThat(exportRpcHandler.isCompleted()).isTrue();
+    assertThat(exportRpcHandler.getTerminateStatus().getCode()).isEqualTo(Status.Code.UNAVAILABLE);
+  }
+
+  @Test
+  public void export_Complete_Interrupted() {
+    OcAgentTraceServiceExportRpcHandler exportRpcHandler =
+        OcAgentTraceServiceExportRpcHandler.create(getStub(serverName));
+    assertThat(exportRpcHandler.isCompleted()).isFalse();
+    exportRpcHandler.onComplete(new InterruptedException());
+    assertThat(exportRpcHandler.isCompleted()).isTrue();
+    assertThat(exportRpcHandler.getTerminateStatus()).isEqualTo(Status.UNKNOWN);
+  }
+
+  private static TraceServiceStub getStub(String serverName) {
+    ManagedChannel channel = InProcessChannelBuilder.forName(serverName).directExecutor().build();
+    return TraceServiceGrpc.newStub(channel);
+  }
+}
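
The in-process gRPC wiring used in setUp()/getStub() is a reusable pattern; a stripped-down sketch of the same wiring, assuming it lives alongside the package-private FakeOcAgentTraceServiceGrpcImpl test double in this package:

```java
import io.grpc.ManagedChannel;
import io.grpc.Server;
import io.grpc.inprocess.InProcessChannelBuilder;
import io.grpc.inprocess.InProcessServerBuilder;
import io.opencensus.proto.agent.trace.v1.TraceServiceGrpc;
import io.opencensus.proto.agent.trace.v1.TraceServiceGrpc.TraceServiceStub;
import java.io.IOException;

/** Sketch only; mirrors the server/stub setup used by the RPC handler tests. */
final class InProcessStubSketch {
  static void demonstrateInProcessWiring(FakeOcAgentTraceServiceGrpcImpl fakeAgent)
      throws IOException {
    // A unique name binds the in-process channel to the in-process server; no sockets involved.
    String serverName = InProcessServerBuilder.generateName();
    Server server =
        InProcessServerBuilder.forName(serverName)
            .directExecutor()
            .addService(fakeAgent)
            .build()
            .start();
    ManagedChannel channel =
        InProcessChannelBuilder.forName(serverName).directExecutor().build();
    TraceServiceStub stub = TraceServiceGrpc.newStub(channel);
    // ... exercise the Config/Export RPC handlers against the stub, then clean up.
    channel.shutdownNow();
    server.shutdownNow();
  }
}
```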
diff --git a/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/TraceProtoUtilsTest.java b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/TraceProtoUtilsTest.java
index 74c7c29..7fc26dc 100644
--- a/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/TraceProtoUtilsTest.java
+++ b/exporters/trace/ocagent/src/test/java/io/opencensus/exporter/trace/ocagent/TraceProtoUtilsTest.java
@@ -28,6 +28,7 @@
 import io.opencensus.proto.agent.trace.v1.UpdatedLibraryConfig;
 import io.opencensus.proto.trace.v1.AttributeValue;
 import io.opencensus.proto.trace.v1.ConstantSampler;
+import io.opencensus.proto.trace.v1.ConstantSampler.ConstantDecision;
 import io.opencensus.proto.trace.v1.ProbabilitySampler;
 import io.opencensus.proto.trace.v1.Span;
 import io.opencensus.proto.trace.v1.Span.SpanKind;
@@ -260,7 +261,7 @@
     assertThat(span.getTimeEvents().getDroppedAnnotationsCount())
         .isEqualTo(DROPPED_ANNOTATIONS_COUNT);
     assertThat(span.getTimeEvents().getTimeEventList())
-        .containsAllOf(annotationTimeEvent1, annotationTimeEvent2, sentTimeEvent, recvTimeEvent);
+        .containsExactly(annotationTimeEvent1, annotationTimeEvent2, sentTimeEvent, recvTimeEvent);
     assertThat(span.getLinks()).isEqualTo(spanLinks);
     assertThat(span.getStatus()).isEqualTo(spanStatus);
     assertThat(span.getSameProcessAsParentSpan()).isEqualTo(BoolValue.of(true));
@@ -273,7 +274,8 @@
     assertThat(TraceProtoUtils.toTraceConfigProto(getTraceParams(Samplers.alwaysSample())))
         .isEqualTo(
             TraceConfig.newBuilder()
-                .setConstantSampler(ConstantSampler.newBuilder().setDecision(true).build())
+                .setConstantSampler(
+                    ConstantSampler.newBuilder().setDecision(ConstantDecision.ALWAYS_ON).build())
                 .build());
   }
 
@@ -282,7 +284,8 @@
     assertThat(TraceProtoUtils.toTraceConfigProto(getTraceParams(Samplers.neverSample())))
         .isEqualTo(
             TraceConfig.newBuilder()
-                .setConstantSampler(ConstantSampler.newBuilder().setDecision(false).build())
+                .setConstantSampler(
+                    ConstantSampler.newBuilder().setDecision(ConstantDecision.ALWAYS_OFF).build())
                 .build());
   }
 
@@ -300,7 +303,8 @@
   public void fromTraceConfigProto_AlwaysSampler() {
     TraceConfig traceConfig =
         TraceConfig.newBuilder()
-            .setConstantSampler(ConstantSampler.newBuilder().setDecision(true).build())
+            .setConstantSampler(
+                ConstantSampler.newBuilder().setDecision(ConstantDecision.ALWAYS_ON).build())
             .build();
     assertThat(TraceProtoUtils.fromTraceConfigProto(traceConfig, DEFAULT_PARAMS).getSampler())
         .isEqualTo(Samplers.alwaysSample());
@@ -310,7 +314,8 @@
   public void fromTraceConfigProto_NeverSampler() {
     TraceConfig traceConfig =
         TraceConfig.newBuilder()
-            .setConstantSampler(ConstantSampler.newBuilder().setDecision(false).build())
+            .setConstantSampler(
+                ConstantSampler.newBuilder().setDecision(ConstantDecision.ALWAYS_OFF).build())
             .build();
     assertThat(TraceProtoUtils.fromTraceConfigProto(traceConfig, DEFAULT_PARAMS).getSampler())
         .isEqualTo(Samplers.neverSample());
diff --git a/exporters/trace/stackdriver/README.md b/exporters/trace/stackdriver/README.md
index 9186a47..0af5f06 100644
--- a/exporters/trace/stackdriver/README.md
+++ b/exporters/trace/stackdriver/README.md
@@ -34,17 +34,17 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-api</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-exporter-trace-stackdriver</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-impl</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
     <scope>runtime</scope>
   </dependency>
 </dependencies>
@@ -52,9 +52,9 @@
 
 For Gradle add to your dependencies:
 ```groovy
-compile 'io.opencensus:opencensus-api:0.16.1'
-compile 'io.opencensus:opencensus-exporter-trace-stackdriver:0.16.1'
-runtime 'io.opencensus:opencensus-impl:0.16.1'
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-exporter-trace-stackdriver:0.28.3'
+runtime 'io.opencensus:opencensus-impl:0.28.3'
 ```
 
 #### Register the exporter
diff --git a/exporters/trace/stackdriver/build.gradle b/exporters/trace/stackdriver/build.gradle
index 83dc970..bacdefb 100644
--- a/exporters/trace/stackdriver/build.gradle
+++ b/exporters/trace/stackdriver/build.gradle
@@ -9,8 +9,12 @@
     compileOnly libraries.auto_value
 
     compile project(':opencensus-api'),
-            project(':opencensus-contrib-monitored-resource-util'),
+            project(':opencensus-contrib-resource-util'),
             libraries.google_auth,
+            libraries.grpc_auth,
+            libraries.grpc_core,
+            libraries.grpc_netty_shaded,
+            libraries.grpc_stub,
             libraries.guava
 
     compile (libraries.google_cloud_trace) {
@@ -20,12 +24,16 @@
         // Prefer library version.
         exclude group: 'com.google.code.findbugs', module: 'jsr305'
 
+        // Prefer library version.
+        exclude group: 'io.grpc', module: 'grpc-auth'
+        exclude group: 'io.grpc', module: 'grpc-core'
+        exclude group: 'io.grpc', module: 'grpc-netty-shaded'
+        exclude group: 'io.grpc', module: 'grpc-stub'
+
         // We will always be more up to date.
         exclude group: 'io.opencensus', module: 'opencensus-api'
     }
 
-    testCompile project(':opencensus-api')
-
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
     signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
 }
diff --git a/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverExporter.java b/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverExporter.java
index 8797cc7..2880d05 100644
--- a/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverExporter.java
+++ b/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverExporter.java
@@ -109,10 +109,18 @@
     StackdriverTraceExporter.createAndRegister(
         StackdriverTraceConfiguration.builder()
             .setCredentials(GoogleCredentials.getApplicationDefault())
-            .setProjectId(ServiceOptions.getDefaultProjectId())
+
+            // TODO(sebright): Handle null default project ID.
+            .setProjectId(castNonNull(ServiceOptions.getDefaultProjectId()))
             .build());
   }
 
+  // TODO(sebright): Remove this method.
+  @SuppressWarnings("nullness")
+  private static <T> T castNonNull(@javax.annotation.Nullable T arg) {
+    return arg;
+  }
+
   /**
    * Registers the {@code StackdriverExporter}.
    *
diff --git a/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceConfiguration.java b/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceConfiguration.java
index f78832d..c19e95e 100644
--- a/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceConfiguration.java
+++ b/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceConfiguration.java
@@ -18,7 +18,16 @@
 
 import com.google.auth.Credentials;
 import com.google.auto.value.AutoValue;
+import com.google.cloud.ServiceOptions;
 import com.google.cloud.trace.v2.stub.TraceServiceStub;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import io.opencensus.common.Duration;
+import io.opencensus.trace.AttributeValue;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.Map;
 import javax.annotation.Nullable;
 import javax.annotation.concurrent.Immutable;
 
@@ -31,6 +40,11 @@
 @Immutable
 public abstract class StackdriverTraceConfiguration {
 
+  private static final String DEFAULT_PROJECT_ID =
+      Strings.nullToEmpty(ServiceOptions.getDefaultProjectId());
+
+  @VisibleForTesting static final Duration DEFAULT_DEADLINE = Duration.create(10, 0);
+
   StackdriverTraceConfiguration() {}
 
   /**
@@ -48,7 +62,6 @@
    * @return the cloud project id.
    * @since 0.12
    */
-  @Nullable
   public abstract String getProjectId();
 
   /**
@@ -61,13 +74,34 @@
   public abstract TraceServiceStub getTraceServiceStub();
 
   /**
+   * Returns a map of attributes that is added to all the exported spans.
+   *
+   * @return the map of attributes that is added to all the exported spans.
+   * @since 0.19
+   */
+  public abstract Map<String, AttributeValue> getFixedAttributes();
+
+  /**
+   * Returns the deadline for exporting to Stackdriver Trace backend.
+   *
+   * <p>Default value is 10 seconds.
+   *
+   * @return the export deadline.
+   * @since 0.22
+   */
+  public abstract Duration getDeadline();
+
+  /**
    * Returns a new {@link Builder}.
    *
    * @return a {@code Builder}.
    * @since 0.12
    */
   public static Builder builder() {
-    return new AutoValue_StackdriverTraceConfiguration.Builder();
+    return new AutoValue_StackdriverTraceConfiguration.Builder()
+        .setProjectId(DEFAULT_PROJECT_ID)
+        .setFixedAttributes(Collections.<String, AttributeValue>emptyMap())
+        .setDeadline(DEFAULT_DEADLINE);
   }
 
   /**
@@ -78,6 +112,8 @@
   @AutoValue.Builder
   public abstract static class Builder {
 
+    @VisibleForTesting static final Duration ZERO = Duration.fromMillis(0);
+
     Builder() {}
 
     /**
@@ -108,11 +144,54 @@
     public abstract Builder setTraceServiceStub(TraceServiceStub traceServiceStub);
 
     /**
+     * Sets the map of attributes that is added to all the exported spans.
+     *
+     * @param fixedAttributes the map of attributes that is added to all the exported spans.
+     * @return this.
+     * @since 0.16
+     */
+    public abstract Builder setFixedAttributes(Map<String, AttributeValue> fixedAttributes);
+
+    /**
+     * Sets the deadline for exporting to Stackdriver Trace backend.
+     *
+     * <p>If both {@code TraceServiceStub} and {@code Deadline} are set, {@code TraceServiceStub}
+     * takes precedence and {@code Deadline} will not be respected.
+     *
+     * @param deadline the export deadline.
+     * @return this
+     * @since 0.22
+     */
+    public abstract Builder setDeadline(Duration deadline);
+
+    abstract String getProjectId();
+
+    abstract Map<String, AttributeValue> getFixedAttributes();
+
+    abstract Duration getDeadline();
+
+    abstract StackdriverTraceConfiguration autoBuild();
+
+    /**
      * Builds a {@link StackdriverTraceConfiguration}.
      *
      * @return a {@code StackdriverTraceConfiguration}.
      * @since 0.12
      */
-    public abstract StackdriverTraceConfiguration build();
+    public StackdriverTraceConfiguration build() {
+      // Make a defensive copy of fixed attributes.
+      setFixedAttributes(
+          Collections.unmodifiableMap(
+              new LinkedHashMap<String, AttributeValue>(getFixedAttributes())));
+      Preconditions.checkArgument(
+          !Strings.isNullOrEmpty(getProjectId()),
+          "Cannot find a project ID from either configurations or application default.");
+      for (Map.Entry<String, AttributeValue> fixedAttribute : getFixedAttributes().entrySet()) {
+        Preconditions.checkNotNull(fixedAttribute.getKey(), "attribute key");
+        Preconditions.checkNotNull(fixedAttribute.getValue(), "attribute value");
+      }
+      Preconditions.checkArgument(getDeadline().compareTo(ZERO) > 0, "Deadline must be positive.");
+      return autoBuild();
+    }
   }
 }
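
A minimal sketch of configuring the exporter with the options introduced here (fixed attributes, deadline, and the project-ID default with validation). The project ID and attribute values are placeholders.

```java
import com.google.common.collect.ImmutableMap;
import io.opencensus.common.Duration;
import io.opencensus.exporter.trace.stackdriver.StackdriverTraceConfiguration;
import io.opencensus.exporter.trace.stackdriver.StackdriverTraceExporter;
import io.opencensus.trace.AttributeValue;
import java.io.IOException;
import java.util.Map;

/** Hypothetical setup code; values are illustrative. */
public final class StackdriverConfigSketch {
  public static void main(String[] args) throws IOException {
    Map<String, AttributeValue> fixedAttributes =
        ImmutableMap.of("environment", AttributeValue.stringAttributeValue("staging"));

    StackdriverTraceConfiguration configuration =
        StackdriverTraceConfiguration.builder()
            // If omitted, the builder defaults to ServiceOptions.getDefaultProjectId();
            // build() throws when no project ID can be found.
            .setProjectId("my-gcp-project")
            .setFixedAttributes(fixedAttributes)
            .setDeadline(Duration.create(30, 0)) // must be positive; default is 10 seconds
            .build();

    StackdriverTraceExporter.createAndRegister(configuration);
  }
}
```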
diff --git a/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceExporter.java b/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceExporter.java
index 0182ae9..cd8c2e0 100644
--- a/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceExporter.java
+++ b/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceExporter.java
@@ -78,17 +78,20 @@
       checkState(handler == null, "Stackdriver exporter is already registered.");
       Credentials credentials = configuration.getCredentials();
       String projectId = configuration.getProjectId();
-      projectId = projectId != null ? projectId : ServiceOptions.getDefaultProjectId();
 
       StackdriverV2ExporterHandler handler;
       TraceServiceStub stub = configuration.getTraceServiceStub();
       if (stub == null) {
         handler =
             StackdriverV2ExporterHandler.createWithCredentials(
+                projectId,
                 credentials != null ? credentials : GoogleCredentials.getApplicationDefault(),
-                projectId);
+                configuration.getFixedAttributes(),
+                configuration.getDeadline());
       } else {
-        handler = new StackdriverV2ExporterHandler(projectId, TraceServiceClient.create(stub));
+        handler =
+            StackdriverV2ExporterHandler.createWithStub(
+                projectId, TraceServiceClient.create(stub), configuration.getFixedAttributes());
       }
 
       registerInternal(handler);
diff --git a/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverV2ExporterHandler.java b/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverV2ExporterHandler.java
index de022c3..2a5f77f 100644
--- a/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverV2ExporterHandler.java
+++ b/exporters/trace/stackdriver/src/main/java/io/opencensus/exporter/trace/stackdriver/StackdriverV2ExporterHandler.java
@@ -25,7 +25,6 @@
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableMap;
 import com.google.devtools.cloudtrace.v2.AttributeValue;
-import com.google.devtools.cloudtrace.v2.AttributeValue.Builder;
 import com.google.devtools.cloudtrace.v2.ProjectName;
 import com.google.devtools.cloudtrace.v2.Span;
 import com.google.devtools.cloudtrace.v2.Span.Attributes;
@@ -35,20 +34,19 @@
 import com.google.devtools.cloudtrace.v2.Span.TimeEvent.MessageEvent;
 import com.google.devtools.cloudtrace.v2.SpanName;
 import com.google.devtools.cloudtrace.v2.TruncatableString;
+import com.google.protobuf.BoolValue;
 import com.google.protobuf.Int32Value;
 import com.google.rpc.Status;
+import io.opencensus.common.Duration;
 import io.opencensus.common.Function;
 import io.opencensus.common.Functions;
 import io.opencensus.common.OpenCensusLibraryInformation;
 import io.opencensus.common.Scope;
 import io.opencensus.common.Timestamp;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.AwsEc2InstanceMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGceInstanceMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGkeContainerMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResourceUtils;
-import io.opencensus.contrib.monitoredresource.util.ResourceType;
+import io.opencensus.contrib.resource.util.ResourceUtils;
+import io.opencensus.resource.Resource;
 import io.opencensus.trace.Annotation;
+import io.opencensus.trace.EndSpanOptions;
 import io.opencensus.trace.MessageEvent.Type;
 import io.opencensus.trace.Sampler;
 import io.opencensus.trace.Span.Kind;
@@ -65,6 +63,7 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -98,20 +97,18 @@
           .put("http.status_code", "/http/status_code")
           .build();
 
-  @javax.annotation.Nullable
-  private static final MonitoredResource RESOURCE = MonitoredResourceUtils.getDefaultResource();
-
   // Only initialize once.
-  private static final Map<String, AttributeValue> RESOURCE_LABELS = getResourceLabels(RESOURCE);
+  private static final Map<String, AttributeValue> RESOURCE_LABELS =
+      getResourceLabels(ResourceUtils.detectResource());
 
   // Constant functions for AttributeValue.
   private static final Function<String, /*@Nullable*/ AttributeValue> stringAttributeValueFunction =
       new Function<String, /*@Nullable*/ AttributeValue>() {
         @Override
         public AttributeValue apply(String stringValue) {
-          Builder attributeValueBuilder = AttributeValue.newBuilder();
-          attributeValueBuilder.setStringValue(toTruncatableStringProto(stringValue));
-          return attributeValueBuilder.build();
+          return AttributeValue.newBuilder()
+              .setStringValue(toTruncatableStringProto(stringValue))
+              .build();
         }
       };
   private static final Function<Boolean, /*@Nullable*/ AttributeValue>
@@ -119,60 +116,82 @@
           new Function<Boolean, /*@Nullable*/ AttributeValue>() {
             @Override
             public AttributeValue apply(Boolean booleanValue) {
-              Builder attributeValueBuilder = AttributeValue.newBuilder();
-              attributeValueBuilder.setBoolValue(booleanValue);
-              return attributeValueBuilder.build();
+              return AttributeValue.newBuilder().setBoolValue(booleanValue).build();
             }
           };
   private static final Function<Long, /*@Nullable*/ AttributeValue> longAttributeValueFunction =
       new Function<Long, /*@Nullable*/ AttributeValue>() {
         @Override
         public AttributeValue apply(Long longValue) {
-          Builder attributeValueBuilder = AttributeValue.newBuilder();
-          attributeValueBuilder.setIntValue(longValue);
-          return attributeValueBuilder.build();
+          return AttributeValue.newBuilder().setIntValue(longValue).build();
         }
       };
   private static final Function<Double, /*@Nullable*/ AttributeValue> doubleAttributeValueFunction =
       new Function<Double, /*@Nullable*/ AttributeValue>() {
         @Override
         public AttributeValue apply(Double doubleValue) {
-          Builder attributeValueBuilder = AttributeValue.newBuilder();
+          AttributeValue.Builder attributeValueBuilder = AttributeValue.newBuilder();
           // TODO: set double value if Stackdriver Trace support it in the future.
           attributeValueBuilder.setStringValue(
               toTruncatableStringProto(String.valueOf(doubleValue)));
           return attributeValueBuilder.build();
         }
       };
+  private static final String EXPORT_STACKDRIVER_TRACES = "ExportStackdriverTraces";
+  private static final EndSpanOptions END_SPAN_OPTIONS =
+      EndSpanOptions.builder().setSampleToLocalSpanStore(true).build();
 
+  private final Map<String, AttributeValue> fixedAttributes;
   private final String projectId;
   private final TraceServiceClient traceServiceClient;
   private final ProjectName projectName;
 
-  @VisibleForTesting
-  StackdriverV2ExporterHandler(String projectId, TraceServiceClient traceServiceClient) {
-    this.projectId = checkNotNull(projectId, "projectId");
+  private StackdriverV2ExporterHandler(
+      String projectId,
+      TraceServiceClient traceServiceClient,
+      Map<String, io.opencensus.trace.AttributeValue> fixedAttributes) {
+    this.projectId = projectId;
     this.traceServiceClient = traceServiceClient;
+    this.fixedAttributes = new HashMap<>();
+    for (Map.Entry<String, io.opencensus.trace.AttributeValue> label : fixedAttributes.entrySet()) {
+      AttributeValue value = toAttributeValueProto(label.getValue());
+      if (value != null) {
+        this.fixedAttributes.put(label.getKey(), value);
+      }
+    }
     projectName = ProjectName.of(this.projectId);
+  }
 
-    Tracing.getExportComponent()
-        .getSampledSpanStore()
-        .registerSpanNamesForCollection(Collections.singletonList("ExportStackdriverTraces"));
+  static StackdriverV2ExporterHandler createWithStub(
+      String projectId,
+      TraceServiceClient traceServiceClient,
+      Map<String, io.opencensus.trace.AttributeValue> fixedAttributes) {
+    return new StackdriverV2ExporterHandler(projectId, traceServiceClient, fixedAttributes);
   }
 
   static StackdriverV2ExporterHandler createWithCredentials(
-      Credentials credentials, String projectId) throws IOException {
-    checkNotNull(credentials, "credentials");
-    TraceServiceSettings traceServiceSettings =
+      String projectId,
+      Credentials credentials,
+      Map<String, io.opencensus.trace.AttributeValue> fixedAttributes,
+      Duration deadline)
+      throws IOException {
+    TraceServiceSettings.Builder builder =
         TraceServiceSettings.newBuilder()
-            .setCredentialsProvider(FixedCredentialsProvider.create(credentials))
-            .build();
+            .setCredentialsProvider(
+                FixedCredentialsProvider.create(checkNotNull(credentials, "credentials")));
+    // We only use the batchWriteSpans API in this exporter.
+    builder
+        .batchWriteSpansSettings()
+        .setSimpleTimeoutNoRetries(org.threeten.bp.Duration.ofMillis(deadline.toMillis()));
     return new StackdriverV2ExporterHandler(
-        projectId, TraceServiceClient.create(traceServiceSettings));
+        projectId, TraceServiceClient.create(builder.build()), fixedAttributes);
   }
 
   @VisibleForTesting
-  Span generateSpan(SpanData spanData, Map<String, AttributeValue> resourceLabels) {
+  Span generateSpan(
+      SpanData spanData,
+      Map<String, AttributeValue> resourceLabels,
+      Map<String, AttributeValue> fixedAttributes) {
     SpanContext context = spanData.getContext();
     final String spanIdHex = context.getSpanId().toLowerBase16();
     SpanName spanName =
@@ -188,7 +207,8 @@
             .setDisplayName(
                 toTruncatableStringProto(toDisplayName(spanData.getName(), spanData.getKind())))
             .setStartTime(toTimestampProto(spanData.getStartTimestamp()))
-            .setAttributes(toAttributesProto(spanData.getAttributes(), resourceLabels))
+            .setAttributes(
+                toAttributesProto(spanData.getAttributes(), resourceLabels, fixedAttributes))
             .setTimeEvents(
                 toTimeEventsProto(spanData.getAnnotations(), spanData.getMessageEvents()));
     io.opencensus.trace.Status status = spanData.getStatus();
@@ -207,7 +227,10 @@
     if (spanData.getParentSpanId() != null && spanData.getParentSpanId().isValid()) {
       spanBuilder.setParentSpanId(spanData.getParentSpanId().toLowerBase16());
     }
-
+    /*@Nullable*/ Boolean hasRemoteParent = spanData.getHasRemoteParent();
+    if (hasRemoteParent != null) {
+      spanBuilder.setSameProcessAsParentSpan(BoolValue.of(!hasRemoteParent));
+    }
     return spanBuilder.build();
   }
 
@@ -266,7 +289,8 @@
   // These are the attributes of the Span, where usually we may add more attributes like the agent.
   private static Attributes toAttributesProto(
       io.opencensus.trace.export.SpanData.Attributes attributes,
-      Map<String, AttributeValue> resourceLabels) {
+      Map<String, AttributeValue> resourceLabels,
+      Map<String, AttributeValue> fixedAttributes) {
     Attributes.Builder attributesBuilder =
         toAttributesBuilderProto(
             attributes.getAttributeMap(), attributes.getDroppedAttributesCount());
@@ -274,6 +298,9 @@
     for (Entry<String, AttributeValue> entry : resourceLabels.entrySet()) {
       attributesBuilder.putAttributeMap(entry.getKey(), entry.getValue());
     }
+    for (Entry<String, AttributeValue> entry : fixedAttributes.entrySet()) {
+      attributesBuilder.putAttributeMap(entry.getKey(), entry.getValue());
+    }
     return attributesBuilder.build();
   }
 
@@ -292,105 +319,25 @@
 
   @VisibleForTesting
   static Map<String, AttributeValue> getResourceLabels(
-      @javax.annotation.Nullable MonitoredResource resource) {
+      @javax.annotation.Nullable Resource resource) {
     if (resource == null) {
       return Collections.emptyMap();
     }
-    Map<String, AttributeValue> resourceLabels = new HashMap<String, AttributeValue>();
-    ResourceType resourceType = resource.getResourceType();
-    switch (resourceType) {
-      case AWS_EC2_INSTANCE:
-        AwsEc2InstanceMonitoredResource awsEc2InstanceMonitoredResource =
-            (AwsEc2InstanceMonitoredResource) resource;
-        putToResourceAttributeMap(
-            resourceLabels,
-            resourceType,
-            "aws_account",
-            awsEc2InstanceMonitoredResource.getAccount());
-        putToResourceAttributeMap(
-            resourceLabels,
-            resourceType,
-            "instance_id",
-            awsEc2InstanceMonitoredResource.getInstanceId());
-        putToResourceAttributeMap(
-            resourceLabels,
-            resourceType,
-            "region",
-            "aws:" + awsEc2InstanceMonitoredResource.getRegion());
-        return Collections.unmodifiableMap(resourceLabels);
-      case GCP_GCE_INSTANCE:
-        GcpGceInstanceMonitoredResource gcpGceInstanceMonitoredResource =
-            (GcpGceInstanceMonitoredResource) resource;
-        putToResourceAttributeMap(
-            resourceLabels,
-            resourceType,
-            "project_id",
-            gcpGceInstanceMonitoredResource.getAccount());
-        putToResourceAttributeMap(
-            resourceLabels,
-            resourceType,
-            "instance_id",
-            gcpGceInstanceMonitoredResource.getInstanceId());
-        putToResourceAttributeMap(
-            resourceLabels, resourceType, "zone", gcpGceInstanceMonitoredResource.getZone());
-        return Collections.unmodifiableMap(resourceLabels);
-      case GCP_GKE_CONTAINER:
-        GcpGkeContainerMonitoredResource gcpGkeContainerMonitoredResource =
-            (GcpGkeContainerMonitoredResource) resource;
-        putToResourceAttributeMap(
-            resourceLabels,
-            resourceType,
-            "project_id",
-            gcpGkeContainerMonitoredResource.getAccount());
-        putToResourceAttributeMap(
-            resourceLabels, resourceType, "location", gcpGkeContainerMonitoredResource.getZone());
-        putToResourceAttributeMap(
-            resourceLabels,
-            resourceType,
-            "cluster_name",
-            gcpGkeContainerMonitoredResource.getClusterName());
-        putToResourceAttributeMap(
-            resourceLabels,
-            resourceType,
-            "container_name",
-            gcpGkeContainerMonitoredResource.getContainerName());
-        putToResourceAttributeMap(
-            resourceLabels,
-            resourceType,
-            "namespace_name",
-            gcpGkeContainerMonitoredResource.getNamespaceId());
-        putToResourceAttributeMap(
-            resourceLabels, resourceType, "pod_name", gcpGkeContainerMonitoredResource.getPodId());
-        return Collections.unmodifiableMap(resourceLabels);
+    Map<String, AttributeValue> resourceLabels = new LinkedHashMap<String, AttributeValue>();
+    for (Map.Entry<String, String> entry : resource.getLabels().entrySet()) {
+      putToResourceAttributeMap(resourceLabels, entry.getKey(), entry.getValue());
     }
-    return Collections.emptyMap();
+    return Collections.unmodifiableMap(resourceLabels);
   }
 
   private static void putToResourceAttributeMap(
-      Map<String, AttributeValue> map,
-      ResourceType resourceType,
-      String attributeName,
-      String attributeValue) {
-    map.put(
-        createResourceLabelKey(resourceType, attributeName),
-        toStringAttributeValueProto(attributeValue));
+      Map<String, AttributeValue> map, String attributeName, String attributeValue) {
+    map.put(createResourceLabelKey(attributeName), toStringAttributeValueProto(attributeValue));
   }
 
   @VisibleForTesting
-  static String createResourceLabelKey(ResourceType resourceType, String resourceAttribute) {
-    return String.format("g.co/r/%s/%s", mapToStringResourceType(resourceType), resourceAttribute);
-  }
-
-  private static String mapToStringResourceType(ResourceType resourceType) {
-    switch (resourceType) {
-      case GCP_GCE_INSTANCE:
-        return "gce_instance";
-      case GCP_GKE_CONTAINER:
-        return "k8s_container";
-      case AWS_EC2_INSTANCE:
-        return "aws_ec2_instance";
-    }
-    throw new IllegalArgumentException("Unknown resource type.");
+  static String createResourceLabelKey(String resourceAttribute) {
+    return "g.co/r/" + resourceAttribute;
   }
 
   @VisibleForTesting
@@ -480,22 +427,24 @@
     // Start a new span with explicit 1/10000 sampling probability to avoid the case when user
     // sets the default sampler to always sample and we get the gRPC span of the stackdriver
     // export call always sampled and go to an infinite loop.
-    Scope scope =
+    io.opencensus.trace.Span span =
         tracer
-            .spanBuilder("ExportStackdriverTraces")
+            .spanBuilder(EXPORT_STACKDRIVER_TRACES)
             .setSampler(probabilitySampler)
             .setRecordEvents(true)
-            .startScopedSpan();
+            .startSpan();
+    Scope scope = tracer.withSpan(span);
     try {
       List<Span> spans = new ArrayList<>(spanDataList.size());
       for (SpanData spanData : spanDataList) {
-        spans.add(generateSpan(spanData, RESOURCE_LABELS));
+        spans.add(generateSpan(spanData, RESOURCE_LABELS, fixedAttributes));
       }
       // Sync call because it is already called for a batch of data, and on a separate thread.
       // TODO(bdrutu): Consider to make this async in the future.
       traceServiceClient.batchWriteSpans(projectName, spans);
     } finally {
       scope.close();
+      span.end(END_SPAN_OPTIONS);
     }
   }
 }
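
Worth noting: getResourceLabels() now maps every label of the detected Resource to a span attribute under a "g.co/r/" prefix instead of switching on known resource types. A stand-alone illustration of that mapping (sketch only; the handler's package-private createResourceLabelKey() does the real work):

```java
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

/** Illustration of the generic resource-label mapping; not the handler's actual helper. */
final class ResourceLabelMappingSketch {
  static Map<String, String> toSpanAttributeKeys(Map<String, String> resourceLabels) {
    Map<String, String> mapped = new LinkedHashMap<>();
    for (Map.Entry<String, String> entry : resourceLabels.entrySet()) {
      // e.g. a resource label "zone" becomes the span attribute key "g.co/r/zone".
      mapped.put("g.co/r/" + entry.getKey(), entry.getValue());
    }
    return Collections.unmodifiableMap(mapped);
  }
}
```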
diff --git a/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceConfigurationTest.java b/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceConfigurationTest.java
index 6926e86..30fc988 100644
--- a/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceConfigurationTest.java
+++ b/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceConfigurationTest.java
@@ -21,8 +21,15 @@
 import com.google.auth.Credentials;
 import com.google.auth.oauth2.AccessToken;
 import com.google.auth.oauth2.GoogleCredentials;
+import com.google.cloud.ServiceOptions;
+import io.opencensus.common.Duration;
+import io.opencensus.trace.AttributeValue;
+import java.util.Collections;
 import java.util.Date;
+import java.util.Map;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 
@@ -33,22 +40,113 @@
   private static final Credentials FAKE_CREDENTIALS =
       GoogleCredentials.newBuilder().setAccessToken(new AccessToken("fake", new Date(100))).build();
   private static final String PROJECT_ID = "project";
+  private static final Duration ONE_MINUTE = Duration.create(60, 0);
+  private static final Duration NEG_ONE_MINUTE = Duration.create(-60, 0);
+
+  @Rule public final ExpectedException thrown = ExpectedException.none();
 
   @Test
   public void defaultConfiguration() {
-    StackdriverTraceConfiguration configuration = StackdriverTraceConfiguration.builder().build();
+    StackdriverTraceConfiguration configuration;
+    try {
+      configuration = StackdriverTraceConfiguration.builder().build();
+    } catch (Exception e) {
+      // Some test hosts may not have cloud project ID set up.
+      configuration = StackdriverTraceConfiguration.builder().setProjectId("test").build();
+    }
     assertThat(configuration.getCredentials()).isNull();
-    assertThat(configuration.getProjectId()).isNull();
+    assertThat(configuration.getProjectId()).isNotNull();
+    assertThat(configuration.getTraceServiceStub()).isNull();
+    assertThat(configuration.getFixedAttributes()).isEmpty();
+    assertThat(configuration.getDeadline())
+        .isEqualTo(StackdriverTraceConfiguration.DEFAULT_DEADLINE);
   }
 
   @Test
   public void updateAll() {
+    Map<String, AttributeValue> attributes =
+        Collections.singletonMap("key", AttributeValue.stringAttributeValue("val"));
     StackdriverTraceConfiguration configuration =
         StackdriverTraceConfiguration.builder()
             .setCredentials(FAKE_CREDENTIALS)
             .setProjectId(PROJECT_ID)
+            .setFixedAttributes(attributes)
+            .setDeadline(ONE_MINUTE)
             .build();
     assertThat(configuration.getCredentials()).isEqualTo(FAKE_CREDENTIALS);
     assertThat(configuration.getProjectId()).isEqualTo(PROJECT_ID);
+    assertThat(configuration.getFixedAttributes()).isEqualTo(attributes);
+    assertThat(configuration.getDeadline()).isEqualTo(ONE_MINUTE);
+  }
+
+  @Test
+  public void disallowNullProjectId() {
+    StackdriverTraceConfiguration.Builder builder = StackdriverTraceConfiguration.builder();
+    thrown.expect(NullPointerException.class);
+    builder.setProjectId(null);
+  }
+
+  @Test
+  public void disallowEmptyProjectId() {
+    StackdriverTraceConfiguration.Builder builder = StackdriverTraceConfiguration.builder();
+    builder.setProjectId("");
+    thrown.expect(IllegalArgumentException.class);
+    builder.build();
+  }
+
+  @Test
+  public void allowToUseDefaultProjectId() {
+    String defaultProjectId = ServiceOptions.getDefaultProjectId();
+    if (defaultProjectId != null) {
+      StackdriverTraceConfiguration configuration = StackdriverTraceConfiguration.builder().build();
+      assertThat(configuration.getProjectId()).isEqualTo(defaultProjectId);
+    }
+  }
+
+  @Test
+  public void disallowNullFixedAttributes() {
+    StackdriverTraceConfiguration.Builder builder =
+        StackdriverTraceConfiguration.builder().setProjectId("test");
+    thrown.expect(NullPointerException.class);
+    builder.setFixedAttributes(null);
+  }
+
+  @Test
+  public void disallowNullFixedAttributeKey() {
+    StackdriverTraceConfiguration.Builder builder =
+        StackdriverTraceConfiguration.builder().setProjectId("test");
+    Map<String, AttributeValue> attributes =
+        Collections.singletonMap(null, AttributeValue.stringAttributeValue("val"));
+    builder.setFixedAttributes(attributes);
+    thrown.expect(NullPointerException.class);
+    builder.build();
+  }
+
+  @Test
+  public void disallowNullFixedAttributeValue() {
+    StackdriverTraceConfiguration.Builder builder =
+        StackdriverTraceConfiguration.builder().setProjectId("test");
+    Map<String, AttributeValue> attributes = Collections.singletonMap("key", null);
+    builder.setFixedAttributes(attributes);
+    thrown.expect(NullPointerException.class);
+    builder.build();
+  }
+
+  @Test
+  public void disallowZeroDuration() {
+    StackdriverTraceConfiguration.Builder builder =
+        StackdriverTraceConfiguration.builder().setProjectId("test");
+    builder.setDeadline(StackdriverTraceConfiguration.Builder.ZERO);
+    thrown.expect(IllegalArgumentException.class);
+    builder.build();
+  }
+
+  @Test
+  public void disallowNegativeDuration() {
+    StackdriverTraceConfiguration.Builder builder =
+        StackdriverTraceConfiguration.builder().setProjectId("test");
+    builder.setDeadline(NEG_ONE_MINUTE);
+    thrown.expect(IllegalArgumentException.class);
+    builder.build();
   }
 }
diff --git a/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceExporterTest.java b/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceExporterTest.java
index 6a12a89..6abff5a 100644
--- a/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceExporterTest.java
+++ b/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverTraceExporterTest.java
@@ -16,8 +16,8 @@
 
 package io.opencensus.exporter.trace.stackdriver;
 
-import static org.mockito.Matchers.eq;
-import static org.mockito.Matchers.same;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.ArgumentMatchers.same;
 import static org.mockito.Mockito.verify;
 
 import io.opencensus.trace.export.SpanExporter;
diff --git a/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverV2ExporterHandlerExportTest.java b/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverV2ExporterHandlerExportTest.java
index 3245859..fdc370b 100644
--- a/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverV2ExporterHandlerExportTest.java
+++ b/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverV2ExporterHandlerExportTest.java
@@ -20,6 +20,7 @@
 
 import com.google.cloud.trace.v2.TraceServiceClient;
 import com.google.cloud.trace.v2.stub.TraceServiceStub;
+import io.opencensus.trace.AttributeValue;
 import io.opencensus.trace.export.SpanData;
 import java.util.Collection;
 import java.util.Collections;
@@ -49,7 +50,9 @@
     // TODO(@Hailong): TraceServiceClient.create(TraceServiceStub) is a beta API and might change
     // in the future.
     traceServiceClient = TraceServiceClient.create(traceServiceStub);
-    handler = new StackdriverV2ExporterHandler(PROJECT_ID, traceServiceClient);
+    handler =
+        StackdriverV2ExporterHandler.createWithStub(
+            PROJECT_ID, traceServiceClient, Collections.<String, AttributeValue>emptyMap());
   }
 
   @Test
diff --git a/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverV2ExporterHandlerProtoTest.java b/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverV2ExporterHandlerProtoTest.java
index 8b28dc0..d5d16d5 100644
--- a/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverV2ExporterHandlerProtoTest.java
+++ b/exporters/trace/stackdriver/src/test/java/io/opencensus/exporter/trace/stackdriver/StackdriverV2ExporterHandlerProtoTest.java
@@ -17,9 +17,7 @@
 package io.opencensus.exporter.trace.stackdriver;
 
 import static com.google.common.truth.Truth.assertThat;
-import static io.opencensus.contrib.monitoredresource.util.ResourceType.AWS_EC2_INSTANCE;
-import static io.opencensus.contrib.monitoredresource.util.ResourceType.GCP_GCE_INSTANCE;
-import static io.opencensus.contrib.monitoredresource.util.ResourceType.GCP_GKE_CONTAINER;
+import static io.opencensus.exporter.trace.stackdriver.StackdriverTraceConfiguration.DEFAULT_DEADLINE;
 import static io.opencensus.exporter.trace.stackdriver.StackdriverV2ExporterHandler.createResourceLabelKey;
 import static io.opencensus.exporter.trace.stackdriver.StackdriverV2ExporterHandler.toStringAttributeValueProto;
 
@@ -36,10 +34,9 @@
 import com.google.devtools.cloudtrace.v2.TruncatableString;
 import com.google.protobuf.Int32Value;
 import io.opencensus.common.Timestamp;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.AwsEc2InstanceMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGceInstanceMonitoredResource;
-import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGkeContainerMonitoredResource;
+import io.opencensus.contrib.resource.util.CloudResource;
+import io.opencensus.contrib.resource.util.HostResource;
+import io.opencensus.resource.Resource;
 import io.opencensus.trace.Annotation;
 import io.opencensus.trace.Link;
 import io.opencensus.trace.Span.Kind;
@@ -48,6 +45,7 @@
 import io.opencensus.trace.Status;
 import io.opencensus.trace.TraceId;
 import io.opencensus.trace.TraceOptions;
+import io.opencensus.trace.Tracestate;
 import io.opencensus.trace.export.SpanData;
 import io.opencensus.trace.export.SpanData.TimedEvent;
 import io.opencensus.trace.export.SpanData.TimedEvents;
@@ -104,7 +102,8 @@
   private static final SpanId spanId = SpanId.fromLowerBase16(SPAN_ID);
   private static final TraceId traceId = TraceId.fromLowerBase16(TRACE_ID);
   private static final TraceOptions traceOptions = TraceOptions.DEFAULT;
-  private static final SpanContext spanContext = SpanContext.create(traceId, spanId, traceOptions);
+  private static final SpanContext spanContext =
+      SpanContext.create(traceId, spanId, traceOptions, Tracestate.builder().build());
 
   private static final List<TimedEvent<Annotation>> annotationsList =
       ImmutableList.of(
@@ -131,56 +130,35 @@
       TimedEvents.create(networkEventsList, DROPPED_NETWORKEVENTS_COUNT);
   private static final SpanData.Links links = SpanData.Links.create(linksList, DROPPED_LINKS_COUNT);
   private static final Map<String, AttributeValue> EMPTY_RESOURCE_LABELS = Collections.emptyMap();
-  private static final AwsEc2InstanceMonitoredResource AWS_EC2_INSTANCE_MONITORED_RESOURCE =
-      AwsEc2InstanceMonitoredResource.create("my-project", "my-instance", "us-east-1");
-  private static final GcpGceInstanceMonitoredResource GCP_GCE_INSTANCE_MONITORED_RESOURCE =
-      GcpGceInstanceMonitoredResource.create("my-project", "my-instance", "us-east1");
-  private static final GcpGkeContainerMonitoredResource GCP_GKE_CONTAINER_MONITORED_RESOURCE =
-      GcpGkeContainerMonitoredResource.create(
-          "my-project", "cluster", "container", "namespace", "my-instance", "pod", "us-east1");
-  private static final ImmutableMap<String, AttributeValue> AWS_RESOURCE_LABELS =
+  private static final Resource CUSTOM_RESOURCE =
+      Resource.create(
+          "MyOwnResouce",
+          ImmutableMap.of(
+              CloudResource.ACCOUNT_ID_KEY,
+              "my-project",
+              CloudResource.ZONE_KEY,
+              "us-east1",
+              HostResource.ID_KEY,
+              "my-instance"));
+  private static final ImmutableMap<String, AttributeValue> EXPECTED_RESOURCE_ATTRIBUTES =
       ImmutableMap.of(
-          createResourceLabelKey(AWS_EC2_INSTANCE, "aws_account"),
+          createResourceLabelKey(CloudResource.ACCOUNT_ID_KEY),
           toStringAttributeValueProto("my-project"),
-          createResourceLabelKey(AWS_EC2_INSTANCE, "instance_id"),
-          toStringAttributeValueProto("my-instance"),
-          createResourceLabelKey(AWS_EC2_INSTANCE, "region"),
-          toStringAttributeValueProto("aws:us-east-1"));
-  private static final ImmutableMap<String, AttributeValue> GCE_RESOURCE_LABELS =
-      ImmutableMap.of(
-          createResourceLabelKey(GCP_GCE_INSTANCE, "project_id"),
-          toStringAttributeValueProto("my-project"),
-          createResourceLabelKey(GCP_GCE_INSTANCE, "instance_id"),
-          toStringAttributeValueProto("my-instance"),
-          createResourceLabelKey(GCP_GCE_INSTANCE, "zone"),
-          toStringAttributeValueProto("us-east1"));
-  private static final ImmutableMap<String, AttributeValue> GKE_RESOURCE_LABELS =
-      ImmutableMap.<String, AttributeValue>builder()
-          .put(
-              createResourceLabelKey(GCP_GKE_CONTAINER, "project_id"),
-              toStringAttributeValueProto("my-project"))
-          .put(
-              createResourceLabelKey(GCP_GKE_CONTAINER, "cluster_name"),
-              toStringAttributeValueProto("cluster"))
-          .put(
-              createResourceLabelKey(GCP_GKE_CONTAINER, "container_name"),
-              toStringAttributeValueProto("container"))
-          .put(
-              createResourceLabelKey(GCP_GKE_CONTAINER, "namespace_name"),
-              toStringAttributeValueProto("namespace"))
-          .put(
-              createResourceLabelKey(GCP_GKE_CONTAINER, "pod_name"),
-              toStringAttributeValueProto("pod"))
-          .put(
-              createResourceLabelKey(GCP_GKE_CONTAINER, "location"),
-              toStringAttributeValueProto("us-east1"))
-          .build();
+          createResourceLabelKey(CloudResource.ZONE_KEY),
+          toStringAttributeValueProto("us-east1"),
+          createResourceLabelKey(HostResource.ID_KEY),
+          toStringAttributeValueProto("my-instance"));
 
   private StackdriverV2ExporterHandler handler;
 
   @Before
   public void setUp() throws IOException {
-    handler = StackdriverV2ExporterHandler.createWithCredentials(FAKE_CREDENTIALS, PROJECT_ID);
+    handler =
+        StackdriverV2ExporterHandler.createWithCredentials(
+            PROJECT_ID,
+            FAKE_CREDENTIALS,
+            Collections.<String, io.opencensus.trace.AttributeValue>emptyMap(),
+            DEFAULT_DEADLINE);
   }
 
   @Test
@@ -283,7 +261,9 @@
             .setNanos(endTimestamp.getNanos())
             .build();
 
-    Span span = handler.generateSpan(spanData, EMPTY_RESOURCE_LABELS);
+    Span span =
+        handler.generateSpan(
+            spanData, EMPTY_RESOURCE_LABELS, Collections.<String, AttributeValue>emptyMap());
     assertThat(span.getName()).isEqualTo(SD_SPAN_NAME);
     assertThat(span.getSpanId()).isEqualTo(SPAN_ID);
     assertThat(span.getParentSpanId()).isEqualTo(PARENT_SPAN_ID);
@@ -305,35 +285,20 @@
     assertThat(span.getTimeEvents().getDroppedAnnotationsCount())
         .isEqualTo(DROPPED_ANNOTATIONS_COUNT);
     assertThat(span.getTimeEvents().getTimeEventList())
-        .containsAllOf(annotationTimeEvent1, annotationTimeEvent2, sentTimeEvent, recvTimeEvent);
+        .containsExactly(annotationTimeEvent1, annotationTimeEvent2, sentTimeEvent, recvTimeEvent);
     assertThat(span.getLinks()).isEqualTo(spanLinks);
     assertThat(span.getStatus()).isEqualTo(spanStatus);
     assertThat(span.getSameProcessAsParentSpan())
-        .isEqualTo(com.google.protobuf.BoolValue.newBuilder().build());
+        .isEqualTo(com.google.protobuf.BoolValue.of(false));
     assertThat(span.getChildSpanCount())
         .isEqualTo(Int32Value.newBuilder().setValue(CHILD_SPAN_COUNT).build());
   }
 
   @Test
-  public void getResourceLabels_AwsEc2ResourceLabels() {
-    testGetResourceLabels(AWS_EC2_INSTANCE_MONITORED_RESOURCE, AWS_RESOURCE_LABELS);
-  }
-
-  @Test
-  public void getResourceLabels_GceResourceLabels() {
-    testGetResourceLabels(GCP_GCE_INSTANCE_MONITORED_RESOURCE, GCE_RESOURCE_LABELS);
-  }
-
-  @Test
-  public void getResourceLabels_GkeResourceLabels() {
-    testGetResourceLabels(GCP_GKE_CONTAINER_MONITORED_RESOURCE, GKE_RESOURCE_LABELS);
-  }
-
-  private static void testGetResourceLabels(
-      MonitoredResource resource, Map<String, AttributeValue> expectedLabels) {
+  public void getResourceLabels() {
     Map<String, AttributeValue> actualLabels =
-        StackdriverV2ExporterHandler.getResourceLabels(resource);
-    assertThat(actualLabels).containsExactlyEntriesIn(expectedLabels);
+        StackdriverV2ExporterHandler.getResourceLabels(CUSTOM_RESOURCE);
+    assertThat(actualLabels).containsExactlyEntriesIn(EXPECTED_RESOURCE_ATTRIBUTES);
   }
 
   @Test
@@ -353,9 +318,12 @@
             CHILD_SPAN_COUNT,
             status,
             endTimestamp);
-    Span span = handler.generateSpan(spanData, AWS_RESOURCE_LABELS);
+    Span span =
+        handler.generateSpan(
+            spanData, EXPECTED_RESOURCE_ATTRIBUTES, Collections.<String, AttributeValue>emptyMap());
     Map<String, AttributeValue> attributeMap = span.getAttributes().getAttributeMapMap();
-    assertThat(attributeMap.entrySet()).containsAllIn(AWS_RESOURCE_LABELS.entrySet());
+    assertThat(attributeMap.entrySet())
+        .containsAtLeastElementsIn(EXPECTED_RESOURCE_ATTRIBUTES.entrySet());
   }
 
   @Test
@@ -381,6 +349,7 @@
             parentSpanId,
             /* hasRemoteParent= */ true,
             SPAN_NAME,
+            null,
             startTimestamp,
             httpAttributes,
             annotations,
@@ -390,7 +359,9 @@
             status,
             endTimestamp);
 
-    Span span = handler.generateSpan(spanData, EMPTY_RESOURCE_LABELS);
+    Span span =
+        handler.generateSpan(
+            spanData, EMPTY_RESOURCE_LABELS, Collections.<String, AttributeValue>emptyMap());
     Map<String, AttributeValue> attributes = span.getAttributes().getAttributeMapMap();
 
     assertThat(attributes).containsEntry("/http/host", toStringAttributeValueProto("host"));
@@ -404,6 +375,32 @@
   }
 
   @Test
+  public void exportChildSpanCount() {
+    SpanData spanData =
+        SpanData.create(
+            spanContext,
+            parentSpanId,
+            /* hasRemoteParent= */ true,
+            SPAN_NAME,
+            Kind.SERVER,
+            startTimestamp,
+            attributes,
+            annotations,
+            messageEvents,
+            links,
+            CHILD_SPAN_COUNT,
+            status,
+            endTimestamp);
+    assertThat(
+            handler
+                .generateSpan(
+                    spanData, EMPTY_RESOURCE_LABELS, Collections.<String, AttributeValue>emptyMap())
+                .getChildSpanCount()
+                .getValue())
+        .isEqualTo(CHILD_SPAN_COUNT);
+  }
+
+  @Test
   public void generateSpanName_ForServer() {
     SpanData spanData =
         SpanData.create(
@@ -420,7 +417,12 @@
             CHILD_SPAN_COUNT,
             status,
             endTimestamp);
-    assertThat(handler.generateSpan(spanData, EMPTY_RESOURCE_LABELS).getDisplayName().getValue())
+    assertThat(
+            handler
+                .generateSpan(
+                    spanData, EMPTY_RESOURCE_LABELS, Collections.<String, AttributeValue>emptyMap())
+                .getDisplayName()
+                .getValue())
         .isEqualTo("Recv." + SPAN_NAME);
   }
 
@@ -441,7 +443,12 @@
             CHILD_SPAN_COUNT,
             status,
             endTimestamp);
-    assertThat(handler.generateSpan(spanData, EMPTY_RESOURCE_LABELS).getDisplayName().getValue())
+    assertThat(
+            handler
+                .generateSpan(
+                    spanData, EMPTY_RESOURCE_LABELS, Collections.<String, AttributeValue>emptyMap())
+                .getDisplayName()
+                .getValue())
         .isEqualTo("Recv." + SPAN_NAME);
   }
 
@@ -462,7 +469,12 @@
             CHILD_SPAN_COUNT,
             status,
             endTimestamp);
-    assertThat(handler.generateSpan(spanData, EMPTY_RESOURCE_LABELS).getDisplayName().getValue())
+    assertThat(
+            handler
+                .generateSpan(
+                    spanData, EMPTY_RESOURCE_LABELS, Collections.<String, AttributeValue>emptyMap())
+                .getDisplayName()
+                .getValue())
         .isEqualTo("Sent." + SPAN_NAME);
   }
 
@@ -483,7 +495,44 @@
             CHILD_SPAN_COUNT,
             status,
             endTimestamp);
-    assertThat(handler.generateSpan(spanData, EMPTY_RESOURCE_LABELS).getDisplayName().getValue())
+    assertThat(
+            handler
+                .generateSpan(
+                    spanData, EMPTY_RESOURCE_LABELS, Collections.<String, AttributeValue>emptyMap())
+                .getDisplayName()
+                .getValue())
         .isEqualTo("Sent." + SPAN_NAME);
   }
+
+  @Test
+  public void addFixedAttributes() {
+    final ImmutableMap<String, AttributeValue> fixedAttributes =
+        ImmutableMap.of(
+            "string_attr_key",
+            toStringAttributeValueProto("my-project"),
+            "long_attr_key",
+            AttributeValue.newBuilder().setIntValue(1234).build(),
+            "bool_attr_key",
+            AttributeValue.newBuilder().setBoolValue(true).build());
+
+    SpanData spanData =
+        SpanData.create(
+            spanContext,
+            parentSpanId,
+            /* hasRemoteParent= */ true,
+            "Sent." + SPAN_NAME,
+            Kind.CLIENT,
+            startTimestamp,
+            attributes,
+            annotations,
+            messageEvents,
+            links,
+            CHILD_SPAN_COUNT,
+            status,
+            endTimestamp);
+
+    Span span = handler.generateSpan(spanData, EMPTY_RESOURCE_LABELS, fixedAttributes);
+    Map<String, AttributeValue> attributeMap = span.getAttributes().getAttributeMapMap();
+    assertThat(attributeMap.entrySet()).containsAtLeastElementsIn(fixedAttributes.entrySet());
+  }
 }
diff --git a/exporters/trace/util/README.md b/exporters/trace/util/README.md
new file mode 100644
index 0000000..6120b2d
--- /dev/null
+++ b/exporters/trace/util/README.md
@@ -0,0 +1,29 @@
+# OpenCensus Java Trace Exporter Util
+
+The *OpenCensus Trace Exporter Util* is the Java helper package for all trace exporters.
+
+## Quickstart
+
+### Add the dependencies to your project
+
+For Maven add to your `pom.xml`:
+```xml
+<dependencies>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-api</artifactId>
+    <version>0.22.0</version>
+  </dependency>
+  <dependency>
+    <groupId>io.opencensus</groupId>
+    <artifactId>opencensus-exporter-trace-util</artifactId>
+    <version>0.22.0</version>
+  </dependency>
+</dependencies>
+```
+
+For Gradle add to your dependencies:
+```groovy
+compile 'io.opencensus:opencensus-api:0.22.0'
+compile 'io.opencensus:opencensus-exporter-trace-util:0.22.0'
+```
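+
+### Sketch: extending `TimeLimitedHandler`
+
+The central helper in this package is `TimeLimitedHandler`, an abstract `SpanExporter.Handler`
+that wraps each export batch in a deadline. The sketch below shows how an exporter might use it;
+the class name, export span name, and registration name are illustrative, not part of any
+released exporter.
+
+```java
+import io.opencensus.common.Duration;
+import io.opencensus.exporter.trace.util.TimeLimitedHandler;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.export.SpanData;
+import java.util.Collection;
+
+final class MyBackendHandler extends TimeLimitedHandler {
+
+  MyBackendHandler(Duration deadline) {
+    // The second argument names the span created around each export batch.
+    super(deadline, "ExportMyBackendSpans");
+  }
+
+  @Override
+  public void timeLimitedExport(Collection<SpanData> spanDataList) throws Exception {
+    // Convert spanDataList to the backend's wire format and send it here.
+  }
+
+  static void registerToOpenCensus() {
+    Tracing.getExportComponent()
+        .getSpanExporter()
+        .registerHandler(
+            "io.opencensus.exporter.trace.mybackend",
+            new MyBackendHandler(Duration.create(10, 0)));
+  }
+}
+```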
diff --git a/contrib/monitored_resource_util/build.gradle b/exporters/trace/util/build.gradle
similarity index 72%
rename from contrib/monitored_resource_util/build.gradle
rename to exporters/trace/util/build.gradle
index 1e25c7c..4d83628 100644
--- a/contrib/monitored_resource_util/build.gradle
+++ b/exporters/trace/util/build.gradle
@@ -1,6 +1,4 @@
-description = 'OpenCensus Monitored Resource Util'
-
-apply plugin: 'java'
+description = 'OpenCensus Java Trace Exporter Utils'
 
 [compileJava, compileTestJava].each() {
     it.sourceCompatibility = 1.6
@@ -10,6 +8,9 @@
 dependencies {
     compileOnly libraries.auto_value
 
+    compile project(':opencensus-api'),
+            libraries.guava
+
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
     signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
 }
diff --git a/exporters/trace/util/src/main/java/io/opencensus/exporter/trace/util/TimeLimitedHandler.java b/exporters/trace/util/src/main/java/io/opencensus/exporter/trace/util/TimeLimitedHandler.java
new file mode 100644
index 0000000..d30a1ff
--- /dev/null
+++ b/exporters/trace/util/src/main/java/io/opencensus/exporter/trace/util/TimeLimitedHandler.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.util;
+
+import com.google.common.util.concurrent.SimpleTimeLimiter;
+import com.google.common.util.concurrent.TimeLimiter;
+import com.google.errorprone.annotations.MustBeClosed;
+import io.opencensus.common.Duration;
+import io.opencensus.common.Scope;
+import io.opencensus.trace.Sampler;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Status;
+import io.opencensus.trace.TraceOptions;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.export.SpanData;
+import io.opencensus.trace.export.SpanExporter;
+import io.opencensus.trace.export.SpanExporter.Handler;
+import io.opencensus.trace.samplers.Samplers;
+import java.util.Collection;
+import java.util.concurrent.Callable;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/**
+ * An abstract class that allows different tracing services to export recorded data for sampled
+ * spans in their own format within a given time frame. If export does not complete within the time
+ * frame, spans will be dropped and no retries will be performed.
+ *
+ * <p>Only extend this class if the client APIs don't support a timeout natively. If the client
+ * APIs offer a timeout option (for example, the Stackdriver Trace V2 API allows a deadline to be
+ * set), use that instead.
+ *
+ * <p>To export data, this handler MUST be registered to the ExportComponent using {@link
+ * SpanExporter#registerHandler(String, Handler)}.
+ *
+ * @since 0.22
+ */
+public abstract class TimeLimitedHandler extends SpanExporter.Handler {
+
+  private static final Logger logger = Logger.getLogger(TimeLimitedHandler.class.getName());
+  private static final Tracer tracer = Tracing.getTracer();
+  private static final Sampler lowProbabilitySampler = Samplers.probabilitySampler(0.0001);
+
+  private final Duration deadline;
+  private final String exportSpanName;
+
+  protected TimeLimitedHandler(Duration deadline, String exportSpanName) {
+    this.deadline = deadline;
+    this.exportSpanName = exportSpanName;
+  }
+
+  /**
+   * Exports a list of sampled (see {@link TraceOptions#isSampled()}) {@link Span}s using the
+   * immutable representation {@link SpanData}, within the given {@code deadline} of this {@link
+   * TimeLimitedHandler}.
+   *
+   * @param spanDataList a list of {@code SpanData} objects to be exported.
+   * @throws Exception if the export failed.
+   * @since 0.22
+   */
+  public abstract void timeLimitedExport(Collection<SpanData> spanDataList) throws Exception;
+
+  @Override
+  public void export(final Collection<SpanData> spanDataList) {
+    final Scope exportScope = newExportScope();
+    try {
+      TimeLimiter timeLimiter = SimpleTimeLimiter.create(Executors.newSingleThreadExecutor());
+      timeLimiter.callWithTimeout(
+          new Callable<Void>() {
+            @Override
+            public Void call() throws Exception {
+              timeLimitedExport(spanDataList);
+              return null;
+            }
+          },
+          deadline.toMillis(),
+          TimeUnit.MILLISECONDS);
+    } catch (TimeoutException e) {
+      handleException(e, "Timeout when exporting traces: " + e);
+    } catch (InterruptedException e) {
+      handleException(e, "Interrupted when exporting traces: " + e);
+    } catch (Exception e) {
+      handleException(e, "Failed to export traces: " + e);
+    } finally {
+      exportScope.close();
+    }
+  }
+
+  @MustBeClosed
+  private Scope newExportScope() {
+    return tracer.spanBuilder(exportSpanName).setSampler(lowProbabilitySampler).startScopedSpan();
+  }
+
+  private static void handleException(Exception e, String logMessage) {
+    Status status = e instanceof TimeoutException ? Status.DEADLINE_EXCEEDED : Status.UNKNOWN;
+    tracer
+        .getCurrentSpan()
+        .setStatus(
+            status.withDescription(
+                e.getMessage() == null ? e.getClass().getSimpleName() : e.getMessage()));
+    logger.log(Level.WARNING, logMessage);
+  }
+}
diff --git a/exporters/trace/zipkin/README.md b/exporters/trace/zipkin/README.md
index 4398360..69a37ba 100644
--- a/exporters/trace/zipkin/README.md
+++ b/exporters/trace/zipkin/README.md
@@ -4,7 +4,7 @@
 [![Maven Central][maven-image]][maven-url]
 
 The *OpenCensus Zipkin Trace Exporter* is a trace exporter that exports
-data to Zipkin. [Zipkin](http://zipkin.io/) Zipkin is a distributed
+data to Zipkin. [Zipkin](https://zipkin.io/) is a distributed
 tracing system. It helps gather timing data needed to troubleshoot
 latency problems in microservice architectures. It manages both the
 collection and lookup of this data.
@@ -13,12 +13,12 @@
 
 ### Prerequisites
 
-[Zipkin](http://zipkin.io/) stores and queries traces exported by
+[Zipkin](https://zipkin.io/) stores and queries traces exported by
 applications instrumented with Census. The easiest way to start a Zipkin
 server is to run the commands below:
 
 ```bash
-wget -O zipkin.jar 'https://search.maven.org/remote_content?g=io.zipkin.java&a=zipkin-server&v=LATEST&c=exec'
+curl -sSL https://zipkin.io/quickstart.sh | bash -s
 java -jar zipkin.jar
 ```
 
@@ -33,17 +33,17 @@
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-api</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-exporter-trace-zipkin</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
   </dependency>
   <dependency>
     <groupId>io.opencensus</groupId>
     <artifactId>opencensus-impl</artifactId>
-    <version>0.16.1</version>
+    <version>0.28.3</version>
     <scope>runtime</scope>
   </dependency>
 </dependencies>
@@ -51,9 +51,9 @@
 
 For Gradle add to your dependencies:
 ```groovy
-compile 'io.opencensus:opencensus-api:0.16.1'
-compile 'io.opencensus:opencensus-exporter-trace-zipkin:0.16.1'
-runtime 'io.opencensus:opencensus-impl:0.16.1'
+compile 'io.opencensus:opencensus-api:0.28.3'
+compile 'io.opencensus:opencensus-exporter-trace-zipkin:0.28.3'
+runtime 'io.opencensus:opencensus-impl:0.28.3'
 ```
 
 #### Register the exporter
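+
+A minimal sketch using `ZipkinExporterConfiguration` (the `createAndRegister(String, String)`
+overload is deprecated in favor of it); the endpoint, service name, and deadline below are
+illustrative:
+
+```java
+ZipkinTraceExporter.createAndRegister(
+    ZipkinExporterConfiguration.builder()
+        .setV2Url("http://127.0.0.1:9411/api/v2/spans")
+        .setServiceName("myservicename")
+        // Optional io.opencensus.common.Duration; defaults to 10 seconds.
+        .setDeadline(Duration.create(30, 0))
+        .build());
+```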
diff --git a/exporters/trace/zipkin/build.gradle b/exporters/trace/zipkin/build.gradle
index 530dff7..8219af9 100644
--- a/exporters/trace/zipkin/build.gradle
+++ b/exporters/trace/zipkin/build.gradle
@@ -6,13 +6,14 @@
 }
 
 dependencies {
+    compileOnly libraries.auto_value
+
     compile project(':opencensus-api'),
+            project(':opencensus-exporter-trace-util'),
             libraries.guava,
             libraries.zipkin_reporter,
             libraries.zipkin_urlconnection
 
-    testCompile project(':opencensus-api')
-
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
     signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
 }
diff --git a/exporters/trace/zipkin/src/main/java/io/opencensus/exporter/trace/zipkin/ZipkinExporterConfiguration.java b/exporters/trace/zipkin/src/main/java/io/opencensus/exporter/trace/zipkin/ZipkinExporterConfiguration.java
new file mode 100644
index 0000000..a692cd1
--- /dev/null
+++ b/exporters/trace/zipkin/src/main/java/io/opencensus/exporter/trace/zipkin/ZipkinExporterConfiguration.java
@@ -0,0 +1,185 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.zipkin;
+
+import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import io.opencensus.common.Duration;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.Immutable;
+import zipkin2.codec.SpanBytesEncoder;
+import zipkin2.reporter.Sender;
+
+/**
+ * Configurations for {@link ZipkinTraceExporter}.
+ *
+ * @since 0.22
+ */
+@AutoValue
+@Immutable
+public abstract class ZipkinExporterConfiguration {
+
+  @VisibleForTesting static final Duration DEFAULT_DEADLINE = Duration.create(10, 0);
+
+  ZipkinExporterConfiguration() {}
+
+  /**
+   * Returns the service name.
+   *
+   * @return the service name.
+   * @since 0.22
+   */
+  public abstract String getServiceName();
+
+  /**
+   * Returns the Zipkin V2 URL.
+   *
+   * @return the Zipkin V2 URL.
+   * @since 0.22
+   */
+  public abstract String getV2Url();
+
+  /**
+   * Returns the Zipkin sender.
+   *
+   * @return the Zipkin sender.
+   * @since 0.22
+   */
+  @Nullable
+  public abstract Sender getSender();
+
+  /**
+   * Returns the {@link SpanBytesEncoder}.
+   *
+   * <p>Default is {@link SpanBytesEncoder#JSON_V2}.
+   *
+   * @return the {@code SpanBytesEncoder}
+   * @since 0.22
+   */
+  public abstract SpanBytesEncoder getEncoder();
+
+  /**
+   * Returns the deadline for exporting to Zipkin.
+   *
+   * <p>Default value is 10 seconds.
+   *
+   * @return the export deadline.
+   * @since 0.22
+   */
+  public abstract Duration getDeadline();
+
+  /**
+   * Returns a new {@link Builder}.
+   *
+   * @return a {@code Builder}.
+   * @since 0.22
+   */
+  public static Builder builder() {
+    return new AutoValue_ZipkinExporterConfiguration.Builder()
+        .setV2Url("")
+        .setEncoder(SpanBytesEncoder.JSON_V2)
+        .setDeadline(DEFAULT_DEADLINE);
+  }
+
+  /**
+   * Builder for {@link ZipkinExporterConfiguration}.
+   *
+   * @since 0.22
+   */
+  @AutoValue.Builder
+  public abstract static class Builder {
+
+    @VisibleForTesting static final Duration ZERO = Duration.fromMillis(0);
+
+    Builder() {}
+
+    /**
+     * Sets the service name.
+     *
+     * @param serviceName the service name.
+     * @return this.
+     * @since 0.22
+     */
+    public abstract Builder setServiceName(String serviceName);
+
+    /**
+     * Sets the Zipkin V2 URL, e.g.: "http://127.0.0.1:9411/api/v2/spans".
+     *
+     * <p>At least one of {@code V2Url} and {@code Sender} needs to be specified. If both {@code
+     * V2Url} and {@code Sender} are set, {@code Sender} takes precedence.
+     *
+     * @param v2Url the Zipkin V2 URL.
+     * @return this.
+     * @since 0.22
+     */
+    public abstract Builder setV2Url(String v2Url);
+
+    /**
+     * Sets the Zipkin sender.
+     *
+     * <p>At least one of {@code V2Url} and {@code Sender} needs to be specified. If both {@code
+     * V2Url} and {@code Sender} are set, {@code Sender} takes precedence.
+     *
+     * @param sender the Zipkin sender.
+     * @return this.
+     * @since 0.22
+     */
+    public abstract Builder setSender(Sender sender);
+
+    /**
+     * Sets the {@link SpanBytesEncoder}.
+     *
+     * @param encoder the {@code SpanBytesEncoder}.
+     * @return this
+     * @since 0.22
+     */
+    public abstract Builder setEncoder(SpanBytesEncoder encoder);
+
+    /**
+     * Sets the deadline for exporting to Zipkin.
+     *
+     * @param deadline the export deadline.
+     * @return this
+     * @since 0.22
+     */
+    public abstract Builder setDeadline(Duration deadline);
+
+    abstract Duration getDeadline();
+
+    abstract String getV2Url();
+
+    @Nullable
+    abstract Sender getSender();
+
+    abstract ZipkinExporterConfiguration autoBuild();
+
+    /**
+     * Builds a {@link ZipkinExporterConfiguration}.
+     *
+     * @return a {@code ZipkinExporterConfiguration}.
+     * @since 0.22
+     */
+    public ZipkinExporterConfiguration build() {
+      Preconditions.checkArgument(getDeadline().compareTo(ZERO) > 0, "Deadline must be positive.");
+      Preconditions.checkArgument(
+          !getV2Url().isEmpty() || getSender() != null,
+          "Neither Zipkin V2 URL nor Zipkin sender is specified.");
+      return autoBuild();
+    }
+  }
+}
diff --git a/exporters/trace/zipkin/src/main/java/io/opencensus/exporter/trace/zipkin/ZipkinExporterHandler.java b/exporters/trace/zipkin/src/main/java/io/opencensus/exporter/trace/zipkin/ZipkinExporterHandler.java
index 70bc725..e858e17 100644
--- a/exporters/trace/zipkin/src/main/java/io/opencensus/exporter/trace/zipkin/ZipkinExporterHandler.java
+++ b/exporters/trace/zipkin/src/main/java/io/opencensus/exporter/trace/zipkin/ZipkinExporterHandler.java
@@ -19,22 +19,19 @@
 import static java.util.concurrent.TimeUnit.NANOSECONDS;
 import static java.util.concurrent.TimeUnit.SECONDS;
 
+import com.google.common.annotations.VisibleForTesting;
+import io.opencensus.common.Duration;
 import io.opencensus.common.Function;
 import io.opencensus.common.Functions;
-import io.opencensus.common.Scope;
 import io.opencensus.common.Timestamp;
+import io.opencensus.exporter.trace.util.TimeLimitedHandler;
 import io.opencensus.trace.Annotation;
 import io.opencensus.trace.AttributeValue;
-import io.opencensus.trace.Sampler;
 import io.opencensus.trace.Span.Kind;
 import io.opencensus.trace.SpanContext;
 import io.opencensus.trace.Status;
-import io.opencensus.trace.Tracer;
-import io.opencensus.trace.Tracing;
 import io.opencensus.trace.export.SpanData;
 import io.opencensus.trace.export.SpanData.TimedEvent;
-import io.opencensus.trace.export.SpanExporter;
-import io.opencensus.trace.samplers.Samplers;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.NetworkInterface;
@@ -54,18 +51,23 @@
 import org.checkerframework.checker.nullness.qual.Nullable;
 */
 
-final class ZipkinExporterHandler extends SpanExporter.Handler {
-  private static final Tracer tracer = Tracing.getTracer();
-  private static final Sampler probabilitySampler = Samplers.probabilitySampler(0.0001);
+final class ZipkinExporterHandler extends TimeLimitedHandler {
   private static final Logger logger = Logger.getLogger(ZipkinExporterHandler.class.getName());
+  private static final String EXPORT_SPAN_NAME = "SendZipkinSpans";
 
-  private static final String STATUS_CODE = "census.status_code";
-  private static final String STATUS_DESCRIPTION = "census.status_description";
+  // The naming follows Zipkin convention. As an example see:
+  // https://github.com/apache/incubator-zipkin-brave/blob/643b7245c462dc14d47afcdb076b2603fd421497/instrumentation/grpc/src/main/java/brave/grpc/GrpcParser.java#L67-L73
+  @VisibleForTesting static final String STATUS_CODE = "opencensus.status_code";
+  @VisibleForTesting static final String STATUS_DESCRIPTION = "opencensus.status_description";
+  @VisibleForTesting static final String STATUS_ERROR = "error";
+
   private final SpanBytesEncoder encoder;
   private final Sender sender;
   private final Endpoint localEndpoint;
 
-  ZipkinExporterHandler(SpanBytesEncoder encoder, Sender sender, String serviceName) {
+  ZipkinExporterHandler(
+      SpanBytesEncoder encoder, Sender sender, String serviceName, Duration deadline) {
+    super(deadline, EXPORT_SPAN_NAME);
     this.encoder = encoder;
     this.sender = sender;
     this.localEndpoint = produceLocalEndpoint(serviceName);
@@ -134,6 +136,9 @@
       if (status.getDescription() != null) {
         spanBuilder.putTag(STATUS_DESCRIPTION, status.getDescription());
       }
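+      // Zipkin treats a span with an "error" tag as failed; use the canonical code as its value.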
+      if (!status.isOk()) {
+        spanBuilder.putTag(STATUS_ERROR, status.getCanonicalCode().toString());
+      }
     }
 
     for (TimedEvent<Annotation> annotation : spanData.getAnnotations().getEvents()) {
@@ -187,29 +192,11 @@
   }
 
   @Override
-  public void export(Collection<SpanData> spanDataList) {
-    // Start a new span with explicit 1/10000 sampling probability to avoid the case when user
-    // sets the default sampler to always sample and we get the gRPC span of the zipkin
-    // export call always sampled and go to an infinite loop.
-    Scope scope =
-        tracer.spanBuilder("SendZipkinSpans").setSampler(probabilitySampler).startScopedSpan();
-    try {
-      List<byte[]> encodedSpans = new ArrayList<byte[]>(spanDataList.size());
-      for (SpanData spanData : spanDataList) {
-        encodedSpans.add(encoder.encode(generateSpan(spanData, localEndpoint)));
-      }
-      try {
-        sender.sendSpans(encodedSpans).execute();
-      } catch (IOException e) {
-        tracer
-            .getCurrentSpan()
-            .setStatus(
-                Status.UNKNOWN.withDescription(
-                    e.getMessage() == null ? e.getClass().getSimpleName() : e.getMessage()));
-        throw new RuntimeException(e); // TODO: should we instead do drop metrics?
-      }
-    } finally {
-      scope.close();
+  public void timeLimitedExport(final Collection<SpanData> spanDataList) throws IOException {
+    List<byte[]> encodedSpans = new ArrayList<byte[]>(spanDataList.size());
+    for (SpanData spanData : spanDataList) {
+      encodedSpans.add(encoder.encode(generateSpan(spanData, localEndpoint)));
     }
+    sender.sendSpans(encodedSpans).execute();
   }
 }
diff --git a/exporters/trace/zipkin/src/main/java/io/opencensus/exporter/trace/zipkin/ZipkinTraceExporter.java b/exporters/trace/zipkin/src/main/java/io/opencensus/exporter/trace/zipkin/ZipkinTraceExporter.java
index aad5a56..c681617 100644
--- a/exporters/trace/zipkin/src/main/java/io/opencensus/exporter/trace/zipkin/ZipkinTraceExporter.java
+++ b/exporters/trace/zipkin/src/main/java/io/opencensus/exporter/trace/zipkin/ZipkinTraceExporter.java
@@ -36,7 +36,11 @@
  *
  * <pre>{@code
  * public static void main(String[] args) {
- *   ZipkinTraceExporter.createAndRegister("http://127.0.0.1:9411/api/v2/spans", "myservicename");
+ *   ZipkinTraceExporter.createAndRegister(
+ *     ZipkinExporterConfiguration.builder()
+ *       .setV2Url("http://127.0.0.1:9411/api/v2/spans")
+ *       .setServiceName("myservicename")
+ *       .build());
  *   ... // Do work.
  * }
  * }</pre>
@@ -58,13 +62,42 @@
    * Creates and registers the Zipkin Trace exporter to the OpenCensus library. Only one Zipkin
    * exporter can be registered at any point.
    *
+   * @param configuration configuration for this exporter.
+   * @throws IllegalStateException if a Zipkin exporter is already registered.
+   * @since 0.22
+   */
+  public static void createAndRegister(ZipkinExporterConfiguration configuration) {
+    synchronized (monitor) {
+      checkState(handler == null, "Zipkin exporter is already registered.");
+      Sender sender = configuration.getSender();
+      if (sender == null) {
+        sender = URLConnectionSender.create(configuration.getV2Url());
+      }
+      Handler newHandler =
+          new ZipkinExporterHandler(
+              configuration.getEncoder(),
+              sender,
+              configuration.getServiceName(),
+              configuration.getDeadline());
+      handler = newHandler;
+      register(Tracing.getExportComponent().getSpanExporter(), newHandler);
+    }
+  }
+
+  /**
+   * Creates and registers the Zipkin Trace exporter to the OpenCensus library. Only one Zipkin
+   * exporter can be registered at any point.
+   *
    * @param v2Url Ex http://127.0.0.1:9411/api/v2/spans
    * @param serviceName the {@link Span#localServiceName() local service name} of the process.
    * @throws IllegalStateException if a Zipkin exporter is already registered.
    * @since 0.12
+   * @deprecated in favor of {@link #createAndRegister(ZipkinExporterConfiguration)}.
    */
+  @Deprecated
   public static void createAndRegister(String v2Url, String serviceName) {
-    createAndRegister(SpanBytesEncoder.JSON_V2, URLConnectionSender.create(v2Url), serviceName);
+    createAndRegister(
+        ZipkinExporterConfiguration.builder().setV2Url(v2Url).setServiceName(serviceName).build());
   }
 
   /**
@@ -76,15 +109,17 @@
    * @param serviceName the {@link Span#localServiceName() local service name} of the process.
    * @throws IllegalStateException if a Zipkin exporter is already registered.
    * @since 0.12
+   * @deprecated in favor of {@link #createAndRegister(ZipkinExporterConfiguration)}.
    */
+  @Deprecated
   public static void createAndRegister(
       SpanBytesEncoder encoder, Sender sender, String serviceName) {
-    synchronized (monitor) {
-      checkState(handler == null, "Zipkin exporter is already registered.");
-      Handler newHandler = new ZipkinExporterHandler(encoder, sender, serviceName);
-      handler = newHandler;
-      register(Tracing.getExportComponent().getSpanExporter(), newHandler);
-    }
+    createAndRegister(
+        ZipkinExporterConfiguration.builder()
+            .setSender(sender)
+            .setEncoder(encoder)
+            .setServiceName(serviceName)
+            .build());
   }
 
   /**
diff --git a/exporters/trace/zipkin/src/test/java/io/opencensus/exporter/trace/zipkin/ZipkinExporterConfigurationTest.java b/exporters/trace/zipkin/src/test/java/io/opencensus/exporter/trace/zipkin/ZipkinExporterConfigurationTest.java
new file mode 100644
index 0000000..2735185
--- /dev/null
+++ b/exporters/trace/zipkin/src/test/java/io/opencensus/exporter/trace/zipkin/ZipkinExporterConfigurationTest.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.trace.zipkin;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.common.Duration;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import zipkin2.codec.SpanBytesEncoder;
+import zipkin2.reporter.Sender;
+
+/** Unit tests for {@link ZipkinExporterConfiguration}. */
+@RunWith(JUnit4.class)
+public class ZipkinExporterConfigurationTest {
+
+  private static final String SERVICE = "service";
+  private static final String END_POINT = "endpoint";
+  private static final Duration ONE_MIN = Duration.create(60, 0);
+  private static final Duration NEG_ONE_MIN = Duration.create(-60, 0);
+
+  @Mock private static final Sender mockSender = Mockito.mock(Sender.class);
+
+  @Rule public final ExpectedException thrown = ExpectedException.none();
+
+  @Test
+  public void updateConfigs() {
+    ZipkinExporterConfiguration configuration =
+        ZipkinExporterConfiguration.builder()
+            .setServiceName(SERVICE)
+            .setDeadline(ONE_MIN)
+            .setSender(mockSender)
+            .setV2Url(END_POINT)
+            .setEncoder(SpanBytesEncoder.PROTO3)
+            .build();
+    assertThat(configuration.getServiceName()).isEqualTo(SERVICE);
+    assertThat(configuration.getDeadline()).isEqualTo(ONE_MIN);
+    assertThat(configuration.getV2Url()).isEqualTo(END_POINT);
+    assertThat(configuration.getSender()).isEqualTo(mockSender);
+    assertThat(configuration.getEncoder()).isEqualTo(SpanBytesEncoder.PROTO3);
+  }
+
+  @Test
+  public void needEitherUrlOrSender() {
+    ZipkinExporterConfiguration.Builder builder =
+        ZipkinExporterConfiguration.builder().setServiceName(SERVICE);
+    thrown.expect(IllegalArgumentException.class);
+    builder.build();
+  }
+
+  @Test
+  public void disallowZeroDuration() {
+    ZipkinExporterConfiguration.Builder builder =
+        ZipkinExporterConfiguration.builder().setServiceName(SERVICE).setV2Url(END_POINT);
+    builder.setDeadline(ZipkinExporterConfiguration.Builder.ZERO);
+    thrown.expect(IllegalArgumentException.class);
+    builder.build();
+  }
+
+  @Test
+  public void disallowNegativeDuration() {
+    ZipkinExporterConfiguration.Builder builder =
+        ZipkinExporterConfiguration.builder().setServiceName(SERVICE).setV2Url(END_POINT);
+    builder.setDeadline(NEG_ONE_MIN);
+    thrown.expect(IllegalArgumentException.class);
+    builder.build();
+  }
+}
diff --git a/exporters/trace/zipkin/src/test/java/io/opencensus/exporter/trace/zipkin/ZipkinExporterHandlerTest.java b/exporters/trace/zipkin/src/test/java/io/opencensus/exporter/trace/zipkin/ZipkinExporterHandlerTest.java
index 7e29300..aa731b4 100644
--- a/exporters/trace/zipkin/src/test/java/io/opencensus/exporter/trace/zipkin/ZipkinExporterHandlerTest.java
+++ b/exporters/trace/zipkin/src/test/java/io/opencensus/exporter/trace/zipkin/ZipkinExporterHandlerTest.java
@@ -102,7 +102,7 @@
                 .localEndpoint(localEndpoint)
                 .addAnnotation(1505855799000000L + 433901068L / 1000, "RECEIVED")
                 .addAnnotation(1505855799000000L + 459486280L / 1000, "SENT")
-                .putTag("census.status_code", "OK")
+                .putTag(ZipkinExporterHandler.STATUS_CODE, "OK")
                 .build());
   }
 
@@ -143,7 +143,7 @@
                 .localEndpoint(localEndpoint)
                 .addAnnotation(1505855799000000L + 433901068L / 1000, "RECEIVED")
                 .addAnnotation(1505855799000000L + 459486280L / 1000, "SENT")
-                .putTag("census.status_code", "OK")
+                .putTag(ZipkinExporterHandler.STATUS_CODE, "OK")
                 .build());
   }
 
@@ -184,7 +184,7 @@
                 .localEndpoint(localEndpoint)
                 .addAnnotation(1505855799000000L + 433901068L / 1000, "RECEIVED")
                 .addAnnotation(1505855799000000L + 459486280L / 1000, "SENT")
-                .putTag("census.status_code", "OK")
+                .putTag(ZipkinExporterHandler.STATUS_CODE, "OK")
                 .build());
   }
 
@@ -229,10 +229,53 @@
                 .localEndpoint(localEndpoint)
                 .addAnnotation(1505855799000000L + 433901068L / 1000, "RECEIVED")
                 .addAnnotation(1505855799000000L + 459486280L / 1000, "SENT")
-                .putTag("census.status_code", "OK")
+                .putTag(ZipkinExporterHandler.STATUS_CODE, "OK")
                 .putTag("string", "string value")
                 .putTag("boolean", "false")
                 .putTag("long", "9999")
                 .build());
   }
+
+  @Test
+  public void generateSpan_WithErrorStatus() {
+    String errorMessage = "timeout";
+    SpanData data =
+        SpanData.create(
+            SpanContext.create(
+                TraceId.fromLowerBase16(TRACE_ID),
+                SpanId.fromLowerBase16(SPAN_ID),
+                TraceOptions.builder().setIsSampled(true).build()),
+            SpanId.fromLowerBase16(PARENT_SPAN_ID),
+            true, /* hasRemoteParent */
+            "Recv.helloworld.Greeter.SayHello", /* name */
+            Kind.SERVER, /* kind */
+            Timestamp.create(1505855794, 194009601) /* startTimestamp */,
+            Attributes.create(attributes, 0 /* droppedAttributesCount */),
+            TimedEvents.create(annotations, 0 /* droppedEventsCount */),
+            TimedEvents.create(messageEvents, 0 /* droppedEventsCount */),
+            Links.create(Collections.<Link>emptyList(), 0 /* droppedLinksCount */),
+            null, /* childSpanCount */
+            Status.DEADLINE_EXCEEDED.withDescription(errorMessage),
+            Timestamp.create(1505855799, 465726528) /* endTimestamp */);
+
+    assertThat(ZipkinExporterHandler.generateSpan(data, localEndpoint))
+        .isEqualTo(
+            Span.newBuilder()
+                .traceId(TRACE_ID)
+                .parentId(PARENT_SPAN_ID)
+                .id(SPAN_ID)
+                .kind(Span.Kind.SERVER)
+                .name(data.getName())
+                .timestamp(1505855794000000L + 194009601L / 1000)
+                .duration(
+                    (1505855799000000L + 465726528L / 1000)
+                        - (1505855794000000L + 194009601L / 1000))
+                .localEndpoint(localEndpoint)
+                .addAnnotation(1505855799000000L + 433901068L / 1000, "RECEIVED")
+                .addAnnotation(1505855799000000L + 459486280L / 1000, "SENT")
+                .putTag(ZipkinExporterHandler.STATUS_CODE, "DEADLINE_EXCEEDED")
+                .putTag(ZipkinExporterHandler.STATUS_DESCRIPTION, errorMessage)
+                .putTag(ZipkinExporterHandler.STATUS_ERROR, "DEADLINE_EXCEEDED")
+                .build());
+  }
 }
diff --git a/exporters/trace/zipkin/src/test/java/io/opencensus/exporter/trace/zipkin/ZipkinTraceExporterTest.java b/exporters/trace/zipkin/src/test/java/io/opencensus/exporter/trace/zipkin/ZipkinTraceExporterTest.java
index 2a032d0..7b918cd 100644
--- a/exporters/trace/zipkin/src/test/java/io/opencensus/exporter/trace/zipkin/ZipkinTraceExporterTest.java
+++ b/exporters/trace/zipkin/src/test/java/io/opencensus/exporter/trace/zipkin/ZipkinTraceExporterTest.java
@@ -16,8 +16,8 @@
 
 package io.opencensus.exporter.trace.zipkin;
 
-import static org.mockito.Matchers.eq;
-import static org.mockito.Matchers.same;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.ArgumentMatchers.same;
 import static org.mockito.Mockito.verify;
 
 import io.opencensus.trace.export.SpanExporter;
diff --git a/impl/build.gradle b/impl/build.gradle
index 6dacddd..a11896e 100644
--- a/impl/build.gradle
+++ b/impl/build.gradle
@@ -12,9 +12,6 @@
             project(':opencensus-impl-core'),
             libraries.disruptor
 
-    testCompile project(':opencensus-api'),
-            project(':opencensus-impl-core')
-
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
 }
 
diff --git a/impl/src/main/java/io/opencensus/impl/internal/DisruptorEventQueue.java b/impl/src/main/java/io/opencensus/impl/internal/DisruptorEventQueue.java
index a0445b5..59d41b5 100644
--- a/impl/src/main/java/io/opencensus/impl/internal/DisruptorEventQueue.java
+++ b/impl/src/main/java/io/opencensus/impl/internal/DisruptorEventQueue.java
@@ -102,18 +102,12 @@
 
   // The event queue is built on this {@link Disruptor}.
   private final Disruptor<DisruptorEvent> disruptor;
-  // Ring Buffer for the {@link Disruptor} that underlies the queue.
-  private final RingBuffer<DisruptorEvent> ringBuffer;
 
   private volatile DisruptorEnqueuer enqueuer;
 
   // Creates a new EventQueue. Private to prevent creation of non-singleton instance.
-  private DisruptorEventQueue(
-      Disruptor<DisruptorEvent> disruptor,
-      RingBuffer<DisruptorEvent> ringBuffer,
-      DisruptorEnqueuer enqueuer) {
+  private DisruptorEventQueue(Disruptor<DisruptorEvent> disruptor, DisruptorEnqueuer enqueuer) {
     this.disruptor = disruptor;
-    this.ringBuffer = ringBuffer;
     this.enqueuer = enqueuer;
   }
 
@@ -128,7 +122,7 @@
             DISRUPTOR_BUFFER_SIZE,
             new DaemonThreadFactory("OpenCensus.Disruptor"),
             ProducerType.MULTI,
-            new SleepingWaitStrategy());
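+            // retries=0 skips the busy-spin/yield phases; the consumer thread parks for 1ms
+            // (1,000,000ns) between checks when idle, keeping idle CPU usage low.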
+            new SleepingWaitStrategy(0, 1000 * 1000));
     disruptor.handleEventsWith(new DisruptorEventHandler[] {DisruptorEventHandler.INSTANCE});
     disruptor.start();
     final RingBuffer<DisruptorEvent> ringBuffer = disruptor.getRingBuffer();
@@ -146,7 +140,7 @@
             }
           }
         };
-    return new DisruptorEventQueue(disruptor, ringBuffer, enqueuer);
+    return new DisruptorEventQueue(disruptor, enqueuer);
   }
 
   /**
diff --git a/impl_core/build.gradle b/impl_core/build.gradle
index 21158c3..725c695 100644
--- a/impl_core/build.gradle
+++ b/impl_core/build.gradle
@@ -6,8 +6,7 @@
 
     compileOnly libraries.auto_value
 
-    testCompile project(':opencensus-api'),
-            project(':opencensus-testing')
+    testCompile project(':opencensus-testing')
 
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
     signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
diff --git a/impl_core/src/jmh/java/io/opencensus/implcore/trace/propagation/TraceContextImplBenchmark.java b/impl_core/src/jmh/java/io/opencensus/implcore/trace/propagation/TraceContextImplBenchmark.java
new file mode 100644
index 0000000..d8388a6
--- /dev/null
+++ b/impl_core/src/jmh/java/io/opencensus/implcore/trace/propagation/TraceContextImplBenchmark.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.trace.propagation;
+
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.SpanId;
+import io.opencensus.trace.TraceId;
+import io.opencensus.trace.TraceOptions;
+import io.opencensus.trace.Tracestate;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.propagation.SpanContextParseException;
+import io.opencensus.trace.propagation.TextFormat;
+import io.opencensus.trace.propagation.TextFormat.Getter;
+import io.opencensus.trace.propagation.TextFormat.Setter;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+
+/** Benchmarks for {@link io.opencensus.implcore.trace.propagation.TraceContextFormat}. */
+@State(Scope.Benchmark)
+public class TraceContextImplBenchmark {
+  @State(Scope.Thread)
+  public static class Data {
+    private TextFormatBenchmarkBase textFormatBase;
+    private SpanContext spanContext;
+    private Map<String, String> spanContextHeaders;
+
+    @Setup
+    public void setup() {
+      textFormatBase =
+          new TextFormatBenchmarkBase(Tracing.getPropagationComponent().getTraceContextFormat());
+      Random random = new Random(1234);
+      spanContext =
+          SpanContext.create(
+              TraceId.generateRandomId(random),
+              SpanId.generateRandomId(random),
+              TraceOptions.builder().setIsSampled(random.nextBoolean()).build(),
+              Tracestate.builder().build());
+      spanContextHeaders = new HashMap<String, String>();
+      textFormatBase.inject(spanContext, spanContextHeaders);
+    }
+  }
+
+  /**
+   * This benchmark attempts to measure performance of {@link TextFormat#inject(SpanContext, Object,
+   * Setter)}.
+   */
+  @Benchmark
+  @BenchmarkMode(Mode.SampleTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public Map<String, String> inject(Data data) {
+    Map<String, String> carrier = new HashMap<String, String>();
+    data.textFormatBase.inject(data.spanContext, carrier);
+    return carrier;
+  }
+
+  /**
+   * This benchmark attempts to measure performance of {@link TextFormat#extract(Object, Getter)}.
+   */
+  @Benchmark
+  @BenchmarkMode(Mode.SampleTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public SpanContext extract(Data data) throws SpanContextParseException {
+    return data.textFormatBase.extract(data.spanContextHeaders);
+  }
+
+  /**
+   * This benchmark attempts to measure performance of {@link TextFormat#inject(SpanContext, Object,
+   * Setter)} then {@link TextFormat#extract(Object, Getter)}.
+   */
+  @Benchmark
+  @BenchmarkMode(Mode.SampleTime)
+  @OutputTimeUnit(TimeUnit.NANOSECONDS)
+  public SpanContext injectExtract(Data data) throws SpanContextParseException {
+    Map<String, String> carrier = new HashMap<String, String>();
+    data.textFormatBase.inject(data.spanContext, carrier);
+    return data.textFormatBase.extract(carrier);
+  }
+}
diff --git a/impl_core/src/main/java/io/opencensus/implcore/internal/CurrentState.java b/impl_core/src/main/java/io/opencensus/implcore/internal/CurrentState.java
index d7b1b11..bcaf7f9 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/internal/CurrentState.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/internal/CurrentState.java
@@ -36,16 +36,16 @@
 
   private enum InternalState {
     // Enabled and not read.
-    ENABLED_NOT_READ(State.ENABLED, false),
+    ENABLED_NOT_READ(State.ENABLED, /*isRead=*/ false),
 
     // Enabled and read.
-    ENABLED_READ(State.ENABLED, true),
+    ENABLED_READ(State.ENABLED, /*isRead=*/ true),
 
     // Disable and not read.
-    DISABLED_NOT_READ(State.DISABLED, false),
+    DISABLED_NOT_READ(State.DISABLED, /*isRead=*/ false),
 
     // Disable and read.
-    DISABLED_READ(State.DISABLED, true);
+    DISABLED_READ(State.DISABLED, /*isRead=*/ true);
 
     private final State state;
     private final boolean isRead;
diff --git a/impl_core/src/main/java/io/opencensus/implcore/internal/DaemonThreadFactory.java b/impl_core/src/main/java/io/opencensus/implcore/internal/DaemonThreadFactory.java
index 2baa500..ee3b4a2 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/internal/DaemonThreadFactory.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/internal/DaemonThreadFactory.java
@@ -16,19 +16,12 @@
 
 package io.opencensus.implcore.internal;
 
-import com.google.common.util.concurrent.MoreExecutors;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.atomic.AtomicInteger;
 
 /** A {@link ThreadFactory} implementation that starts all {@link Thread} as daemons. */
 public final class DaemonThreadFactory implements ThreadFactory {
-  // AppEngine runtimes have constraints on threading and socket handling
-  // that need to be accommodated.
-  public static final boolean IS_RESTRICTED_APPENGINE =
-      System.getProperty("com.google.appengine.runtime.environment") != null
-          && "1.7".equals(System.getProperty("java.specification.version"));
   private static final String DELIMITER = "-";
-  private static final ThreadFactory threadFactory = MoreExecutors.platformThreadFactory();
   private final AtomicInteger threadIdGen = new AtomicInteger();
   private final String threadPrefix;
 
@@ -43,10 +36,12 @@
 
   @Override
   public Thread newThread(Runnable r) {
-    Thread thread = threadFactory.newThread(r);
-    if (!IS_RESTRICTED_APPENGINE) {
+    Thread thread = new Thread(r);
+    try {
       thread.setName(threadPrefix + threadIdGen.getAndIncrement());
       thread.setDaemon(true);
+    } catch (SecurityException e) {
+      // OK if we can't set the name or daemon in this environment.
     }
     return thread;
   }
diff --git a/impl_core/src/main/java/io/opencensus/implcore/metrics/DerivedDoubleCumulativeImpl.java b/impl_core/src/main/java/io/opencensus/implcore/metrics/DerivedDoubleCumulativeImpl.java
new file mode 100644
index 0000000..3d0966f
--- /dev/null
+++ b/impl_core/src/main/java/io/opencensus/implcore/metrics/DerivedDoubleCumulativeImpl.java
@@ -0,0 +1,188 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.metrics;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import io.opencensus.common.Clock;
+import io.opencensus.common.Timestamp;
+import io.opencensus.common.ToDoubleFunction;
+import io.opencensus.implcore.internal.Utils;
+import io.opencensus.metrics.DerivedDoubleCumulative;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.TimeSeries;
+import io.opencensus.metrics.export.Value;
+import java.lang.ref.WeakReference;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import javax.annotation.concurrent.GuardedBy;
+
+/*>>>
+import org.checkerframework.checker.nullness.qual.Nullable;
+*/
+
+/** Implementation of {@link DerivedDoubleCumulative}. */
+public final class DerivedDoubleCumulativeImpl extends DerivedDoubleCumulative implements Meter {
+  private final MetricDescriptor metricDescriptor;
+  private final int labelKeysSize;
+  private final List<LabelValue> constantLabelValues;
+  private final Timestamp startTime;
+
+  private volatile Map<List<LabelValue>, PointWithFunction<?>> registeredPoints =
+      Collections.<List<LabelValue>, PointWithFunction<?>>emptyMap();
+
+  DerivedDoubleCumulativeImpl(
+      String name,
+      String description,
+      String unit,
+      List<LabelKey> labelKeys,
+      Map<LabelKey, LabelValue> constantLabels,
+      Timestamp startTime) {
+    List<LabelValue> constantLabelValues = new ArrayList<LabelValue>();
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    for (Entry<LabelKey, LabelValue> label : constantLabels.entrySet()) {
+      // Ensure constant label keys and values are in the same order.
+      allKeys.add(label.getKey());
+      constantLabelValues.add(label.getValue());
+    }
+    labelKeysSize = allKeys.size();
+    this.metricDescriptor =
+        MetricDescriptor.create(name, description, unit, Type.CUMULATIVE_DOUBLE, allKeys);
+    this.constantLabelValues = Collections.unmodifiableList(constantLabelValues);
+    this.startTime = startTime;
+  }
+
+  @Override
+  public synchronized <T> void createTimeSeries(
+      List<LabelValue> labelValues,
+      @javax.annotation.Nullable T obj,
+      ToDoubleFunction</*@Nullable*/ T> function) {
+    Utils.checkListElementNotNull(checkNotNull(labelValues, "labelValues"), "labelValue");
+    List<LabelValue> labelValuesCopy = new ArrayList<LabelValue>(labelValues);
+    labelValuesCopy.addAll(constantLabelValues);
+
+    checkArgument(
+        labelKeysSize == labelValuesCopy.size(),
+        "Label Keys and Label Values don't have same size.");
+    checkNotNull(function, "function");
+
+    PointWithFunction<?> existingPoint =
+        registeredPoints.get(Collections.unmodifiableList(labelValuesCopy));
+    if (existingPoint != null) {
+      throw new IllegalArgumentException(
+          "A different time series with the same labels already exists.");
+    }
+
+    PointWithFunction<T> newPoint =
+        new PointWithFunction<T>(labelValuesCopy, obj, function, startTime);
+    // Updating the map of time series happens under a lock to prevent multiple add
+    // operations from happening at the same time.
+    Map<List<LabelValue>, PointWithFunction<?>> registeredPointsCopy =
+        new LinkedHashMap<List<LabelValue>, PointWithFunction<?>>(registeredPoints);
+    registeredPointsCopy.put(labelValuesCopy, newPoint);
+    registeredPoints = Collections.unmodifiableMap(registeredPointsCopy);
+  }
+
+  @Override
+  public synchronized void removeTimeSeries(List<LabelValue> labelValues) {
+    List<LabelValue> labelValuesCopy =
+        new ArrayList<LabelValue>(checkNotNull(labelValues, "labelValues"));
+    labelValuesCopy.addAll(constantLabelValues);
+
+    Map<List<LabelValue>, PointWithFunction<?>> registeredPointsCopy =
+        new LinkedHashMap<List<LabelValue>, PointWithFunction<?>>(registeredPoints);
+    if (registeredPointsCopy.remove(labelValuesCopy) == null) {
+      // The element is not present; no need to update the current map of time series.
+      return;
+    }
+    registeredPoints = Collections.unmodifiableMap(registeredPointsCopy);
+  }
+
+  @Override
+  public synchronized void clear() {
+    registeredPoints = Collections.<List<LabelValue>, PointWithFunction<?>>emptyMap();
+  }
+
+  @Override
+  public MetricDescriptor getMetricDescriptor() {
+    return metricDescriptor;
+  }
+
+  @javax.annotation.Nullable
+  @Override
+  public Metric getMetric(Clock clock) {
+    Map<List<LabelValue>, PointWithFunction<?>> currentRegisteredPoints = registeredPoints;
+    if (currentRegisteredPoints.isEmpty()) {
+      return null;
+    }
+
+    if (currentRegisteredPoints.size() == 1) {
+      PointWithFunction<?> point = currentRegisteredPoints.values().iterator().next();
+      return Metric.createWithOneTimeSeries(metricDescriptor, point.getTimeSeries(clock));
+    }
+
+    List<TimeSeries> timeSeriesList = new ArrayList<TimeSeries>(currentRegisteredPoints.size());
+    for (Map.Entry<List<LabelValue>, PointWithFunction<?>> entry :
+        currentRegisteredPoints.entrySet()) {
+      timeSeriesList.add(entry.getValue().getTimeSeries(clock));
+    }
+    return Metric.create(metricDescriptor, timeSeriesList);
+  }
+
+  /** Holds a tracked object and a callback function that computes the point's value. */
+  public static final class PointWithFunction<T> {
+    private final List<LabelValue> labelValues;
+    private final Timestamp startTime;
+    @javax.annotation.Nullable private final WeakReference<T> ref;
+    private final ToDoubleFunction</*@Nullable*/ T> function;
+
+    @GuardedBy("this")
+    private double value = 0.0;
+
+    PointWithFunction(
+        List<LabelValue> labelValues,
+        @javax.annotation.Nullable T obj,
+        ToDoubleFunction</*@Nullable*/ T> function,
+        Timestamp startTime) {
+      this.labelValues = labelValues;
+      this.startTime = startTime;
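+      // Hold the tracked object weakly so that registering it here does not keep it alive.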
+      ref = obj != null ? new WeakReference<T>(obj) : null;
+      this.function = function;
+    }
+
+    private TimeSeries getTimeSeries(Clock clock) {
+      final T obj = ref != null ? ref.get() : null;
+      double newValue = function.applyAsDouble(obj);
+      Point point;
+      synchronized (this) {
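+        // Cumulative values are monotonic: keep the previously reported value if the callback
+        // returns a smaller one.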
+        value = newValue > value ? newValue : value;
+        point = Point.create(Value.doubleValue(value), clock.now());
+      }
+      return TimeSeries.createWithOnePoint(labelValues, point, startTime);
+    }
+  }
+}
diff --git a/impl_core/src/main/java/io/opencensus/implcore/metrics/DerivedDoubleGaugeImpl.java b/impl_core/src/main/java/io/opencensus/implcore/metrics/DerivedDoubleGaugeImpl.java
index b7104c9..3c63a60 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/metrics/DerivedDoubleGaugeImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/metrics/DerivedDoubleGaugeImpl.java
@@ -37,6 +37,7 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 
 /*>>>
 import org.checkerframework.checker.nullness.qual.Nullable;
@@ -46,54 +47,69 @@
 public final class DerivedDoubleGaugeImpl extends DerivedDoubleGauge implements Meter {
   private final MetricDescriptor metricDescriptor;
   private final int labelKeysSize;
+  private final List<LabelValue> constantLabelValues;
 
-  @SuppressWarnings("rawtypes")
-  private volatile Map<List<LabelValue>, PointWithFunction> registeredPoints =
-      Collections.<List<LabelValue>, PointWithFunction>emptyMap();
+  private volatile Map<List<LabelValue>, PointWithFunction<?>> registeredPoints =
+      Collections.<List<LabelValue>, PointWithFunction<?>>emptyMap();
 
-  DerivedDoubleGaugeImpl(String name, String description, String unit, List<LabelKey> labelKeys) {
-    labelKeysSize = labelKeys.size();
+  DerivedDoubleGaugeImpl(
+      String name,
+      String description,
+      String unit,
+      List<LabelKey> labelKeys,
+      Map<LabelKey, LabelValue> constantLabels) {
+    List<LabelValue> constantLabelValues = new ArrayList<LabelValue>();
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    for (Entry<LabelKey, LabelValue> label : constantLabels.entrySet()) {
+      // Ensure constant label keys and values are in the same order.
+      allKeys.add(label.getKey());
+      constantLabelValues.add(label.getValue());
+    }
+    labelKeysSize = allKeys.size();
     this.metricDescriptor =
-        MetricDescriptor.create(name, description, unit, Type.GAUGE_DOUBLE, labelKeys);
+        MetricDescriptor.create(name, description, unit, Type.GAUGE_DOUBLE, allKeys);
+    this.constantLabelValues = Collections.unmodifiableList(constantLabelValues);
   }
 
   @Override
-  @SuppressWarnings("rawtypes")
   public synchronized <T> void createTimeSeries(
       List<LabelValue> labelValues,
-      /*@Nullable*/ T obj,
+      @javax.annotation.Nullable T obj,
       ToDoubleFunction</*@Nullable*/ T> function) {
-    Utils.checkListElementNotNull(
-        checkNotNull(labelValues, "labelValues"), "labelValue element should not be null.");
-    checkArgument(labelKeysSize == labelValues.size(), "Incorrect number of labels.");
+    Utils.checkListElementNotNull(checkNotNull(labelValues, "labelValues"), "labelValue");
+    List<LabelValue> labelValuesCopy = new ArrayList<LabelValue>(labelValues);
+    labelValuesCopy.addAll(constantLabelValues);
+
+    checkArgument(
+        labelKeysSize == labelValuesCopy.size(),
+        "Label Keys and Label Values don't have same size.");
     checkNotNull(function, "function");
 
-    List<LabelValue> labelValuesCopy =
-        Collections.<LabelValue>unmodifiableList(new ArrayList<LabelValue>(labelValues));
-
-    PointWithFunction existingPoint = registeredPoints.get(labelValuesCopy);
+    PointWithFunction<?> existingPoint =
+        registeredPoints.get(Collections.unmodifiableList(labelValuesCopy));
     if (existingPoint != null) {
       throw new IllegalArgumentException(
           "A different time series with the same labels already exists.");
     }
 
-    PointWithFunction newPoint = new PointWithFunction<T>(labelValuesCopy, obj, function);
+    PointWithFunction<T> newPoint = new PointWithFunction<T>(labelValuesCopy, obj, function);
     // Updating the map of time series happens under a lock to avoid multiple add operations
     // to happen in the same time.
-    Map<List<LabelValue>, PointWithFunction> registeredPointsCopy =
-        new LinkedHashMap<List<LabelValue>, PointWithFunction>(registeredPoints);
+    Map<List<LabelValue>, PointWithFunction<?>> registeredPointsCopy =
+        new LinkedHashMap<List<LabelValue>, PointWithFunction<?>>(registeredPoints);
     registeredPointsCopy.put(labelValuesCopy, newPoint);
     registeredPoints = Collections.unmodifiableMap(registeredPointsCopy);
   }
 
   @Override
-  @SuppressWarnings("rawtypes")
   public synchronized void removeTimeSeries(List<LabelValue> labelValues) {
-    checkNotNull(labelValues, "labelValues");
+    List<LabelValue> labelValuesCopy =
+        new ArrayList<LabelValue>(checkNotNull(labelValues, "labelValues"));
+    labelValuesCopy.addAll(constantLabelValues);
 
-    Map<List<LabelValue>, PointWithFunction> registeredPointsCopy =
-        new LinkedHashMap<List<LabelValue>, PointWithFunction>(registeredPoints);
-    if (registeredPointsCopy.remove(labelValues) == null) {
+    Map<List<LabelValue>, PointWithFunction<?>> registeredPointsCopy =
+        new LinkedHashMap<List<LabelValue>, PointWithFunction<?>>(registeredPoints);
+    if (registeredPointsCopy.remove(labelValuesCopy) == null) {
       // The element not present, no need to update the current map of time series.
       return;
     }
@@ -101,27 +117,30 @@
   }
 
   @Override
-  @SuppressWarnings("rawtypes")
   public synchronized void clear() {
-    registeredPoints = Collections.<List<LabelValue>, PointWithFunction>emptyMap();
+    registeredPoints = Collections.<List<LabelValue>, PointWithFunction<?>>emptyMap();
   }
 
-  /*@Nullable*/
   @Override
-  @SuppressWarnings("rawtypes")
+  public MetricDescriptor getMetricDescriptor() {
+    return metricDescriptor;
+  }
+
+  @javax.annotation.Nullable
+  @Override
   public Metric getMetric(Clock clock) {
-    Map<List<LabelValue>, PointWithFunction> currentRegisteredPoints = registeredPoints;
+    Map<List<LabelValue>, PointWithFunction<?>> currentRegisteredPoints = registeredPoints;
     if (currentRegisteredPoints.isEmpty()) {
       return null;
     }
 
     if (currentRegisteredPoints.size() == 1) {
-      PointWithFunction point = currentRegisteredPoints.values().iterator().next();
+      PointWithFunction<?> point = currentRegisteredPoints.values().iterator().next();
       return Metric.createWithOneTimeSeries(metricDescriptor, point.getTimeSeries(clock));
     }
 
     List<TimeSeries> timeSeriesList = new ArrayList<TimeSeries>(currentRegisteredPoints.size());
-    for (Map.Entry<List<LabelValue>, PointWithFunction> entry :
+    for (Map.Entry<List<LabelValue>, PointWithFunction<?>> entry :
         currentRegisteredPoints.entrySet()) {
       timeSeriesList.add(entry.getValue().getTimeSeries(clock));
     }
@@ -130,15 +149,15 @@
 
   /** Implementation of {@link PointWithFunction} with an object and a callback function. */
   public static final class PointWithFunction<T> {
-    private final List<LabelValue> labelValues;
+    private final TimeSeries defaultTimeSeries;
     @javax.annotation.Nullable private final WeakReference<T> ref;
     private final ToDoubleFunction</*@Nullable*/ T> function;
 
     PointWithFunction(
         List<LabelValue> labelValues,
-        /*@Nullable*/ T obj,
+        @javax.annotation.Nullable T obj,
         ToDoubleFunction</*@Nullable*/ T> function) {
-      this.labelValues = labelValues;
+      defaultTimeSeries = TimeSeries.create(labelValues);
       ref = obj != null ? new WeakReference<T>(obj) : null;
       this.function = function;
     }
@@ -146,10 +165,7 @@
     private TimeSeries getTimeSeries(Clock clock) {
       final T obj = ref != null ? ref.get() : null;
       double value = function.applyAsDouble(obj);
-
-      // TODO(mayurkale): OPTIMIZATION: Avoid re-evaluate the labelValues all the time (issue#1490).
-      return TimeSeries.createWithOnePoint(
-          labelValues, Point.create(Value.doubleValue(value), clock.now()), null);
+      return defaultTimeSeries.setPoint(Point.create(Value.doubleValue(value), clock.now()));
     }
   }
 }
diff --git a/impl_core/src/main/java/io/opencensus/implcore/metrics/DerivedLongCumulativeImpl.java b/impl_core/src/main/java/io/opencensus/implcore/metrics/DerivedLongCumulativeImpl.java
new file mode 100644
index 0000000..08e627a
--- /dev/null
+++ b/impl_core/src/main/java/io/opencensus/implcore/metrics/DerivedLongCumulativeImpl.java
@@ -0,0 +1,188 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.metrics;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import io.opencensus.common.Clock;
+import io.opencensus.common.Timestamp;
+import io.opencensus.common.ToLongFunction;
+import io.opencensus.implcore.internal.Utils;
+import io.opencensus.metrics.DerivedLongCumulative;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.TimeSeries;
+import io.opencensus.metrics.export.Value;
+import java.lang.ref.WeakReference;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import javax.annotation.concurrent.GuardedBy;
+
+/*>>>
+import org.checkerframework.checker.nullness.qual.Nullable;
+*/
+
+/** Implementation of {@link DerivedLongCumulative}. */
+public final class DerivedLongCumulativeImpl extends DerivedLongCumulative implements Meter {
+  private final MetricDescriptor metricDescriptor;
+  private final int labelKeysSize;
+  private final List<LabelValue> constantLabelValues;
+  private final Timestamp startTime;
+
+  private volatile Map<List<LabelValue>, PointWithFunction<?>> registeredPoints =
+      Collections.<List<LabelValue>, PointWithFunction<?>>emptyMap();
+
+  DerivedLongCumulativeImpl(
+      String name,
+      String description,
+      String unit,
+      List<LabelKey> labelKeys,
+      Map<LabelKey, LabelValue> constantLabels,
+      Timestamp startTime) {
+    List<LabelValue> constantLabelValues = new ArrayList<LabelValue>();
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    for (Entry<LabelKey, LabelValue> label : constantLabels.entrySet()) {
+      // Ensure constant label keys and values are in the same order.
+      allKeys.add(label.getKey());
+      constantLabelValues.add(label.getValue());
+    }
+    labelKeysSize = allKeys.size();
+    this.metricDescriptor =
+        MetricDescriptor.create(name, description, unit, Type.CUMULATIVE_INT64, allKeys);
+    this.constantLabelValues = Collections.unmodifiableList(constantLabelValues);
+    this.startTime = startTime;
+  }
+
+  @Override
+  public synchronized <T> void createTimeSeries(
+      List<LabelValue> labelValues,
+      @javax.annotation.Nullable T obj,
+      ToLongFunction</*@Nullable*/ T> function) {
+    checkNotNull(function, "function");
+    Utils.checkListElementNotNull(checkNotNull(labelValues, "labelValues"), "labelValue");
+    List<LabelValue> labelValuesCopy = new ArrayList<LabelValue>(labelValues);
+    labelValuesCopy.addAll(constantLabelValues);
+
+    checkArgument(
+        labelKeysSize == labelValuesCopy.size(),
+        "Label Keys and Label Values don't have same size.");
+
+    PointWithFunction<?> existingPoint =
+        registeredPoints.get(Collections.unmodifiableList(labelValuesCopy));
+    if (existingPoint != null) {
+      throw new IllegalArgumentException(
+          "A different time series with the same labels already exists.");
+    }
+
+    PointWithFunction<T> newPoint =
+        new PointWithFunction<T>(labelValuesCopy, obj, function, startTime);
+    // Updating the map of time series happens under a lock to prevent multiple add
+    // operations from happening at the same time.
+    Map<List<LabelValue>, PointWithFunction<?>> registeredPointsCopy =
+        new LinkedHashMap<List<LabelValue>, PointWithFunction<?>>(registeredPoints);
+    registeredPointsCopy.put(labelValuesCopy, newPoint);
+    registeredPoints = Collections.unmodifiableMap(registeredPointsCopy);
+  }
+
+  @Override
+  public synchronized void removeTimeSeries(List<LabelValue> labelValues) {
+    List<LabelValue> labelValuesCopy =
+        new ArrayList<LabelValue>(checkNotNull(labelValues, "labelValues"));
+    labelValuesCopy.addAll(constantLabelValues);
+
+    Map<List<LabelValue>, PointWithFunction<?>> registeredPointsCopy =
+        new LinkedHashMap<List<LabelValue>, PointWithFunction<?>>(registeredPoints);
+    if (registeredPointsCopy.remove(labelValuesCopy) == null) {
+      // The element is not present; no need to update the current map of time series.
+      return;
+    }
+    registeredPoints = Collections.unmodifiableMap(registeredPointsCopy);
+  }
+
+  @Override
+  public synchronized void clear() {
+    registeredPoints = Collections.<List<LabelValue>, PointWithFunction<?>>emptyMap();
+  }
+
+  @Override
+  public MetricDescriptor getMetricDescriptor() {
+    return metricDescriptor;
+  }
+
+  @javax.annotation.Nullable
+  @Override
+  public Metric getMetric(Clock clock) {
+    Map<List<LabelValue>, PointWithFunction<?>> currentRegisteredPoints = registeredPoints;
+    if (currentRegisteredPoints.isEmpty()) {
+      return null;
+    }
+
+    if (currentRegisteredPoints.size() == 1) {
+      PointWithFunction<?> point = currentRegisteredPoints.values().iterator().next();
+      return Metric.createWithOneTimeSeries(metricDescriptor, point.getTimeSeries(clock));
+    }
+
+    List<TimeSeries> timeSeriesList = new ArrayList<TimeSeries>(currentRegisteredPoints.size());
+    for (Map.Entry<List<LabelValue>, PointWithFunction<?>> entry :
+        currentRegisteredPoints.entrySet()) {
+      timeSeriesList.add(entry.getValue().getTimeSeries(clock));
+    }
+    return Metric.create(metricDescriptor, timeSeriesList);
+  }
+
+  /** Holds a tracked object and a callback function that computes the point's value. */
+  public static final class PointWithFunction<T> {
+    private final List<LabelValue> labelValues;
+    private final Timestamp startTime;
+    @javax.annotation.Nullable private final WeakReference<T> ref;
+    private final ToLongFunction</*@Nullable*/ T> function;
+
+    @GuardedBy("this")
+    private long value = 0;
+
+    PointWithFunction(
+        List<LabelValue> labelValues,
+        @javax.annotation.Nullable T obj,
+        ToLongFunction</*@Nullable*/ T> function,
+        Timestamp startTime) {
+      this.labelValues = labelValues;
+      this.startTime = startTime;
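+      // Hold the tracked object via a WeakReference so the meter does not prevent it from being
+      // garbage collected.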
+      ref = obj != null ? new WeakReference<T>(obj) : null;
+      this.function = function;
+    }
+
+    private TimeSeries getTimeSeries(Clock clock) {
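+      // The weakly referenced object may already have been collected; in that case the callback
+      // is invoked with null.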
+      final T obj = ref != null ? ref.get() : null;
+      long newValue = function.applyAsLong(obj);
+      Point point;
+      synchronized (this) {
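+        // Keep the running value monotonic: ignore callback results that are smaller than the
+        // last reported value.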
+        value = newValue > value ? newValue : value;
+        point = Point.create(Value.longValue(value), clock.now());
+      }
+      return TimeSeries.createWithOnePoint(labelValues, point, startTime);
+    }
+  }
+}
diff --git a/impl_core/src/main/java/io/opencensus/implcore/metrics/DerivedLongGaugeImpl.java b/impl_core/src/main/java/io/opencensus/implcore/metrics/DerivedLongGaugeImpl.java
index 90e3e70..8a27c59 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/metrics/DerivedLongGaugeImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/metrics/DerivedLongGaugeImpl.java
@@ -37,6 +37,7 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 
 /*>>>
 import org.checkerframework.checker.nullness.qual.Nullable;
@@ -46,52 +47,69 @@
 public final class DerivedLongGaugeImpl extends DerivedLongGauge implements Meter {
   private final MetricDescriptor metricDescriptor;
   private final int labelKeysSize;
+  private final List<LabelValue> constantLabelValues;
 
-  @SuppressWarnings("rawtypes")
-  private volatile Map<List<LabelValue>, PointWithFunction> registeredPoints =
-      Collections.<List<LabelValue>, PointWithFunction>emptyMap();
+  private volatile Map<List<LabelValue>, PointWithFunction<?>> registeredPoints =
+      Collections.<List<LabelValue>, PointWithFunction<?>>emptyMap();
 
-  DerivedLongGaugeImpl(String name, String description, String unit, List<LabelKey> labelKeys) {
-    labelKeysSize = labelKeys.size();
+  DerivedLongGaugeImpl(
+      String name,
+      String description,
+      String unit,
+      List<LabelKey> labelKeys,
+      Map<LabelKey, LabelValue> constantLabels) {
+    List<LabelValue> constantLabelValues = new ArrayList<LabelValue>();
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    for (Entry<LabelKey, LabelValue> label : constantLabels.entrySet()) {
+      // Ensure constant label keys and values are in the same order.
+      allKeys.add(label.getKey());
+      constantLabelValues.add(label.getValue());
+    }
+    labelKeysSize = allKeys.size();
     this.metricDescriptor =
-        MetricDescriptor.create(name, description, unit, Type.GAUGE_INT64, labelKeys);
+        MetricDescriptor.create(name, description, unit, Type.GAUGE_INT64, allKeys);
+    this.constantLabelValues = Collections.unmodifiableList(constantLabelValues);
   }
 
   @Override
-  @SuppressWarnings("rawtypes")
   public synchronized <T> void createTimeSeries(
-      List<LabelValue> labelValues, /*@Nullable*/ T obj, ToLongFunction</*@Nullable*/ T> function) {
-    Utils.checkListElementNotNull(
-        checkNotNull(labelValues, "labelValues"), "labelValue element should not be null.");
-    checkArgument(labelKeysSize == labelValues.size(), "Incorrect number of labels.");
+      List<LabelValue> labelValues,
+      @javax.annotation.Nullable T obj,
+      ToLongFunction</*@Nullable*/ T> function) {
     checkNotNull(function, "function");
+    Utils.checkListElementNotNull(checkNotNull(labelValues, "labelValues"), "labelValue");
+    List<LabelValue> labelValuesCopy = new ArrayList<LabelValue>(labelValues);
+    labelValuesCopy.addAll(constantLabelValues);
 
-    List<LabelValue> labelValuesCopy =
-        Collections.unmodifiableList(new ArrayList<LabelValue>(labelValues));
+    checkArgument(
+        labelKeysSize == labelValuesCopy.size(),
+        "Label Keys and Label Values don't have same size.");
 
-    PointWithFunction existingPoint = registeredPoints.get(labelValuesCopy);
+    PointWithFunction<?> existingPoint =
+        registeredPoints.get(Collections.unmodifiableList(labelValuesCopy));
     if (existingPoint != null) {
       throw new IllegalArgumentException(
           "A different time series with the same labels already exists.");
     }
 
-    PointWithFunction newPoint = new PointWithFunction<T>(labelValuesCopy, obj, function);
+    PointWithFunction<T> newPoint = new PointWithFunction<T>(labelValuesCopy, obj, function);
     // Updating the map of time series happens under a lock to avoid multiple add operations
     // to happen in the same time.
-    Map<List<LabelValue>, PointWithFunction> registeredPointsCopy =
-        new LinkedHashMap<List<LabelValue>, PointWithFunction>(registeredPoints);
+    Map<List<LabelValue>, PointWithFunction<?>> registeredPointsCopy =
+        new LinkedHashMap<List<LabelValue>, PointWithFunction<?>>(registeredPoints);
     registeredPointsCopy.put(labelValuesCopy, newPoint);
     registeredPoints = Collections.unmodifiableMap(registeredPointsCopy);
   }
 
   @Override
-  @SuppressWarnings("rawtypes")
   public synchronized void removeTimeSeries(List<LabelValue> labelValues) {
-    checkNotNull(labelValues, "labelValues");
+    List<LabelValue> labelValuesCopy =
+        new ArrayList<LabelValue>(checkNotNull(labelValues, "labelValues"));
+    labelValuesCopy.addAll(constantLabelValues);
 
-    Map<List<LabelValue>, PointWithFunction> registeredPointsCopy =
-        new LinkedHashMap<List<LabelValue>, PointWithFunction>(registeredPoints);
-    if (registeredPointsCopy.remove(labelValues) == null) {
+    Map<List<LabelValue>, PointWithFunction<?>> registeredPointsCopy =
+        new LinkedHashMap<List<LabelValue>, PointWithFunction<?>>(registeredPoints);
+    if (registeredPointsCopy.remove(labelValuesCopy) == null) {
       // The element not present, no need to update the current map of time series.
       return;
     }
@@ -99,27 +117,30 @@
   }
 
   @Override
-  @SuppressWarnings("rawtypes")
   public synchronized void clear() {
-    registeredPoints = Collections.<List<LabelValue>, PointWithFunction>emptyMap();
+    registeredPoints = Collections.<List<LabelValue>, PointWithFunction<?>>emptyMap();
   }
 
-  /*@Nullable*/
   @Override
-  @SuppressWarnings("rawtypes")
+  public MetricDescriptor getMetricDescriptor() {
+    return metricDescriptor;
+  }
+
+  @javax.annotation.Nullable
+  @Override
   public Metric getMetric(Clock clock) {
-    Map<List<LabelValue>, PointWithFunction> currentRegisteredPoints = registeredPoints;
+    Map<List<LabelValue>, PointWithFunction<?>> currentRegisteredPoints = registeredPoints;
     if (currentRegisteredPoints.isEmpty()) {
       return null;
     }
 
     if (currentRegisteredPoints.size() == 1) {
-      PointWithFunction point = currentRegisteredPoints.values().iterator().next();
+      PointWithFunction<?> point = currentRegisteredPoints.values().iterator().next();
       return Metric.createWithOneTimeSeries(metricDescriptor, point.getTimeSeries(clock));
     }
 
     List<TimeSeries> timeSeriesList = new ArrayList<TimeSeries>(currentRegisteredPoints.size());
-    for (Map.Entry<List<LabelValue>, PointWithFunction> entry :
+    for (Map.Entry<List<LabelValue>, PointWithFunction<?>> entry :
         currentRegisteredPoints.entrySet()) {
       timeSeriesList.add(entry.getValue().getTimeSeries(clock));
     }
@@ -128,15 +149,15 @@
 
   /** Implementation of {@link PointWithFunction} with an object and a callback function. */
   public static final class PointWithFunction<T> {
-    private final List<LabelValue> labelValues;
+    private final TimeSeries defaultTimeSeries;
     @javax.annotation.Nullable private final WeakReference<T> ref;
     private final ToLongFunction</*@Nullable*/ T> function;
 
     PointWithFunction(
         List<LabelValue> labelValues,
-        /*@Nullable*/ T obj,
+        @javax.annotation.Nullable T obj,
         ToLongFunction</*@Nullable*/ T> function) {
-      this.labelValues = labelValues;
+      defaultTimeSeries = TimeSeries.create(labelValues);
       ref = obj != null ? new WeakReference<T>(obj) : null;
       this.function = function;
     }
@@ -144,10 +165,7 @@
     private TimeSeries getTimeSeries(Clock clock) {
       final T obj = ref != null ? ref.get() : null;
       long value = function.applyAsLong(obj);
-
-      // TODO(mayurkale): OPTIMIZATION: Avoid re-evaluate the labelValues all the time (issue#1490).
-      return TimeSeries.createWithOnePoint(
-          labelValues, Point.create(Value.longValue(value), clock.now()), null);
+      return defaultTimeSeries.setPoint(Point.create(Value.longValue(value), clock.now()));
     }
   }
 }
diff --git a/impl_core/src/main/java/io/opencensus/implcore/metrics/DoubleCumulativeImpl.java b/impl_core/src/main/java/io/opencensus/implcore/metrics/DoubleCumulativeImpl.java
new file mode 100644
index 0000000..5f4f7c6
--- /dev/null
+++ b/impl_core/src/main/java/io/opencensus/implcore/metrics/DoubleCumulativeImpl.java
@@ -0,0 +1,201 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.metrics;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.util.concurrent.AtomicDouble;
+import io.opencensus.common.Clock;
+import io.opencensus.common.Timestamp;
+import io.opencensus.implcore.internal.Utils;
+import io.opencensus.metrics.DoubleCumulative;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.TimeSeries;
+import io.opencensus.metrics.export.Value;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import javax.annotation.Nullable;
+
+/** Implementation of {@link DoubleCumulative}. */
+public final class DoubleCumulativeImpl extends DoubleCumulative implements Meter {
+  @VisibleForTesting static final LabelValue UNSET_VALUE = LabelValue.create(null);
+
+  private final MetricDescriptor metricDescriptor;
+  private volatile Map<List<LabelValue>, PointImpl> registeredPoints =
+      Collections.<List<LabelValue>, PointImpl>emptyMap();
+  private final int labelKeysSize;
+  private final List<LabelValue> defaultLabelValues;
+  private final List<LabelValue> constantLabelValues;
+  private final Timestamp startTime;
+
+  DoubleCumulativeImpl(
+      String name,
+      String description,
+      String unit,
+      List<LabelKey> labelKeys,
+      Map<LabelKey, LabelValue> constantLabels,
+      Timestamp startTime) {
+    List<LabelValue> constantLabelValues = new ArrayList<LabelValue>();
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    for (Entry<LabelKey, LabelValue> label : constantLabels.entrySet()) {
+      // Ensure constant label keys and values are in the same order.
+      allKeys.add(label.getKey());
+      constantLabelValues.add(label.getValue());
+    }
+    labelKeysSize = allKeys.size();
+    this.metricDescriptor =
+        MetricDescriptor.create(name, description, unit, Type.CUMULATIVE_DOUBLE, allKeys);
+    this.constantLabelValues = Collections.unmodifiableList(constantLabelValues);
+    this.startTime = startTime;
+
+    // Initialize defaultLabelValues: unset values for the declared keys plus the constant labels.
+    defaultLabelValues = new ArrayList<LabelValue>(labelKeys.size());
+    for (int i = 0; i < labelKeys.size(); i++) {
+      defaultLabelValues.add(UNSET_VALUE);
+    }
+    defaultLabelValues.addAll(constantLabelValues);
+  }
+
+  @Override
+  public DoublePoint getOrCreateTimeSeries(List<LabelValue> labelValues) {
+    // Lock-free point retrieval if it is already present.
+    PointImpl existingPoint = registeredPoints.get(labelValues);
+    if (existingPoint != null) {
+      return existingPoint;
+    }
+
+    List<LabelValue> labelValuesCopy =
+        new ArrayList<LabelValue>(checkNotNull(labelValues, "labelValues"));
+    labelValuesCopy.addAll(constantLabelValues);
+    return registerTimeSeries(Collections.unmodifiableList(labelValuesCopy));
+  }
+
+  @Override
+  public DoublePoint getDefaultTimeSeries() {
+    // Lock-free default point retrieval if it is already present.
+    PointImpl existingPoint = registeredPoints.get(defaultLabelValues);
+    if (existingPoint != null) {
+      return existingPoint;
+    }
+    return registerTimeSeries(Collections.unmodifiableList(defaultLabelValues));
+  }
+
+  @Override
+  public synchronized void removeTimeSeries(List<LabelValue> labelValues) {
+    List<LabelValue> labelValuesCopy =
+        new ArrayList<LabelValue>(checkNotNull(labelValues, "labelValues"));
+    labelValuesCopy.addAll(constantLabelValues);
+
+    Map<List<LabelValue>, PointImpl> registeredPointsCopy =
+        new LinkedHashMap<List<LabelValue>, PointImpl>(registeredPoints);
+    if (registeredPointsCopy.remove(labelValuesCopy) == null) {
+      // The element is not present; no need to update the current map of points.
+      return;
+    }
+    registeredPoints = Collections.unmodifiableMap(registeredPointsCopy);
+  }
+
+  @Override
+  public synchronized void clear() {
+    registeredPoints = Collections.<List<LabelValue>, PointImpl>emptyMap();
+  }
+
+  @Override
+  public MetricDescriptor getMetricDescriptor() {
+    return metricDescriptor;
+  }
+
+  private synchronized DoublePoint registerTimeSeries(List<LabelValue> labelValues) {
+    PointImpl existingPoint = registeredPoints.get(labelValues);
+    if (existingPoint != null) {
+      // Return a Point that is already registered. This can happen if multiple threads
+      // concurrently try to register the same {@code TimeSeries}.
+      return existingPoint;
+    }
+
+    checkArgument(
+        labelKeysSize == labelValues.size(), "Label Keys and Label Values don't have same size.");
+    Utils.checkListElementNotNull(labelValues, "labelValue");
+
+    PointImpl newPoint = new PointImpl(labelValues, startTime);
+    // Updating the map of points happens under a lock to prevent multiple add
+    // operations from happening at the same time.
+    Map<List<LabelValue>, PointImpl> registeredPointsCopy =
+        new LinkedHashMap<List<LabelValue>, PointImpl>(registeredPoints);
+    registeredPointsCopy.put(labelValues, newPoint);
+    registeredPoints = Collections.unmodifiableMap(registeredPointsCopy);
+
+    return newPoint;
+  }
+
+  @Nullable
+  @Override
+  public Metric getMetric(Clock clock) {
+    Map<List<LabelValue>, PointImpl> currentRegisteredPoints = registeredPoints;
+    if (currentRegisteredPoints.isEmpty()) {
+      return null;
+    }
+
+    if (currentRegisteredPoints.size() == 1) {
+      PointImpl point = currentRegisteredPoints.values().iterator().next();
+      return Metric.createWithOneTimeSeries(metricDescriptor, point.getTimeSeries(clock));
+    }
+
+    List<TimeSeries> timeSeriesList = new ArrayList<TimeSeries>(currentRegisteredPoints.size());
+    for (Map.Entry<List<LabelValue>, PointImpl> entry : currentRegisteredPoints.entrySet()) {
+      timeSeriesList.add(entry.getValue().getTimeSeries(clock));
+    }
+    return Metric.create(metricDescriptor, timeSeriesList);
+  }
+
+  /** Implementation of {@link DoubleCumulative.DoublePoint}. */
+  public static final class PointImpl extends DoublePoint {
+
+    private final List<LabelValue> labelValues;
+    private final Timestamp startTime;
+    private final AtomicDouble value = new AtomicDouble();
+
+    PointImpl(List<LabelValue> labelValues, Timestamp startTime) {
+      this.labelValues = labelValues;
+      this.startTime = startTime;
+    }
+
+    @Override
+    public synchronized void add(double delta) {
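+      // Cumulative metrics only increase: non-positive deltas are silently dropped.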
+      if (delta <= 0) {
+        return;
+      }
+      value.addAndGet(delta);
+    }
+
+    private synchronized TimeSeries getTimeSeries(Clock clock) {
+      Point point = Point.create(Value.doubleValue(value.get()), clock.now());
+      return TimeSeries.createWithOnePoint(labelValues, point, startTime);
+    }
+  }
+}
diff --git a/impl_core/src/main/java/io/opencensus/implcore/metrics/DoubleGaugeImpl.java b/impl_core/src/main/java/io/opencensus/implcore/metrics/DoubleGaugeImpl.java
index c314e98..87f17ab 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/metrics/DoubleGaugeImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/metrics/DoubleGaugeImpl.java
@@ -37,6 +37,7 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import javax.annotation.Nullable;
 
 /** Implementation of {@link DoubleGauge}. */
@@ -48,17 +49,32 @@
       Collections.<List<LabelValue>, PointImpl>emptyMap();
   private final int labelKeysSize;
   private final List<LabelValue> defaultLabelValues;
+  private final List<LabelValue> constantLabelValues;
 
-  DoubleGaugeImpl(String name, String description, String unit, List<LabelKey> labelKeys) {
-    labelKeysSize = labelKeys.size();
+  DoubleGaugeImpl(
+      String name,
+      String description,
+      String unit,
+      List<LabelKey> labelKeys,
+      Map<LabelKey, LabelValue> constantLabels) {
+    List<LabelValue> constantLabelValues = new ArrayList<LabelValue>();
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    for (Entry<LabelKey, LabelValue> label : constantLabels.entrySet()) {
+      // Ensure constant label keys and values are in the same order.
+      allKeys.add(label.getKey());
+      constantLabelValues.add(label.getValue());
+    }
+    labelKeysSize = allKeys.size();
     this.metricDescriptor =
-        MetricDescriptor.create(name, description, unit, Type.GAUGE_DOUBLE, labelKeys);
+        MetricDescriptor.create(name, description, unit, Type.GAUGE_DOUBLE, allKeys);
+    this.constantLabelValues = Collections.unmodifiableList(constantLabelValues);
 
     // initialize defaultLabelValues
-    defaultLabelValues = new ArrayList<LabelValue>(labelKeysSize);
-    for (int i = 0; i < labelKeysSize; i++) {
+    defaultLabelValues = new ArrayList<LabelValue>(labelKeys.size());
+    for (int i = 0; i < labelKeys.size(); i++) {
       defaultLabelValues.add(UNSET_VALUE);
     }
+    defaultLabelValues.addAll(constantLabelValues);
   }
 
   @Override
@@ -70,9 +86,9 @@
     }
 
     List<LabelValue> labelValuesCopy =
-        Collections.unmodifiableList(
-            new ArrayList<LabelValue>(checkNotNull(labelValues, "labelValues")));
-    return registerTimeSeries(labelValuesCopy);
+        new ArrayList<LabelValue>(checkNotNull(labelValues, "labelValues"));
+    labelValuesCopy.addAll(constantLabelValues);
+    return registerTimeSeries(Collections.unmodifiableList(labelValuesCopy));
   }
 
   @Override
@@ -87,11 +103,13 @@
 
   @Override
   public synchronized void removeTimeSeries(List<LabelValue> labelValues) {
-    checkNotNull(labelValues, "labelValues");
+    List<LabelValue> labelValuesCopy =
+        new ArrayList<LabelValue>(checkNotNull(labelValues, "labelValues"));
+    labelValuesCopy.addAll(constantLabelValues);
 
     Map<List<LabelValue>, PointImpl> registeredPointsCopy =
         new LinkedHashMap<List<LabelValue>, PointImpl>(registeredPoints);
-    if (registeredPointsCopy.remove(labelValues) == null) {
+    if (registeredPointsCopy.remove(labelValuesCopy) == null) {
       // The element not present, no need to update the current map of points.
       return;
     }
@@ -103,6 +121,11 @@
     registeredPoints = Collections.<List<LabelValue>, PointImpl>emptyMap();
   }
 
+  @Override
+  public MetricDescriptor getMetricDescriptor() {
+    return metricDescriptor;
+  }
+
   private synchronized DoublePoint registerTimeSeries(List<LabelValue> labelValues) {
     PointImpl existingPoint = registeredPoints.get(labelValues);
     if (existingPoint != null) {
@@ -111,8 +134,9 @@
       return existingPoint;
     }
 
-    checkArgument(labelKeysSize == labelValues.size(), "Incorrect number of labels.");
-    Utils.checkListElementNotNull(labelValues, "labelValue element should not be null.");
+    checkArgument(
+        labelKeysSize == labelValues.size(), "Label Keys and Label Values don't have same size.");
+    Utils.checkListElementNotNull(labelValues, "labelValue");
 
     PointImpl newPoint = new PointImpl(labelValues);
     // Updating the map of points happens under a lock to avoid multiple add operations
@@ -150,10 +174,10 @@
 
     // TODO(mayurkale): Consider to use DoubleAdder here, once we upgrade to Java8.
     private final AtomicDouble value = new AtomicDouble(0);
-    private final List<LabelValue> labelValues;
+    private final TimeSeries defaultTimeSeries;
 
     PointImpl(List<LabelValue> labelValues) {
-      this.labelValues = labelValues;
+      defaultTimeSeries = TimeSeries.create(labelValues);
     }
 
     @Override
@@ -167,8 +191,7 @@
     }
 
     private TimeSeries getTimeSeries(Clock clock) {
-      return TimeSeries.createWithOnePoint(
-          labelValues, Point.create(Value.doubleValue(value.get()), clock.now()), null);
+      return defaultTimeSeries.setPoint(Point.create(Value.doubleValue(value.get()), clock.now()));
     }
   }
 }
diff --git a/impl_core/src/main/java/io/opencensus/implcore/metrics/LongCumulativeImpl.java b/impl_core/src/main/java/io/opencensus/implcore/metrics/LongCumulativeImpl.java
new file mode 100644
index 0000000..5b7ea4a
--- /dev/null
+++ b/impl_core/src/main/java/io/opencensus/implcore/metrics/LongCumulativeImpl.java
@@ -0,0 +1,201 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.metrics;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.common.annotations.VisibleForTesting;
+import io.opencensus.common.Clock;
+import io.opencensus.common.Timestamp;
+import io.opencensus.implcore.internal.Utils;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.LongCumulative;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.TimeSeries;
+import io.opencensus.metrics.export.Value;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.atomic.AtomicLong;
+import javax.annotation.Nullable;
+
+/** Implementation of {@link LongCumulative}. */
+public final class LongCumulativeImpl extends LongCumulative implements Meter {
+  @VisibleForTesting static final LabelValue UNSET_VALUE = LabelValue.create(null);
+
+  private final MetricDescriptor metricDescriptor;
+  private volatile Map<List<LabelValue>, PointImpl> registeredPoints =
+      Collections.<List<LabelValue>, PointImpl>emptyMap();
+  private final int labelKeysSize;
+  private final List<LabelValue> defaultLabelValues;
+  private final List<LabelValue> constantLabelValues;
+  private final Timestamp startTime;
+
+  LongCumulativeImpl(
+      String name,
+      String description,
+      String unit,
+      List<LabelKey> labelKeys,
+      Map<LabelKey, LabelValue> constantLabels,
+      Timestamp startTime) {
+    List<LabelValue> constantLabelValues = new ArrayList<LabelValue>();
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    for (Entry<LabelKey, LabelValue> label : constantLabels.entrySet()) {
+      // Ensure constant label keys and values are in the same order.
+      allKeys.add(label.getKey());
+      constantLabelValues.add(label.getValue());
+    }
+    labelKeysSize = allKeys.size();
+    this.metricDescriptor =
+        MetricDescriptor.create(name, description, unit, Type.CUMULATIVE_INT64, allKeys);
+    this.constantLabelValues = Collections.unmodifiableList(constantLabelValues);
+    this.startTime = startTime;
+
+    // Initialize defaultLabelValues: unset values for the declared keys plus the constant labels.
+    defaultLabelValues = new ArrayList<LabelValue>(labelKeys.size());
+    for (int i = 0; i < labelKeys.size(); i++) {
+      defaultLabelValues.add(UNSET_VALUE);
+    }
+    defaultLabelValues.addAll(constantLabelValues);
+  }
+
+  @Override
+  public LongPoint getOrCreateTimeSeries(List<LabelValue> labelValues) {
+    // Lock-free point retrieval if it is already present.
+    PointImpl existingPoint = registeredPoints.get(labelValues);
+    if (existingPoint != null) {
+      return existingPoint;
+    }
+
+    List<LabelValue> labelValuesCopy =
+        new ArrayList<LabelValue>(checkNotNull(labelValues, "labelValues"));
+    labelValuesCopy.addAll(constantLabelValues);
+    return registerTimeSeries(Collections.unmodifiableList(labelValuesCopy));
+  }
+
+  @Override
+  public LongPoint getDefaultTimeSeries() {
+    // Lock-free default point retrieval if it is already present.
+    PointImpl existingPoint = registeredPoints.get(defaultLabelValues);
+    if (existingPoint != null) {
+      return existingPoint;
+    }
+    return registerTimeSeries(Collections.unmodifiableList(defaultLabelValues));
+  }
+
+  @Override
+  public synchronized void removeTimeSeries(List<LabelValue> labelValues) {
+    List<LabelValue> labelValuesCopy =
+        new ArrayList<LabelValue>(checkNotNull(labelValues, "labelValues"));
+    labelValuesCopy.addAll(constantLabelValues);
+
+    Map<List<LabelValue>, PointImpl> registeredPointsCopy =
+        new LinkedHashMap<List<LabelValue>, PointImpl>(registeredPoints);
+    if (registeredPointsCopy.remove(labelValuesCopy) == null) {
+      // The element not present, no need to update the current map of points.
+      return;
+    }
+    registeredPoints = Collections.unmodifiableMap(registeredPointsCopy);
+  }
+
+  @Override
+  public synchronized void clear() {
+    registeredPoints = Collections.<List<LabelValue>, PointImpl>emptyMap();
+  }
+
+  @Override
+  public MetricDescriptor getMetricDescriptor() {
+    return metricDescriptor;
+  }
+
+  private synchronized LongPoint registerTimeSeries(List<LabelValue> labelValues) {
+    PointImpl existingPoint = registeredPoints.get(labelValues);
+    if (existingPoint != null) {
+      // Return a Point that is already registered. This can happen if multiple threads
+      // concurrently try to register the same {@code TimeSeries}.
+      return existingPoint;
+    }
+
+    checkArgument(
+        labelKeysSize == labelValues.size(), "Label Keys and Label Values don't have same size.");
+    Utils.checkListElementNotNull(labelValues, "labelValue");
+
+    PointImpl newPoint = new PointImpl(labelValues, startTime);
+    // Updating the map of points happens under a lock to prevent multiple add
+    // operations from happening at the same time.
+    Map<List<LabelValue>, PointImpl> registeredPointsCopy =
+        new LinkedHashMap<List<LabelValue>, PointImpl>(registeredPoints);
+    registeredPointsCopy.put(labelValues, newPoint);
+    registeredPoints = Collections.unmodifiableMap(registeredPointsCopy);
+
+    return newPoint;
+  }
+
+  @Nullable
+  @Override
+  public Metric getMetric(Clock clock) {
+    Map<List<LabelValue>, PointImpl> currentRegisteredPoints = registeredPoints;
+    if (currentRegisteredPoints.isEmpty()) {
+      return null;
+    }
+
+    if (currentRegisteredPoints.size() == 1) {
+      PointImpl point = currentRegisteredPoints.values().iterator().next();
+      return Metric.createWithOneTimeSeries(metricDescriptor, point.getTimeSeries(clock));
+    }
+
+    List<TimeSeries> timeSeriesList = new ArrayList<TimeSeries>(currentRegisteredPoints.size());
+    for (Map.Entry<List<LabelValue>, PointImpl> entry : currentRegisteredPoints.entrySet()) {
+      timeSeriesList.add(entry.getValue().getTimeSeries(clock));
+    }
+    return Metric.create(metricDescriptor, timeSeriesList);
+  }
+
+  /** Implementation of {@link LongCumulative.LongPoint}. */
+  public static final class PointImpl extends LongPoint {
+
+    private final List<LabelValue> labelValues;
+    private final Timestamp startTime;
+    private final AtomicLong value = new AtomicLong();
+
+    PointImpl(List<LabelValue> labelValues, Timestamp startTime) {
+      this.labelValues = labelValues;
+      this.startTime = startTime;
+    }
+
+    @Override
+    public synchronized void add(long delta) {
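+      // A cumulative value can only grow, so non-positive deltas are silently dropped.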
+      if (delta <= 0) {
+        return;
+      }
+      value.addAndGet(delta);
+    }
+
+    private synchronized TimeSeries getTimeSeries(Clock clock) {
+      Point point = Point.create(Value.longValue(value.get()), clock.now());
+      return TimeSeries.createWithOnePoint(labelValues, point, startTime);
+    }
+  }
+}
diff --git a/impl_core/src/main/java/io/opencensus/implcore/metrics/LongGaugeImpl.java b/impl_core/src/main/java/io/opencensus/implcore/metrics/LongGaugeImpl.java
index 3460d7a..7d64953 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/metrics/LongGaugeImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/metrics/LongGaugeImpl.java
@@ -36,6 +36,7 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.concurrent.atomic.AtomicLong;
 import javax.annotation.Nullable;
 
@@ -48,17 +49,32 @@
       Collections.<List<LabelValue>, PointImpl>emptyMap();
   private final int labelKeysSize;
   private final List<LabelValue> defaultLabelValues;
+  private final List<LabelValue> constantLabelValues;
 
-  LongGaugeImpl(String name, String description, String unit, List<LabelKey> labelKeys) {
-    labelKeysSize = labelKeys.size();
+  LongGaugeImpl(
+      String name,
+      String description,
+      String unit,
+      List<LabelKey> labelKeys,
+      Map<LabelKey, LabelValue> constantLabels) {
+    List<LabelValue> constantLabelValues = new ArrayList<LabelValue>();
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    for (Entry<LabelKey, LabelValue> label : constantLabels.entrySet()) {
+      // Ensure constant label keys and values are in the same order.
+      allKeys.add(label.getKey());
+      constantLabelValues.add(label.getValue());
+    }
+    labelKeysSize = allKeys.size();
     this.metricDescriptor =
-        MetricDescriptor.create(name, description, unit, Type.GAUGE_INT64, labelKeys);
+        MetricDescriptor.create(name, description, unit, Type.GAUGE_INT64, allKeys);
+    this.constantLabelValues = Collections.unmodifiableList(constantLabelValues);
 
     // initialize defaultLabelValues
-    defaultLabelValues = new ArrayList<LabelValue>(labelKeysSize);
-    for (int i = 0; i < labelKeysSize; i++) {
+    defaultLabelValues = new ArrayList<LabelValue>(labelKeys.size());
+    for (int i = 0; i < labelKeys.size(); i++) {
       defaultLabelValues.add(UNSET_VALUE);
     }
+    defaultLabelValues.addAll(constantLabelValues);
   }
 
   @Override
@@ -70,9 +86,9 @@
     }
 
     List<LabelValue> labelValuesCopy =
-        Collections.unmodifiableList(
-            new ArrayList<LabelValue>(checkNotNull(labelValues, "labelValues")));
-    return registerTimeSeries(labelValuesCopy);
+        new ArrayList<LabelValue>(checkNotNull(labelValues, "labelValues"));
+    labelValuesCopy.addAll(constantLabelValues);
+    return registerTimeSeries(Collections.unmodifiableList(labelValuesCopy));
   }
 
   @Override
@@ -87,11 +103,13 @@
 
   @Override
   public synchronized void removeTimeSeries(List<LabelValue> labelValues) {
-    checkNotNull(labelValues, "labelValues");
+    List<LabelValue> labelValuesCopy =
+        new ArrayList<LabelValue>(checkNotNull(labelValues, "labelValues"));
+    labelValuesCopy.addAll(constantLabelValues);
 
     Map<List<LabelValue>, PointImpl> registeredPointsCopy =
         new LinkedHashMap<List<LabelValue>, PointImpl>(registeredPoints);
-    if (registeredPointsCopy.remove(labelValues) == null) {
+    if (registeredPointsCopy.remove(labelValuesCopy) == null) {
       // The element not present, no need to update the current map of points.
       return;
     }
@@ -103,6 +121,11 @@
     registeredPoints = Collections.<List<LabelValue>, PointImpl>emptyMap();
   }
 
+  @Override
+  public MetricDescriptor getMetricDescriptor() {
+    return metricDescriptor;
+  }
+
   private synchronized LongPoint registerTimeSeries(List<LabelValue> labelValues) {
     PointImpl existingPoint = registeredPoints.get(labelValues);
     if (existingPoint != null) {
@@ -111,8 +134,9 @@
       return existingPoint;
     }
 
-    checkArgument(labelKeysSize == labelValues.size(), "Incorrect number of labels.");
-    Utils.checkListElementNotNull(labelValues, "labelValue element should not be null.");
+    checkArgument(
+        labelKeysSize == labelValues.size(), "Label Keys and Label Values don't have same size.");
+    Utils.checkListElementNotNull(labelValues, "labelValue");
 
     PointImpl newPoint = new PointImpl(labelValues);
     // Updating the map of points happens under a lock to avoid multiple add operations
@@ -150,10 +174,10 @@
 
     // TODO(mayurkale): Consider to use LongAdder here, once we upgrade to Java8.
     private final AtomicLong value = new AtomicLong(0);
-    private final List<LabelValue> labelValues;
+    private final TimeSeries defaultTimeSeries;
 
     PointImpl(List<LabelValue> labelValues) {
-      this.labelValues = labelValues;
+      defaultTimeSeries = TimeSeries.create(labelValues);
     }
 
     @Override
@@ -167,8 +191,7 @@
     }
 
     private TimeSeries getTimeSeries(Clock clock) {
-      return TimeSeries.createWithOnePoint(
-          labelValues, Point.create(Value.longValue(value.get()), clock.now()), null);
+      return defaultTimeSeries.setPoint(Point.create(Value.longValue(value.get()), clock.now()));
     }
   }
 }
diff --git a/impl_core/src/main/java/io/opencensus/implcore/metrics/Meter.java b/impl_core/src/main/java/io/opencensus/implcore/metrics/Meter.java
index f5a8dc8..8a256a3 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/metrics/Meter.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/metrics/Meter.java
@@ -18,6 +18,7 @@
 
 import io.opencensus.common.Clock;
 import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
 import javax.annotation.Nullable;
 
 interface Meter {
@@ -31,4 +32,11 @@
    */
   @Nullable
   Metric getMetric(Clock clock);
+
+  /**
+   * Provides a {@link io.opencensus.metrics.export.MetricDescriptor}.
+   *
+   * @return a {@code MetricDescriptor}.
+   */
+  MetricDescriptor getMetricDescriptor();
 }
diff --git a/impl_core/src/main/java/io/opencensus/implcore/metrics/MetricRegistryImpl.java b/impl_core/src/main/java/io/opencensus/implcore/metrics/MetricRegistryImpl.java
index 1a301ec..862cfe9 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/metrics/MetricRegistryImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/metrics/MetricRegistryImpl.java
@@ -19,12 +19,15 @@
 import static com.google.common.base.Preconditions.checkNotNull;
 
 import io.opencensus.common.Clock;
-import io.opencensus.implcore.internal.Utils;
+import io.opencensus.metrics.DerivedDoubleCumulative;
 import io.opencensus.metrics.DerivedDoubleGauge;
+import io.opencensus.metrics.DerivedLongCumulative;
 import io.opencensus.metrics.DerivedLongGauge;
+import io.opencensus.metrics.DoubleCumulative;
 import io.opencensus.metrics.DoubleGauge;
-import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LongCumulative;
 import io.opencensus.metrics.LongGauge;
+import io.opencensus.metrics.MetricOptions;
 import io.opencensus.metrics.MetricRegistry;
 import io.opencensus.metrics.export.Metric;
 import io.opencensus.metrics.export.MetricProducer;
@@ -39,70 +42,112 @@
 public final class MetricRegistryImpl extends MetricRegistry {
   private final RegisteredMeters registeredMeters;
   private final MetricProducer metricProducer;
+  private final Clock clock;
 
   MetricRegistryImpl(Clock clock) {
     registeredMeters = new RegisteredMeters();
     metricProducer = new MetricProducerForRegistry(registeredMeters, clock);
+    this.clock = clock;
   }
 
   @Override
-  public LongGauge addLongGauge(
-      String name, String description, String unit, List<LabelKey> labelKeys) {
-    Utils.checkListElementNotNull(
-        checkNotNull(labelKeys, "labelKeys"), "labelKey element should not be null.");
+  public LongGauge addLongGauge(String name, MetricOptions options) {
     LongGaugeImpl longGaugeMetric =
         new LongGaugeImpl(
             checkNotNull(name, "name"),
-            checkNotNull(description, "description"),
-            checkNotNull(unit, "unit"),
-            Collections.unmodifiableList(new ArrayList<LabelKey>(labelKeys)));
-    registeredMeters.registerMeter(name, longGaugeMetric);
-    return longGaugeMetric;
+            options.getDescription(),
+            options.getUnit(),
+            options.getLabelKeys(),
+            options.getConstantLabels());
+    return (LongGauge) registeredMeters.registerMeter(name, longGaugeMetric);
   }
 
   @Override
-  public DoubleGauge addDoubleGauge(
-      String name, String description, String unit, List<LabelKey> labelKeys) {
-    Utils.checkListElementNotNull(
-        checkNotNull(labelKeys, "labelKeys"), "labelKey element should not be null.");
+  public DoubleGauge addDoubleGauge(String name, MetricOptions options) {
     DoubleGaugeImpl doubleGaugeMetric =
         new DoubleGaugeImpl(
             checkNotNull(name, "name"),
-            checkNotNull(description, "description"),
-            checkNotNull(unit, "unit"),
-            Collections.unmodifiableList(new ArrayList<LabelKey>(labelKeys)));
-    registeredMeters.registerMeter(name, doubleGaugeMetric);
-    return doubleGaugeMetric;
+            options.getDescription(),
+            options.getUnit(),
+            options.getLabelKeys(),
+            options.getConstantLabels());
+    return (DoubleGauge) registeredMeters.registerMeter(name, doubleGaugeMetric);
   }
 
   @Override
-  public DerivedLongGauge addDerivedLongGauge(
-      String name, String description, String unit, List<LabelKey> labelKeys) {
-    Utils.checkListElementNotNull(
-        checkNotNull(labelKeys, "labelKeys"), "labelKey element should not be null.");
+  public DerivedLongGauge addDerivedLongGauge(String name, MetricOptions options) {
     DerivedLongGaugeImpl derivedLongGauge =
         new DerivedLongGaugeImpl(
             checkNotNull(name, "name"),
-            checkNotNull(description, "description"),
-            checkNotNull(unit, "unit"),
-            Collections.unmodifiableList(new ArrayList<LabelKey>(labelKeys)));
-    registeredMeters.registerMeter(name, derivedLongGauge);
-    return derivedLongGauge;
+            options.getDescription(),
+            options.getUnit(),
+            options.getLabelKeys(),
+            options.getConstantLabels());
+    return (DerivedLongGauge) registeredMeters.registerMeter(name, derivedLongGauge);
   }
 
   @Override
-  public DerivedDoubleGauge addDerivedDoubleGauge(
-      String name, String description, String unit, List<LabelKey> labelKeys) {
-    Utils.checkListElementNotNull(
-        checkNotNull(labelKeys, "labelKeys"), "labelKey element should not be null.");
+  public DerivedDoubleGauge addDerivedDoubleGauge(String name, MetricOptions options) {
     DerivedDoubleGaugeImpl derivedDoubleGauge =
         new DerivedDoubleGaugeImpl(
             checkNotNull(name, "name"),
-            checkNotNull(description, "description"),
-            checkNotNull(unit, "unit"),
-            Collections.unmodifiableList(new ArrayList<LabelKey>(labelKeys)));
-    registeredMeters.registerMeter(name, derivedDoubleGauge);
-    return derivedDoubleGauge;
+            options.getDescription(),
+            options.getUnit(),
+            options.getLabelKeys(),
+            options.getConstantLabels());
+    return (DerivedDoubleGauge) registeredMeters.registerMeter(name, derivedDoubleGauge);
+  }
+
+  @Override
+  public LongCumulative addLongCumulative(String name, MetricOptions options) {
+    LongCumulativeImpl longCumulativeMetric =
+        new LongCumulativeImpl(
+            checkNotNull(name, "name"),
+            options.getDescription(),
+            options.getUnit(),
+            options.getLabelKeys(),
+            options.getConstantLabels(),
+            clock.now());
+    return (LongCumulative) registeredMeters.registerMeter(name, longCumulativeMetric);
+  }
+
+  @Override
+  public DoubleCumulative addDoubleCumulative(String name, MetricOptions options) {
+    DoubleCumulativeImpl longCumulativeMetric =
+        new DoubleCumulativeImpl(
+            checkNotNull(name, "name"),
+            options.getDescription(),
+            options.getUnit(),
+            options.getLabelKeys(),
+            options.getConstantLabels(),
+            clock.now());
+    return (DoubleCumulative) registeredMeters.registerMeter(name, longCumulativeMetric);
+  }
+
+  @Override
+  public DerivedLongCumulative addDerivedLongCumulative(String name, MetricOptions options) {
+    DerivedLongCumulativeImpl derivedLongCumulative =
+        new DerivedLongCumulativeImpl(
+            checkNotNull(name, "name"),
+            options.getDescription(),
+            options.getUnit(),
+            options.getLabelKeys(),
+            options.getConstantLabels(),
+            clock.now());
+    return (DerivedLongCumulative) registeredMeters.registerMeter(name, derivedLongCumulative);
+  }
+
+  @Override
+  public DerivedDoubleCumulative addDerivedDoubleCumulative(String name, MetricOptions options) {
+    DerivedDoubleCumulativeImpl derivedDoubleCumulative =
+        new DerivedDoubleCumulativeImpl(
+            checkNotNull(name, "name"),
+            options.getDescription(),
+            options.getUnit(),
+            options.getLabelKeys(),
+            options.getConstantLabels(),
+            clock.now());
+    return (DerivedDoubleCumulative) registeredMeters.registerMeter(name, derivedDoubleCumulative);
   }
 
   private static final class RegisteredMeters {
@@ -112,17 +157,21 @@
       return registeredMeters;
     }
 
-    private synchronized void registerMeter(String meterName, Meter meter) {
+    private synchronized Meter registerMeter(String meterName, Meter meter) {
       Meter existingMeter = registeredMeters.get(meterName);
       if (existingMeter != null) {
-        // TODO(mayurkale): Allow users to register the same Meter multiple times without exception.
-        throw new IllegalArgumentException(
-            "A different metric with the same name already registered.");
+        if (!existingMeter.getMetricDescriptor().equals(meter.getMetricDescriptor())) {
+          throw new IllegalArgumentException(
+              "A different metric with the same name already registered.");
+        } else {
+          return existingMeter;
+        }
       }
 
       Map<String, Meter> registeredMetersCopy = new LinkedHashMap<String, Meter>(registeredMeters);
       registeredMetersCopy.put(meterName, meter);
       registeredMeters = Collections.unmodifiableMap(registeredMetersCopy);
+      return meter;
     }
   }
 
@@ -150,7 +199,7 @@
           metrics.add(metric);
         }
       }
-      return metrics;
+      return Collections.unmodifiableCollection(metrics);
     }
   }
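
With registerMeter now keyed on the MetricDescriptor, registering the same metric twice returns the original instance instead of throwing, while a name clash with a different descriptor still fails. A hedged caller-side sketch against the public MetricRegistry API (it assumes MetricOptions.builder() exposes setters matching the getters used above; names and options are illustrative):

    import io.opencensus.metrics.LabelKey;
    import io.opencensus.metrics.LongGauge;
    import io.opencensus.metrics.MetricOptions;
    import io.opencensus.metrics.MetricRegistry;
    import io.opencensus.metrics.Metrics;
    import java.util.Collections;

    class RegistryReuseSketch {
      static void register() {
        MetricRegistry registry = Metrics.getMetricRegistry();
        MetricOptions options =
            MetricOptions.builder()
                .setDescription("Number of queued jobs")
                .setUnit("1")
                .setLabelKeys(Collections.singletonList(LabelKey.create("queue", "queue name")))
                .build();

        LongGauge first = registry.addLongGauge("jobs_queued", options);
        // Same name and equal descriptor: the registry hands back the meter it already holds.
        LongGauge second = registry.addLongGauge("jobs_queued", options);
        // first == second; the same name with a different descriptor still throws
        // IllegalArgumentException.
      }
    }
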
 
diff --git a/impl_core/src/main/java/io/opencensus/implcore/stats/IntervalBucket.java b/impl_core/src/main/java/io/opencensus/implcore/stats/IntervalBucket.java
index 172db53..4ff1bb3 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/stats/IntervalBucket.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/stats/IntervalBucket.java
@@ -22,6 +22,7 @@
 import com.google.common.collect.Maps;
 import io.opencensus.common.Duration;
 import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.data.AttachmentValue;
 import io.opencensus.stats.Aggregation;
 import io.opencensus.stats.Measure;
 import io.opencensus.tags.TagValue;
@@ -64,7 +65,7 @@
   void record(
       List</*@Nullable*/ TagValue> tagValues,
       double value,
-      Map<String, String> attachments,
+      Map<String, AttachmentValue> attachments,
       Timestamp timestamp) {
     if (!tagValueAggregationMap.containsKey(tagValues)) {
       tagValueAggregationMap.put(
diff --git a/impl_core/src/main/java/io/opencensus/implcore/stats/MeasureMapImpl.java b/impl_core/src/main/java/io/opencensus/implcore/stats/MeasureMapImpl.java
index ee51796..9024705 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/stats/MeasureMapImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/stats/MeasureMapImpl.java
@@ -16,16 +16,23 @@
 
 package io.opencensus.implcore.stats;
 
+import io.grpc.Context;
+import io.opencensus.metrics.data.AttachmentValue;
 import io.opencensus.stats.Measure.MeasureDouble;
 import io.opencensus.stats.Measure.MeasureLong;
 import io.opencensus.stats.MeasureMap;
 import io.opencensus.tags.TagContext;
 import io.opencensus.tags.unsafe.ContextUtils;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 
 /** Implementation of {@link MeasureMap}. */
 final class MeasureMapImpl extends MeasureMap {
+  private static final Logger logger = Logger.getLogger(MeasureMapImpl.class.getName());
+
   private final StatsManager statsManager;
   private final MeasureMapInternal.Builder builder = MeasureMapInternal.builder();
+  private volatile boolean hasUnsupportedValues;
 
   static MeasureMapImpl create(StatsManager statsManager) {
     return new MeasureMapImpl(statsManager);
@@ -37,18 +44,24 @@
 
   @Override
   public MeasureMapImpl put(MeasureDouble measure, double value) {
+    if (value < 0) {
+      hasUnsupportedValues = true;
+    }
     builder.put(measure, value);
     return this;
   }
 
   @Override
   public MeasureMapImpl put(MeasureLong measure, long value) {
+    if (value < 0) {
+      hasUnsupportedValues = true;
+    }
     builder.put(measure, value);
     return this;
   }
 
   @Override
-  public MeasureMap putAttachment(String key, String value) {
+  public MeasureMap putAttachment(String key, AttachmentValue value) {
     builder.putAttachment(key, value);
     return this;
   }
@@ -56,11 +69,16 @@
   @Override
   public void record() {
     // Use the context key directly, to avoid depending on the tags implementation.
-    record(ContextUtils.TAG_CONTEXT_KEY.get());
+    record(ContextUtils.getValue(Context.current()));
   }
 
   @Override
   public void record(TagContext tags) {
+    if (hasUnsupportedValues) {
+      // drop all the recorded values
+      logger.log(Level.WARNING, "Dropping values, value to record must be non-negative.");
+      return;
+    }
     statsManager.record(tags, builder.build());
   }
 }
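
The guard above means a single negative put poisons the whole MeasureMap: record() logs a warning and drops every value in the map rather than recording a partial batch. A small sketch with the public stats API (measure name and values are illustrative):

    import io.opencensus.stats.Measure.MeasureDouble;
    import io.opencensus.stats.Stats;
    import io.opencensus.stats.StatsRecorder;

    class NegativeValueSketch {
      private static final MeasureDouble LATENCY =
          MeasureDouble.create("my.org/measure/latency", "request latency", "ms");
      private static final StatsRecorder RECORDER = Stats.getStatsRecorder();

      static void recordBatch() {
        RECORDER.newMeasureMap()
            .put(LATENCY, 12.5)   // valid
            .put(LATENCY, -3.0)   // flags the map as containing an unsupported value
            .record();            // logs the warning above and records nothing
      }
    }
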
diff --git a/impl_core/src/main/java/io/opencensus/implcore/stats/MeasureMapInternal.java b/impl_core/src/main/java/io/opencensus/implcore/stats/MeasureMapInternal.java
index d867b34..682b96b 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/stats/MeasureMapInternal.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/stats/MeasureMapInternal.java
@@ -16,6 +16,7 @@
 
 package io.opencensus.implcore.stats;
 
+import io.opencensus.metrics.data.AttachmentValue;
 import io.opencensus.stats.Measure;
 import io.opencensus.stats.Measure.MeasureDouble;
 import io.opencensus.stats.Measure.MeasureLong;
@@ -45,16 +46,18 @@
   }
 
   // Returns the contextual information associated with an example value.
-  Map<String, String> getAttachments() {
+  Map<String, AttachmentValue> getAttachments() {
     return attachments;
   }
 
   private final ArrayList<Measurement> measurements;
-  private final Map<String, String> attachments;
+  private final Map<String, AttachmentValue> attachments;
 
-  private MeasureMapInternal(ArrayList<Measurement> measurements, Map<String, String> attachments) {
+  private MeasureMapInternal(
+      ArrayList<Measurement> measurements, Map<String, AttachmentValue> attachments) {
     this.measurements = measurements;
-    this.attachments = Collections.unmodifiableMap(new HashMap<String, String>(attachments));
+    this.attachments =
+        Collections.unmodifiableMap(new HashMap<String, AttachmentValue>(attachments));
   }
 
   /** Builder for the {@link MeasureMapInternal} class. */
@@ -85,7 +88,7 @@
       return this;
     }
 
-    Builder putAttachment(String key, String value) {
+    Builder putAttachment(String key, AttachmentValue value) {
       this.attachments.put(key, value);
       return this;
     }
@@ -107,7 +110,7 @@
     }
 
     private final ArrayList<Measurement> measurements = new ArrayList<Measurement>();
-    private final Map<String, String> attachments = new HashMap<String, String>();
+    private final Map<String, AttachmentValue> attachments = new HashMap<String, AttachmentValue>();
 
     private Builder() {}
   }
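
Attachments are now typed as AttachmentValue end to end instead of plain strings. A hedged example of attaching exemplar context through the public API (it assumes the AttachmentValue.AttachmentValueString factory in io.opencensus.metrics.data at this version; the key and span id are illustrative):

    import io.opencensus.metrics.data.AttachmentValue;
    import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
    import io.opencensus.stats.Measure.MeasureDouble;
    import io.opencensus.stats.Stats;

    class AttachmentSketch {
      private static final MeasureDouble LATENCY =
          MeasureDouble.create("my.org/measure/latency", "request latency", "ms");

      static void recordWithExemplar(String spanId) {
        AttachmentValue traceInfo = AttachmentValueString.create(spanId);
        Stats.getStatsRecorder()
            .newMeasureMap()
            .put(LATENCY, 8.0)
            .putAttachment("span_id", traceInfo) // carried through to Distribution exemplars
            .record();
      }
    }
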
diff --git a/impl_core/src/main/java/io/opencensus/implcore/stats/MeasureToViewMap.java b/impl_core/src/main/java/io/opencensus/implcore/stats/MeasureToViewMap.java
index 5da0cad..cdf1cc9 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/stats/MeasureToViewMap.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/stats/MeasureToViewMap.java
@@ -23,6 +23,7 @@
 import io.opencensus.common.Clock;
 import io.opencensus.common.Timestamp;
 import io.opencensus.implcore.internal.CurrentState.State;
+import io.opencensus.metrics.data.AttachmentValue;
 import io.opencensus.metrics.export.Metric;
 import io.opencensus.stats.Measure;
 import io.opencensus.stats.Measurement;
@@ -146,7 +147,7 @@
   // Records stats with a set of tags.
   synchronized void record(TagContext tags, MeasureMapInternal stats, Timestamp timestamp) {
     Iterator<Measurement> iterator = stats.iterator();
-    Map<String, String> attachments = stats.getAttachments();
+    Map<String, AttachmentValue> attachments = stats.getAttachments();
     while (iterator.hasNext()) {
       Measurement measurement = iterator.next();
       Measure measure = measurement.getMeasure();
diff --git a/impl_core/src/main/java/io/opencensus/implcore/stats/MetricUtils.java b/impl_core/src/main/java/io/opencensus/implcore/stats/MetricUtils.java
index 0dfb1d2..d4dc81d 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/stats/MetricUtils.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/stats/MetricUtils.java
@@ -21,15 +21,19 @@
 import io.opencensus.common.Functions;
 import io.opencensus.metrics.LabelKey;
 import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.data.AttachmentValue;
 import io.opencensus.metrics.export.MetricDescriptor;
 import io.opencensus.metrics.export.MetricDescriptor.Type;
 import io.opencensus.stats.Aggregation;
+import io.opencensus.stats.Aggregation.Count;
 import io.opencensus.stats.Measure;
 import io.opencensus.stats.View;
 import io.opencensus.tags.TagKey;
 import io.opencensus.tags.TagValue;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 /*>>>
 import org.checkerframework.checker.nullness.qual.Nullable;
@@ -39,6 +43,8 @@
 // Utils to convert Stats data models to Metric data models.
 final class MetricUtils {
 
+  @VisibleForTesting static final String COUNT_UNIT = "1";
+
   @javax.annotation.Nullable
   static MetricDescriptor viewToMetricDescriptor(View view) {
     if (view.getWindow() instanceof View.AggregationWindow.Interval) {
@@ -51,11 +57,12 @@
       labelKeys.add(LabelKey.create(tagKey.getName(), ""));
     }
     Measure measure = view.getMeasure();
+    Aggregation aggregation = view.getAggregation();
     return MetricDescriptor.create(
         view.getName().asString(),
         view.getDescription(),
-        measure.getUnit(),
-        getType(measure, view.getAggregation()),
+        getUnit(measure, aggregation),
+        getType(measure, aggregation),
         labelKeys);
   }
 
@@ -77,6 +84,13 @@
         AGGREGATION_TYPE_DEFAULT_FUNCTION);
   }
 
+  private static String getUnit(Measure measure, Aggregation aggregation) {
+    if (aggregation instanceof Count) {
+      return COUNT_UNIT;
+    }
+    return measure.getUnit();
+  }
+
   static List<LabelValue> tagValuesToLabelValues(List</*@Nullable*/ TagValue> tagValues) {
     List<LabelValue> labelValues = new ArrayList<LabelValue>();
     for (/*@Nullable*/ TagValue tagValue : tagValues) {
@@ -85,6 +99,14 @@
     return labelValues;
   }
 
+  static Map<String, String> toStringAttachments(Map<String, AttachmentValue> attachments) {
+    Map<String, String> stringAttachments = new HashMap<>();
+    for (Map.Entry<String, AttachmentValue> entry : attachments.entrySet()) {
+      stringAttachments.put(entry.getKey(), entry.getValue().getValue());
+    }
+    return stringAttachments;
+  }
+
   private static final Function<Object, Type> TYPE_CUMULATIVE_DOUBLE_FUNCTION =
       Functions.returnConstant(Type.CUMULATIVE_DOUBLE);
 
diff --git a/impl_core/src/main/java/io/opencensus/implcore/stats/MutableAggregation.java b/impl_core/src/main/java/io/opencensus/implcore/stats/MutableAggregation.java
index 6e2bff1..7ccce0e 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/stats/MutableAggregation.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/stats/MutableAggregation.java
@@ -21,6 +21,8 @@
 
 import com.google.common.annotations.VisibleForTesting;
 import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.data.AttachmentValue;
+import io.opencensus.metrics.data.Exemplar;
 import io.opencensus.metrics.export.Distribution;
 import io.opencensus.metrics.export.Distribution.BucketOptions;
 import io.opencensus.metrics.export.Point;
@@ -28,7 +30,6 @@
 import io.opencensus.stats.Aggregation;
 import io.opencensus.stats.AggregationData;
 import io.opencensus.stats.AggregationData.DistributionData;
-import io.opencensus.stats.AggregationData.DistributionData.Exemplar;
 import io.opencensus.stats.BucketBoundaries;
 import java.util.ArrayList;
 import java.util.List;
@@ -49,7 +50,7 @@
    * @param attachments the contextual information on an {@link Exemplar}
    * @param timestamp the timestamp when the value is recorded
    */
-  abstract void add(double value, Map<String, String> attachments, Timestamp timestamp);
+  abstract void add(double value, Map<String, AttachmentValue> attachments, Timestamp timestamp);
 
   // TODO(songya): remove this method once interval stats is completely removed.
   /**
@@ -85,7 +86,7 @@
     }
 
     @Override
-    void add(double value, Map<String, String> attachments, Timestamp timestamp) {
+    void add(double value, Map<String, AttachmentValue> attachments, Timestamp timestamp) {
       sum += value;
     }
 
@@ -154,7 +155,7 @@
     }
 
     @Override
-    void add(double value, Map<String, String> attachments, Timestamp timestamp) {
+    void add(double value, Map<String, AttachmentValue> attachments, Timestamp timestamp) {
       count++;
     }
 
@@ -202,7 +203,7 @@
     }
 
     @Override
-    void add(double value, Map<String, String> attachments, Timestamp timestamp) {
+    void add(double value, Map<String, AttachmentValue> attachments, Timestamp timestamp) {
       count++;
       sum += value;
     }
@@ -258,10 +259,6 @@
     private long count = 0;
     private double sumOfSquaredDeviations = 0.0;
 
-    // Initial "impossible" values, that will get reset as soon as first value is added.
-    private double min = Double.POSITIVE_INFINITY;
-    private double max = Double.NEGATIVE_INFINITY;
-
     private final BucketBoundaries bucketBoundaries;
     private final long[] bucketCounts;
 
@@ -291,7 +288,7 @@
     }
 
     @Override
-    void add(double value, Map<String, String> attachments, Timestamp timestamp) {
+    void add(double value, Map<String, AttachmentValue> attachments, Timestamp timestamp) {
       sum += value;
       count++;
 
@@ -308,13 +305,6 @@
       double deltaFromMean2 = value - mean;
       sumOfSquaredDeviations += deltaFromMean * deltaFromMean2;
 
-      if (value < min) {
-        min = value;
-      }
-      if (value > max) {
-        max = value;
-      }
-
       int bucket = 0;
       for (; bucket < bucketBoundaries.getBoundaries().size(); bucket++) {
         if (value < bucketBoundaries.getBoundaries().get(bucket)) {
@@ -360,13 +350,6 @@
       this.sum += mutableDistribution.sum;
       this.mean = this.sum / this.count;
 
-      if (mutableDistribution.min < this.min) {
-        this.min = mutableDistribution.min;
-      }
-      if (mutableDistribution.max > this.max) {
-        this.max = mutableDistribution.max;
-      }
-
       long[] bucketCounts = mutableDistribution.getBucketCounts();
       for (int i = 0; i < bucketCounts.length; i++) {
         this.bucketCounts[i] += bucketCounts[i];
@@ -401,15 +384,16 @@
         }
       }
       return DistributionData.create(
-          mean, count, min, max, sumOfSquaredDeviations, boxedBucketCounts, exemplarList);
+          mean, count, sumOfSquaredDeviations, boxedBucketCounts, exemplarList);
     }
 
+    @SuppressWarnings("deprecation")
     @Override
     Point toPoint(Timestamp timestamp) {
       List<Distribution.Bucket> buckets = new ArrayList<Distribution.Bucket>();
       for (int bucket = 0; bucket < bucketCounts.length; bucket++) {
         long bucketCount = bucketCounts[bucket];
-        @javax.annotation.Nullable AggregationData.DistributionData.Exemplar exemplar = null;
+        @javax.annotation.Nullable Exemplar exemplar = null;
         if (exemplars != null) {
           exemplar = exemplars[bucket];
         }
@@ -417,11 +401,7 @@
         Distribution.Bucket metricBucket;
         if (exemplar != null) {
           // Bucket with an Exemplar.
-          metricBucket =
-              Distribution.Bucket.create(
-                  bucketCount,
-                  Distribution.Exemplar.create(
-                      exemplar.getValue(), exemplar.getTimestamp(), exemplar.getAttachments()));
+          metricBucket = Distribution.Bucket.create(bucketCount, exemplar);
         } else {
           // Bucket with no Exemplar.
           metricBucket = Distribution.Bucket.create(bucketCount);
@@ -429,8 +409,6 @@
         buckets.add(metricBucket);
       }
 
-      // TODO(mayurkale): Drop the first bucket when converting to metrics.
-      // Reason: In Stats API, bucket bounds begin with -infinity (first bucket is (-infinity, 0)).
       BucketOptions bucketOptions = BucketOptions.explicitOptions(bucketBoundaries.getBoundaries());
 
       return Point.create(
@@ -448,14 +426,6 @@
       return count;
     }
 
-    double getMin() {
-      return min;
-    }
-
-    double getMax() {
-      return max;
-    }
-
     // Returns the aggregated sum of squared deviations.
     double getSumOfSquaredDeviations() {
       return sumOfSquaredDeviations;
@@ -495,7 +465,7 @@
     }
 
     @Override
-    void add(double value, Map<String, String> attachments, Timestamp timestamp) {
+    void add(double value, Map<String, AttachmentValue> attachments, Timestamp timestamp) {
       lastValue = value;
       // TODO(songya): remove this once interval stats is completely removed.
       if (!initialized) {
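
For reference, the MutableDistribution.add path above keeps the running mean and sum of squared deviations with Welford's online update, so no individual samples need to be stored. A compact standalone sketch of that recurrence (hypothetical class, not part of the patch):

    class WelfordSketch {
      private long count;
      private double mean;
      private double sumOfSquaredDeviations;

      void add(double value) {
        count++;
        double deltaFromMean = value - mean;
        mean += deltaFromMean / count;
        double deltaFromMean2 = value - mean; // uses the updated mean
        sumOfSquaredDeviations += deltaFromMean * deltaFromMean2;
      }
    }
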
diff --git a/impl_core/src/main/java/io/opencensus/implcore/stats/MutableViewData.java b/impl_core/src/main/java/io/opencensus/implcore/stats/MutableViewData.java
index 928675e..c756a54 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/stats/MutableViewData.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/stats/MutableViewData.java
@@ -33,6 +33,7 @@
 import io.opencensus.implcore.internal.CheckerFrameworkUtils;
 import io.opencensus.implcore.internal.CurrentState.State;
 import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.data.AttachmentValue;
 import io.opencensus.metrics.export.Metric;
 import io.opencensus.metrics.export.MetricDescriptor;
 import io.opencensus.metrics.export.MetricDescriptor.Type;
@@ -93,7 +94,10 @@
 
   /** Record stats with the given tags. */
   abstract void record(
-      TagContext context, double value, Timestamp timestamp, Map<String, String> attachments);
+      TagContext context,
+      double value,
+      Timestamp timestamp,
+      Map<String, AttachmentValue> attachments);
 
   /** Convert this {@link MutableViewData} to {@link ViewData}. */
   abstract ViewData toViewData(Timestamp now, State state);
@@ -128,6 +132,7 @@
     @javax.annotation.Nullable
     @Override
     Metric toMetric(Timestamp now, State state) {
+      handleTimeRewinds(now);
       if (state == State.DISABLED) {
         return null;
       }
@@ -146,7 +151,10 @@
 
     @Override
     void record(
-        TagContext context, double value, Timestamp timestamp, Map<String, String> attachments) {
+        TagContext context,
+        double value,
+        Timestamp timestamp,
+        Map<String, AttachmentValue> attachments) {
       List</*@Nullable*/ TagValue> tagValues =
           getTagValues(getTagMap(context), super.view.getColumns());
       if (!tagValueAggregationMap.containsKey(tagValues)) {
@@ -159,6 +167,7 @@
 
     @Override
     ViewData toViewData(Timestamp now, State state) {
+      handleTimeRewinds(now);
       if (state == State.ENABLED) {
         return ViewData.create(
             super.view,
@@ -173,6 +182,18 @@
       }
     }
 
+    /**
+     * This method attempts to migrate this view into a reasonable state in the event of time going
+     * backwards.
+     */
+    private void handleTimeRewinds(Timestamp now) {
+      if (now.compareTo(start) < 0) {
+        // Time went backwards, physics is broken, forget what we know.
+        clearStats();
+        start = now;
+      }
+    }
+
     @Override
     void clearStats() {
       tagValueAggregationMap.clear();
@@ -246,7 +267,10 @@
 
     @Override
     void record(
-        TagContext context, double value, Timestamp timestamp, Map<String, String> attachments) {
+        TagContext context,
+        double value,
+        Timestamp timestamp,
+        Map<String, AttachmentValue> attachments) {
       List</*@Nullable*/ TagValue> tagValues =
           getTagValues(getTagMap(context), super.view.getColumns());
       refreshBucketList(timestamp);
@@ -294,10 +318,18 @@
       }
       Timestamp startOfLastBucket =
           CheckerFrameworkUtils.castNonNull(buckets.peekLast()).getStart();
-      // TODO(songya): decide what to do when time goes backwards
-      checkArgument(
-          now.compareTo(startOfLastBucket) >= 0,
-          "Current time must be within or after the last bucket.");
+      // Time went backwards! Physics has failed us! Drop everything we know and relearn.
+      // Prioritize reporting data we're confident is correct.
+      if (now.compareTo(startOfLastBucket) < 0) {
+        // TODO: configurable time-skew handling with options:
+        // - Drop events in the future, keep others within a duration.
+        // - Drop all events on skew
+        // - Guess at time-skew and "fix" events
+        // - Reset our "start" time to now if necessary.
+        buckets.clear();
+        shiftBucketList(N + 1, now);
+        return;
+      }
       long elapsedTimeMillis = now.subtractTimestamp(startOfLastBucket).toMillis();
       long numOfPadBuckets = elapsedTimeMillis / bucketDuration.toMillis();
 
diff --git a/impl_core/src/main/java/io/opencensus/implcore/stats/RecordUtils.java b/impl_core/src/main/java/io/opencensus/implcore/stats/RecordUtils.java
index fbb593f..a9bba2c 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/stats/RecordUtils.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/stats/RecordUtils.java
@@ -17,6 +17,7 @@
 package io.opencensus.implcore.stats;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
 import io.opencensus.common.Function;
 import io.opencensus.common.Functions;
@@ -27,7 +28,8 @@
 import io.opencensus.implcore.stats.MutableAggregation.MutableMean;
 import io.opencensus.implcore.stats.MutableAggregation.MutableSumDouble;
 import io.opencensus.implcore.stats.MutableAggregation.MutableSumLong;
-import io.opencensus.implcore.tags.TagContextImpl;
+import io.opencensus.implcore.tags.TagMapImpl;
+import io.opencensus.implcore.tags.TagValueWithMetadata;
 import io.opencensus.stats.Aggregation;
 import io.opencensus.stats.Aggregation.Count;
 import io.opencensus.stats.Aggregation.Distribution;
@@ -61,37 +63,66 @@
 
   @javax.annotation.Nullable @VisibleForTesting static final TagValue UNKNOWN_TAG_VALUE = null;
 
-  static Map<TagKey, TagValue> getTagMap(TagContext ctx) {
-    if (ctx instanceof TagContextImpl) {
-      return ((TagContextImpl) ctx).getTags();
-    } else {
-      Map<TagKey, TagValue> tags = Maps.newHashMap();
-      for (Iterator<Tag> i = InternalUtils.getTags(ctx); i.hasNext(); ) {
-        Tag tag = i.next();
-        tags.put(tag.getKey(), tag.getValue());
-      }
-      return tags;
+  // TODO(songy23): remove the mapping once we completely remove the deprecated RPC constants.
+  @VisibleForTesting static final TagKey RPC_STATUS = TagKey.create("canonical_status");
+  @VisibleForTesting static final TagKey RPC_METHOD = TagKey.create("method");
+  @VisibleForTesting static final TagKey GRPC_CLIENT_STATUS = TagKey.create("grpc_client_status");
+  @VisibleForTesting static final TagKey GRPC_CLIENT_METHOD = TagKey.create("grpc_client_method");
+  @VisibleForTesting static final TagKey GRPC_SERVER_STATUS = TagKey.create("grpc_server_status");
+  @VisibleForTesting static final TagKey GRPC_SERVER_METHOD = TagKey.create("grpc_server_method");
+  private static final Map<TagKey, TagKey[]> RPC_TAG_MAPPINGS =
+      ImmutableMap.<TagKey, TagKey[]>builder()
+          .put(RPC_STATUS, new TagKey[] {GRPC_CLIENT_STATUS, GRPC_SERVER_STATUS})
+          .put(RPC_METHOD, new TagKey[] {GRPC_CLIENT_METHOD, GRPC_SERVER_METHOD})
+          .build();
+
+  static Map<TagKey, TagValueWithMetadata> getTagMap(TagContext ctx) {
+    if (ctx instanceof TagMapImpl) {
+      return ((TagMapImpl) ctx).getTags();
     }
+    Map<TagKey, TagValueWithMetadata> tags = Maps.newHashMap();
+    for (Iterator<Tag> i = InternalUtils.getTags(ctx); i.hasNext(); ) {
+      Tag tag = i.next();
+      tags.put(tag.getKey(), TagValueWithMetadata.create(tag.getValue(), tag.getTagMetadata()));
+    }
+    return tags;
   }
 
   @VisibleForTesting
   static List</*@Nullable*/ TagValue> getTagValues(
-      Map<? extends TagKey, ? extends TagValue> tags, List<? extends TagKey> columns) {
+      Map<? extends TagKey, TagValueWithMetadata> tags, List<? extends TagKey> columns) {
     List</*@Nullable*/ TagValue> tagValues = new ArrayList</*@Nullable*/ TagValue>(columns.size());
     // Record all the measures in a "Greedy" way.
     // Every view aggregates every measure. This is similar to doing a GROUP BY on the view’s keys.
     for (int i = 0; i < columns.size(); ++i) {
       TagKey tagKey = columns.get(i);
       if (!tags.containsKey(tagKey)) {
-        // replace not found key values by null.
-        tagValues.add(UNKNOWN_TAG_VALUE);
+        @javax.annotation.Nullable TagValue tagValue = UNKNOWN_TAG_VALUE;
+        TagKey[] newKeys = RPC_TAG_MAPPINGS.get(tagKey);
+        if (newKeys != null) {
+          tagValue = getTagValueForDeprecatedRpcTag(tags, newKeys);
+        }
+        tagValues.add(tagValue);
       } else {
-        tagValues.add(tags.get(tagKey));
+        tagValues.add(tags.get(tagKey).getTagValue());
       }
     }
     return tagValues;
   }
 
+  // TODO(songy23): remove the mapping once we completely remove the deprecated RPC constants.
+  @javax.annotation.Nullable
+  private static TagValue getTagValueForDeprecatedRpcTag(
+      Map<? extends TagKey, TagValueWithMetadata> tags, TagKey[] newKeys) {
+    for (TagKey newKey : newKeys) {
+      TagValueWithMetadata valueWithMetadata = tags.get(newKey);
+      if (valueWithMetadata != null) {
+        return valueWithMetadata.getTagValue();
+      }
+    }
+    return UNKNOWN_TAG_VALUE;
+  }
+
   /**
    * Create an empty {@link MutableAggregation} based on the given {@link Aggregation}.
    *
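
The RPC_TAG_MAPPINGS table above lets views that still use the deprecated RPC tag keys ("method", "canonical_status") pick up values recorded under the new gRPC keys. A test-style sketch, assuming it lives in the same package as RecordUtils so it can reach the package-private getTagValues:

    import io.opencensus.implcore.tags.TagValueWithMetadata;
    import io.opencensus.tags.TagKey;
    import io.opencensus.tags.TagMetadata;
    import io.opencensus.tags.TagMetadata.TagTtl;
    import io.opencensus.tags.TagValue;
    import java.util.Collections;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class DeprecatedRpcTagSketch {
      static void resolveDeprecatedKey() {
        Map<TagKey, TagValueWithMetadata> tags = new HashMap<>();
        tags.put(
            TagKey.create("grpc_client_method"),
            TagValueWithMetadata.create(
                TagValue.create("Service.List"),
                TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION)));
        // A view still keyed on the deprecated "method" tag resolves the value recorded
        // under grpc_client_method (or grpc_server_method) instead of null.
        List<TagValue> values =
            RecordUtils.getTagValues(tags, Collections.singletonList(TagKey.create("method")));
        // values.get(0) is TagValue.create("Service.List")
      }
    }
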
diff --git a/impl_core/src/main/java/io/opencensus/implcore/tags/CurrentTagContextUtils.java b/impl_core/src/main/java/io/opencensus/implcore/tags/CurrentTagMapUtils.java
similarity index 77%
rename from impl_core/src/main/java/io/opencensus/implcore/tags/CurrentTagContextUtils.java
rename to impl_core/src/main/java/io/opencensus/implcore/tags/CurrentTagMapUtils.java
index e6bb12f..5f97b05 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/tags/CurrentTagContextUtils.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/tags/CurrentTagMapUtils.java
@@ -24,17 +24,17 @@
 /**
  * Utility methods for accessing the {@link TagContext} contained in the {@link io.grpc.Context}.
  */
-final class CurrentTagContextUtils {
+final class CurrentTagMapUtils {
 
-  private CurrentTagContextUtils() {}
+  private CurrentTagMapUtils() {}
 
   /**
    * Returns the {@link TagContext} from the current context.
    *
    * @return the {@code TagContext} from the current context.
    */
-  static TagContext getCurrentTagContext() {
-    return ContextUtils.TAG_CONTEXT_KEY.get();
+  static TagContext getCurrentTagMap() {
+    return ContextUtils.getValue(Context.current());
   }
 
   /**
@@ -46,21 +46,21 @@
    * @return an object that defines a scope where the given {@code TagContext} is set to the current
    *     context.
    */
-  static Scope withTagContext(TagContext tags) {
-    return new WithTagContext(tags);
+  static Scope withTagMap(TagContext tags) {
+    return new WithTagMap(tags);
   }
 
-  private static final class WithTagContext implements Scope {
+  private static final class WithTagMap implements Scope {
 
     private final Context orig;
 
     /**
-     * Constructs a new {@link WithTagContext}.
+     * Constructs a new {@link WithTagMap}.
      *
      * @param tags the {@code TagContext} to be added to the current {@code Context}.
      */
-    private WithTagContext(TagContext tags) {
-      orig = Context.current().withValue(ContextUtils.TAG_CONTEXT_KEY, tags).attach();
+    private WithTagMap(TagContext tags) {
+      orig = ContextUtils.withValue(Context.current(), tags).attach();
     }
 
     @Override
diff --git a/impl_core/src/main/java/io/opencensus/implcore/tags/NoopTagContextBuilder.java b/impl_core/src/main/java/io/opencensus/implcore/tags/NoopTagMapBuilder.java
similarity index 76%
rename from impl_core/src/main/java/io/opencensus/implcore/tags/NoopTagContextBuilder.java
rename to impl_core/src/main/java/io/opencensus/implcore/tags/NoopTagMapBuilder.java
index eae54c5..866759b 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/tags/NoopTagContextBuilder.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/tags/NoopTagMapBuilder.java
@@ -21,27 +21,34 @@
 import io.opencensus.tags.TagContext;
 import io.opencensus.tags.TagContextBuilder;
 import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagMetadata;
 import io.opencensus.tags.TagValue;
 
 /** {@link TagContextBuilder} that is used when tagging is disabled. */
-final class NoopTagContextBuilder extends TagContextBuilder {
-  static final NoopTagContextBuilder INSTANCE = new NoopTagContextBuilder();
+final class NoopTagMapBuilder extends TagContextBuilder {
+  static final NoopTagMapBuilder INSTANCE = new NoopTagMapBuilder();
 
-  private NoopTagContextBuilder() {}
+  private NoopTagMapBuilder() {}
 
   @Override
+  @SuppressWarnings("deprecation")
   public TagContextBuilder put(TagKey key, TagValue value) {
     return this;
   }
 
   @Override
+  public TagContextBuilder put(TagKey key, TagValue value, TagMetadata tagMetadata) {
+    return this;
+  }
+
+  @Override
   public TagContextBuilder remove(TagKey key) {
     return this;
   }
 
   @Override
   public TagContext build() {
-    return TagContextImpl.EMPTY;
+    return TagMapImpl.EMPTY;
   }
 
   @Override
diff --git a/impl_core/src/main/java/io/opencensus/implcore/tags/TagContextBuilderImpl.java b/impl_core/src/main/java/io/opencensus/implcore/tags/TagContextBuilderImpl.java
deleted file mode 100644
index a17198d..0000000
--- a/impl_core/src/main/java/io/opencensus/implcore/tags/TagContextBuilderImpl.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright 2017, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.implcore.tags;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import io.opencensus.common.Scope;
-import io.opencensus.tags.TagContextBuilder;
-import io.opencensus.tags.TagKey;
-import io.opencensus.tags.TagValue;
-import java.util.HashMap;
-import java.util.Map;
-
-final class TagContextBuilderImpl extends TagContextBuilder {
-  private final Map<TagKey, TagValue> tags;
-
-  TagContextBuilderImpl(Map<TagKey, TagValue> tags) {
-    this.tags = new HashMap<TagKey, TagValue>(tags);
-  }
-
-  TagContextBuilderImpl() {
-    this.tags = new HashMap<TagKey, TagValue>();
-  }
-
-  @Override
-  public TagContextBuilderImpl put(TagKey key, TagValue value) {
-    tags.put(checkNotNull(key, "key"), checkNotNull(value, "value"));
-    return this;
-  }
-
-  @Override
-  public TagContextBuilderImpl remove(TagKey key) {
-    tags.remove(checkNotNull(key, "key"));
-    return this;
-  }
-
-  @Override
-  public TagContextImpl build() {
-    return new TagContextImpl(tags);
-  }
-
-  @Override
-  public Scope buildScoped() {
-    return CurrentTagContextUtils.withTagContext(build());
-  }
-}
diff --git a/impl_core/src/main/java/io/opencensus/implcore/tags/TagContextImpl.java b/impl_core/src/main/java/io/opencensus/implcore/tags/TagContextImpl.java
deleted file mode 100644
index f7a8ff8..0000000
--- a/impl_core/src/main/java/io/opencensus/implcore/tags/TagContextImpl.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright 2017, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.implcore.tags;
-
-import io.opencensus.tags.Tag;
-import io.opencensus.tags.TagContext;
-import io.opencensus.tags.TagKey;
-import io.opencensus.tags.TagValue;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Map.Entry;
-import javax.annotation.Nullable;
-import javax.annotation.concurrent.Immutable;
-
-@Immutable
-public final class TagContextImpl extends TagContext {
-
-  public static final TagContextImpl EMPTY =
-      new TagContextImpl(Collections.<TagKey, TagValue>emptyMap());
-
-  // The types of the TagKey and value must match for each entry.
-  private final Map<TagKey, TagValue> tags;
-
-  public TagContextImpl(Map<? extends TagKey, ? extends TagValue> tags) {
-    this.tags = Collections.unmodifiableMap(new HashMap<TagKey, TagValue>(tags));
-  }
-
-  public Map<TagKey, TagValue> getTags() {
-    return tags;
-  }
-
-  @Override
-  protected Iterator<Tag> getIterator() {
-    return new TagIterator(tags);
-  }
-
-  @Override
-  public boolean equals(@Nullable Object other) {
-    // Directly compare the tags when both objects are TagContextImpls, for efficiency.
-    if (other instanceof TagContextImpl) {
-      return getTags().equals(((TagContextImpl) other).getTags());
-    }
-    return super.equals(other);
-  }
-
-  private static final class TagIterator implements Iterator<Tag> {
-    Iterator<Map.Entry<TagKey, TagValue>> iterator;
-
-    TagIterator(Map<TagKey, TagValue> tags) {
-      iterator = tags.entrySet().iterator();
-    }
-
-    @Override
-    public boolean hasNext() {
-      return iterator.hasNext();
-    }
-
-    @Override
-    public Tag next() {
-      final Entry<TagKey, TagValue> next = iterator.next();
-      return Tag.create(next.getKey(), next.getValue());
-    }
-
-    @Override
-    public void remove() {
-      throw new UnsupportedOperationException("TagIterator.remove()");
-    }
-  }
-}
diff --git a/impl_core/src/main/java/io/opencensus/implcore/tags/TagContextUtils.java b/impl_core/src/main/java/io/opencensus/implcore/tags/TagContextUtils.java
index 5fbc505..37b8a00 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/tags/TagContextUtils.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/tags/TagContextUtils.java
@@ -27,7 +27,7 @@
    * @param tag tag containing the key and value to set.
    * @param builder the builder to update.
    */
-  static void addTagToBuilder(Tag tag, TagContextBuilderImpl builder) {
-    builder.put(tag.getKey(), tag.getValue());
+  static void addTagToBuilder(Tag tag, TagMapBuilderImpl builder) {
+    builder.put(tag.getKey(), tag.getValue(), tag.getTagMetadata());
   }
 }
diff --git a/impl_core/src/main/java/io/opencensus/implcore/tags/TagMapBuilderImpl.java b/impl_core/src/main/java/io/opencensus/implcore/tags/TagMapBuilderImpl.java
new file mode 100644
index 0000000..85c8c96
--- /dev/null
+++ b/impl_core/src/main/java/io/opencensus/implcore/tags/TagMapBuilderImpl.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.tags;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import io.opencensus.common.Scope;
+import io.opencensus.tags.TagContextBuilder;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagMetadata;
+import io.opencensus.tags.TagMetadata.TagTtl;
+import io.opencensus.tags.TagValue;
+import java.util.HashMap;
+import java.util.Map;
+
+final class TagMapBuilderImpl extends TagContextBuilder {
+
+  private static final TagMetadata METADATA_UNLIMITED_PROPAGATION =
+      TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION);
+
+  private final Map<TagKey, TagValueWithMetadata> tags;
+
+  TagMapBuilderImpl(Map<TagKey, TagValueWithMetadata> tags) {
+    this.tags = new HashMap<TagKey, TagValueWithMetadata>(tags);
+  }
+
+  TagMapBuilderImpl() {
+    this.tags = new HashMap<TagKey, TagValueWithMetadata>();
+  }
+
+  @Override
+  @SuppressWarnings("deprecation")
+  public TagMapBuilderImpl put(TagKey key, TagValue value) {
+    put(key, value, METADATA_UNLIMITED_PROPAGATION);
+    return this;
+  }
+
+  @Override
+  public TagContextBuilder put(TagKey key, TagValue value, TagMetadata tagMetadata) {
+    TagValueWithMetadata valueWithMetadata =
+        TagValueWithMetadata.create(
+            checkNotNull(value, "value"), checkNotNull(tagMetadata, "tagMetadata"));
+    tags.put(checkNotNull(key, "key"), valueWithMetadata);
+    return this;
+  }
+
+  @Override
+  public TagMapBuilderImpl remove(TagKey key) {
+    tags.remove(checkNotNull(key, "key"));
+    return this;
+  }
+
+  @Override
+  public TagMapImpl build() {
+    return new TagMapImpl(tags);
+  }
+
+  @Override
+  public Scope buildScoped() {
+    return CurrentTagMapUtils.withTagMap(build());
+  }
+}
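
TagMapBuilderImpl backs the new three-argument TagContextBuilder.put, which carries TagMetadata alongside each value. A hedged usage sketch via the public Tagger API (tag names and values are illustrative):

    import io.opencensus.common.Scope;
    import io.opencensus.tags.TagKey;
    import io.opencensus.tags.TagMetadata;
    import io.opencensus.tags.TagMetadata.TagTtl;
    import io.opencensus.tags.TagValue;
    import io.opencensus.tags.Tagger;
    import io.opencensus.tags.Tags;

    class TagMetadataSketch {
      private static final Tagger TAGGER = Tags.getTagger();
      private static final TagKey METHOD = TagKey.create("method");

      static void runWithTag() {
        // A NO_PROPAGATION tag stays local to this process (see the serializer change
        // later in this patch); UNLIMITED_PROPAGATION is the default used by the two-arg put.
        try (Scope scope =
            TAGGER.emptyBuilder()
                .put(METHOD, TagValue.create("ListItems"),
                    TagMetadata.create(TagTtl.NO_PROPAGATION))
                .buildScoped()) {
          // Work here sees the tag via TAGGER.getCurrentTagContext().
        }
      }
    }
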
diff --git a/impl_core/src/main/java/io/opencensus/implcore/tags/TagMapImpl.java b/impl_core/src/main/java/io/opencensus/implcore/tags/TagMapImpl.java
new file mode 100644
index 0000000..4ae9778
--- /dev/null
+++ b/impl_core/src/main/java/io/opencensus/implcore/tags/TagMapImpl.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.tags;
+
+import io.opencensus.tags.Tag;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.TagKey;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Map.Entry;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.Immutable;
+
+/** Implementation of {@link TagContext}. */
+@Immutable
+public final class TagMapImpl extends TagContext {
+
+  /** Empty {@link TagMapImpl} with no tags. */
+  public static final TagMapImpl EMPTY =
+      new TagMapImpl(Collections.<TagKey, TagValueWithMetadata>emptyMap());
+
+  // The types of the TagKey and value must match for each entry.
+  private final Map<TagKey, TagValueWithMetadata> tags;
+
+  /**
+   * Creates a new {@link TagMapImpl} with the given tags.
+   *
+   * @param tags the initial tags for this {@code TagMapImpl}.
+   */
+  public TagMapImpl(Map<? extends TagKey, ? extends TagValueWithMetadata> tags) {
+    this.tags = Collections.unmodifiableMap(new HashMap<TagKey, TagValueWithMetadata>(tags));
+  }
+
+  /**
+   * Returns the tags of this {@link TagMapImpl}.
+   *
+   * @return the tags.
+   */
+  public Map<TagKey, TagValueWithMetadata> getTags() {
+    return tags;
+  }
+
+  @Override
+  protected Iterator<Tag> getIterator() {
+    return new TagIterator(tags);
+  }
+
+  @Override
+  public boolean equals(@Nullable Object other) {
+    // Directly compare the tags when both objects are TagMapImpls, for efficiency.
+    if (other instanceof TagMapImpl) {
+      return getTags().equals(((TagMapImpl) other).getTags());
+    }
+    return super.equals(other);
+  }
+
+  private static final class TagIterator implements Iterator<Tag> {
+    Iterator<Map.Entry<TagKey, TagValueWithMetadata>> iterator;
+
+    TagIterator(Map<TagKey, TagValueWithMetadata> tags) {
+      iterator = tags.entrySet().iterator();
+    }
+
+    @Override
+    public boolean hasNext() {
+      return iterator.hasNext();
+    }
+
+    @Override
+    public Tag next() {
+      final Entry<TagKey, TagValueWithMetadata> next = iterator.next();
+      TagValueWithMetadata valueWithMetadata = next.getValue();
+      return Tag.create(
+          next.getKey(), valueWithMetadata.getTagValue(), valueWithMetadata.getTagMetadata());
+    }
+
+    @Override
+    public void remove() {
+      throw new UnsupportedOperationException("TagIterator.remove()");
+    }
+  }
+}
diff --git a/impl_core/src/main/java/io/opencensus/implcore/tags/TagValueWithMetadata.java b/impl_core/src/main/java/io/opencensus/implcore/tags/TagValueWithMetadata.java
new file mode 100644
index 0000000..ba3a8fb
--- /dev/null
+++ b/impl_core/src/main/java/io/opencensus/implcore/tags/TagValueWithMetadata.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.tags;
+
+import com.google.auto.value.AutoValue;
+import io.opencensus.common.Internal;
+import io.opencensus.tags.TagMetadata;
+import io.opencensus.tags.TagValue;
+import javax.annotation.concurrent.Immutable;
+
+/** Internal helper class that holds a TagValue and a TagMetadata. */
+@Immutable
+@AutoValue
+@Internal
+public abstract class TagValueWithMetadata {
+
+  TagValueWithMetadata() {}
+
+  /**
+   * Creates a {@link TagValueWithMetadata}.
+   *
+   * @param tagValue the tag value.
+   * @param tagMetadata metadata for the tag.
+   * @return a {@code TagValueWithMetadata}.
+   */
+  public static TagValueWithMetadata create(TagValue tagValue, TagMetadata tagMetadata) {
+    return new AutoValue_TagValueWithMetadata(tagValue, tagMetadata);
+  }
+
+  /**
+   * Returns the {@code TagValue}.
+   *
+   * @return the {@code TagValue}.
+   */
+  public abstract TagValue getTagValue();
+
+  abstract TagMetadata getTagMetadata();
+}
diff --git a/impl_core/src/main/java/io/opencensus/implcore/tags/TaggerImpl.java b/impl_core/src/main/java/io/opencensus/implcore/tags/TaggerImpl.java
index dcf9a1b..bdf48fe 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/tags/TaggerImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/tags/TaggerImpl.java
@@ -29,8 +29,8 @@
 
 /** Implementation of {@link Tagger}. */
 public final class TaggerImpl extends Tagger {
-  // All methods in this class use TagContextImpl and TagContextBuilderImpl. For example,
-  // withTagContext(...) always puts a TagContextImpl into scope, even if the argument is another
+  // All methods in this class use TagMapImpl and TagMapBuilderImpl. For example,
+  // withTagContext(...) always puts a TagMapImpl into scope, even if the argument is another
   // TagContext subclass.
 
   private final CurrentState state;
@@ -40,54 +40,54 @@
   }
 
   @Override
-  public TagContextImpl empty() {
-    return TagContextImpl.EMPTY;
+  public TagMapImpl empty() {
+    return TagMapImpl.EMPTY;
   }
 
   @Override
-  public TagContextImpl getCurrentTagContext() {
+  public TagMapImpl getCurrentTagContext() {
     return state.getInternal() == State.DISABLED
-        ? TagContextImpl.EMPTY
-        : toTagContextImpl(CurrentTagContextUtils.getCurrentTagContext());
+        ? TagMapImpl.EMPTY
+        : toTagMapImpl(CurrentTagMapUtils.getCurrentTagMap());
   }
 
   @Override
   public TagContextBuilder emptyBuilder() {
     return state.getInternal() == State.DISABLED
-        ? NoopTagContextBuilder.INSTANCE
-        : new TagContextBuilderImpl();
+        ? NoopTagMapBuilder.INSTANCE
+        : new TagMapBuilderImpl();
   }
 
   @Override
   public TagContextBuilder currentBuilder() {
     return state.getInternal() == State.DISABLED
-        ? NoopTagContextBuilder.INSTANCE
-        : toBuilder(CurrentTagContextUtils.getCurrentTagContext());
+        ? NoopTagMapBuilder.INSTANCE
+        : toBuilder(CurrentTagMapUtils.getCurrentTagMap());
   }
 
   @Override
   public TagContextBuilder toBuilder(TagContext tags) {
     return state.getInternal() == State.DISABLED
-        ? NoopTagContextBuilder.INSTANCE
-        : toTagContextBuilderImpl(tags);
+        ? NoopTagMapBuilder.INSTANCE
+        : toTagMapBuilderImpl(tags);
   }
 
   @Override
   public Scope withTagContext(TagContext tags) {
     return state.getInternal() == State.DISABLED
         ? NoopScope.getInstance()
-        : CurrentTagContextUtils.withTagContext(toTagContextImpl(tags));
+        : CurrentTagMapUtils.withTagMap(toTagMapImpl(tags));
   }
 
-  private static TagContextImpl toTagContextImpl(TagContext tags) {
-    if (tags instanceof TagContextImpl) {
-      return (TagContextImpl) tags;
+  private static TagMapImpl toTagMapImpl(TagContext tags) {
+    if (tags instanceof TagMapImpl) {
+      return (TagMapImpl) tags;
     } else {
       Iterator<Tag> i = InternalUtils.getTags(tags);
       if (!i.hasNext()) {
-        return TagContextImpl.EMPTY;
+        return TagMapImpl.EMPTY;
       }
-      TagContextBuilderImpl builder = new TagContextBuilderImpl();
+      TagMapBuilderImpl builder = new TagMapBuilderImpl();
       while (i.hasNext()) {
         Tag tag = i.next();
         if (tag != null) {
@@ -98,12 +98,12 @@
     }
   }
 
-  private static TagContextBuilderImpl toTagContextBuilderImpl(TagContext tags) {
-    // Copy the tags more efficiently in the expected case, when the TagContext is a TagContextImpl.
-    if (tags instanceof TagContextImpl) {
-      return new TagContextBuilderImpl(((TagContextImpl) tags).getTags());
+  private static TagMapBuilderImpl toTagMapBuilderImpl(TagContext tags) {
+    // Copy the tags more efficiently in the expected case, when the TagContext is a TagMapImpl.
+    if (tags instanceof TagMapImpl) {
+      return new TagMapBuilderImpl(((TagMapImpl) tags).getTags());
     } else {
-      TagContextBuilderImpl builder = new TagContextBuilderImpl();
+      TagMapBuilderImpl builder = new TagMapBuilderImpl();
       for (Iterator<Tag> i = InternalUtils.getTags(tags); i.hasNext(); ) {
         Tag tag = i.next();
         if (tag != null) {
diff --git a/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/SerializationUtils.java b/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/BinarySerializationUtils.java
similarity index 89%
rename from impl_core/src/main/java/io/opencensus/implcore/tags/propagation/SerializationUtils.java
rename to impl_core/src/main/java/io/opencensus/implcore/tags/propagation/BinarySerializationUtils.java
index 2daad95..80814b2 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/SerializationUtils.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/BinarySerializationUtils.java
@@ -21,11 +21,14 @@
 import com.google.common.io.ByteArrayDataOutput;
 import com.google.common.io.ByteStreams;
 import io.opencensus.implcore.internal.VarInt;
-import io.opencensus.implcore.tags.TagContextImpl;
+import io.opencensus.implcore.tags.TagMapImpl;
+import io.opencensus.implcore.tags.TagValueWithMetadata;
 import io.opencensus.tags.InternalUtils;
 import io.opencensus.tags.Tag;
 import io.opencensus.tags.TagContext;
 import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagMetadata;
+import io.opencensus.tags.TagMetadata.TagTtl;
 import io.opencensus.tags.TagValue;
 import io.opencensus.tags.propagation.TagContextDeserializationException;
 import io.opencensus.tags.propagation.TagContextSerializationException;
@@ -63,8 +66,12 @@
  *       </ul>
  * </ul>
  */
-final class SerializationUtils {
-  private SerializationUtils() {}
+final class BinarySerializationUtils {
+
+  private static final TagMetadata METADATA_UNLIMITED_PROPAGATION =
+      TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION);
+
+  private BinarySerializationUtils() {}
 
   @VisibleForTesting static final int VERSION_ID = 0;
   @VisibleForTesting static final int TAG_FIELD_ID = 0;
@@ -80,6 +87,9 @@
     int totalChars = 0; // Here chars are equivalent to bytes, since we're using ascii chars.
     for (Iterator<Tag> i = InternalUtils.getTags(tags); i.hasNext(); ) {
       Tag tag = i.next();
+      if (TagTtl.NO_PROPAGATION.equals(tag.getTagMetadata().getTagTtl())) {
+        continue;
+      }
       totalChars += tag.getKey().getName().length();
       totalChars += tag.getValue().asString().length();
       encodeTag(tag, byteArrayDataOutput);
@@ -94,7 +104,7 @@
 
   // Deserializes input to TagContext based on the binary format standard.
   // The encoded tags are of the form: <version_id><encoded_tags>
-  static TagContextImpl deserializeBinary(byte[] bytes) throws TagContextDeserializationException {
+  static TagMapImpl deserializeBinary(byte[] bytes) throws TagContextDeserializationException {
     try {
       if (bytes.length == 0) {
         // Does not allow empty byte array.
@@ -107,15 +117,15 @@
         throw new TagContextDeserializationException(
             "Wrong Version ID: " + versionId + ". Currently supports version up to: " + VERSION_ID);
       }
-      return new TagContextImpl(parseTags(buffer));
+      return new TagMapImpl(parseTags(buffer));
     } catch (BufferUnderflowException exn) {
       throw new TagContextDeserializationException(exn.toString()); // byte array format error.
     }
   }
 
-  private static Map<TagKey, TagValue> parseTags(ByteBuffer buffer)
+  private static Map<TagKey, TagValueWithMetadata> parseTags(ByteBuffer buffer)
       throws TagContextDeserializationException {
-    Map<TagKey, TagValue> tags = new HashMap<TagKey, TagValue>();
+    Map<TagKey, TagValueWithMetadata> tags = new HashMap<TagKey, TagValueWithMetadata>();
     int limit = buffer.limit();
     int totalChars = 0; // Here chars are equivalent to bytes, since we're using ascii chars.
     while (buffer.position() < limit) {
@@ -125,7 +135,7 @@
         TagValue val = createTagValue(key, decodeString(buffer));
         totalChars += key.getName().length();
         totalChars += val.asString().length();
-        tags.put(key, val);
+        tags.put(key, TagValueWithMetadata.create(val, METADATA_UNLIMITED_PROPAGATION));
       } else {
         // Stop parsing at the first unknown field ID, since there is no way to know its length.
         // TODO(sebright): Consider storing the rest of the byte array in the TagContext.
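Editor's note: for context, a minimal sketch of driving the binary propagation path above through the public opencensus-api surface. The class name and tag values are illustrative, and the Tags/TagContextBinarySerializer calls are assumed to match the opencensus-api version this change builds against.

    import io.opencensus.tags.TagContext;
    import io.opencensus.tags.TagKey;
    import io.opencensus.tags.TagMetadata;
    import io.opencensus.tags.TagMetadata.TagTtl;
    import io.opencensus.tags.TagValue;
    import io.opencensus.tags.Tags;
    import io.opencensus.tags.propagation.TagContextBinarySerializer;

    final class BinaryPropagationSketch {
      static TagContext roundTrip() throws Exception {
        // One tag that propagates and one that must stay local.
        TagContext tags =
            Tags.getTagger()
                .emptyBuilder()
                .put(TagKey.create("user"), TagValue.create("alice"),
                    TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION))
                .put(TagKey.create("local_only"), TagValue.create("x"),
                    TagMetadata.create(TagTtl.NO_PROPAGATION))
                .build();
        TagContextBinarySerializer serializer =
            Tags.getTagPropagationComponent().getBinarySerializer();
        // serializeBinary skips NO_PROPAGATION tags, so only "user" is encoded.
        byte[] encoded = serializer.toByteArray(tags);
        // deserializeBinary restores the tags with UNLIMITED_PROPAGATION metadata.
        return serializer.fromByteArray(encoded);
      }
    }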
diff --git a/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/CorrelationContextFormat.java b/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/CorrelationContextFormat.java
new file mode 100644
index 0000000..694a7e8
--- /dev/null
+++ b/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/CorrelationContextFormat.java
@@ -0,0 +1,188 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.tags.propagation;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Splitter;
+import io.opencensus.implcore.internal.CurrentState;
+import io.opencensus.implcore.internal.CurrentState.State;
+import io.opencensus.implcore.tags.TagMapImpl;
+import io.opencensus.implcore.tags.TagValueWithMetadata;
+import io.opencensus.tags.InternalUtils;
+import io.opencensus.tags.Tag;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagMetadata;
+import io.opencensus.tags.TagMetadata.TagTtl;
+import io.opencensus.tags.TagValue;
+import io.opencensus.tags.propagation.TagContextDeserializationException;
+import io.opencensus.tags.propagation.TagContextSerializationException;
+import io.opencensus.tags.propagation.TagContextTextFormat;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import javax.annotation.Nullable;
+
+/*>>>
+import org.checkerframework.checker.nullness.qual.NonNull;
+*/
+
+/**
+ * Implementation of the W3C correlation context propagation protocol. See <a
+ * href=https://github.com/w3c/correlation-context>w3c/correlation-context</a>.
+ */
+final class CorrelationContextFormat extends TagContextTextFormat {
+
+  @VisibleForTesting static final String CORRELATION_CONTEXT = "Correlation-Context";
+  private static final List<String> FIELDS = Collections.singletonList(CORRELATION_CONTEXT);
+
+  @VisibleForTesting static final int MAX_NUMBER_OF_TAGS = 180;
+  private static final int TAG_SERIALIZED_SIZE_LIMIT = 4096;
+  private static final int TAGCONTEXT_SERIALIZED_SIZE_LIMIT = 8192;
+  private static final char TAG_KEY_VALUE_DELIMITER = '=';
+  private static final char TAG_DELIMITER = ',';
+  private static final Splitter TAG_KEY_VALUE_SPLITTER = Splitter.on(TAG_KEY_VALUE_DELIMITER);
+  private static final Splitter TAG_SPLITTER = Splitter.on(TAG_DELIMITER);
+
+  // TODO(songya): These constants are for tag metadata. Uncomment them when we decide to support
+  // encoding tag metadata.
+  private static final char TAG_PROPERTIES_DELIMITER = ';';
+  // private static final char TAG_PROPERTIES_KEY_VALUE_DELIMITER = '=';
+
+  @VisibleForTesting
+  static final TagMetadata METADATA_UNLIMITED_PROPAGATION =
+      TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION);
+
+  private final CurrentState state;
+
+  CorrelationContextFormat(CurrentState state) {
+    this.state = state;
+  }
+
+  @Override
+  public List<String> fields() {
+    return FIELDS;
+  }
+
+  @Override
+  public <C /*>>> extends @NonNull Object*/> void inject(
+      TagContext tagContext, C carrier, Setter<C> setter) throws TagContextSerializationException {
+    checkNotNull(tagContext, "tagContext");
+    checkNotNull(carrier, "carrier");
+    checkNotNull(setter, "setter");
+    if (State.DISABLED.equals(state.getInternal())) {
+      return;
+    }
+
+    try {
+      StringBuilder stringBuilder = new StringBuilder(TAGCONTEXT_SERIALIZED_SIZE_LIMIT);
+      int totalChars = 0; // Here chars are equivalent to bytes, since we're using ascii chars.
+      int totalTags = 0;
+      for (Iterator<Tag> i = InternalUtils.getTags(tagContext); i.hasNext(); ) {
+        Tag tag = i.next();
+        if (TagTtl.NO_PROPAGATION.equals(tag.getTagMetadata().getTagTtl())) {
+          continue;
+        }
+        if (stringBuilder.length() > 0) {
+          stringBuilder.append(TAG_DELIMITER);
+        }
+        totalTags++;
+        totalChars += encodeTag(tag, stringBuilder);
+      }
+      checkArgument(
+          totalTags <= MAX_NUMBER_OF_TAGS,
+          "Number of tags in the TagContext exceeds limit " + MAX_NUMBER_OF_TAGS);
+      // Note per W3C spec, only the length of tag key and value counts towards the total length.
+      // Length of properties (a.k.a TagMetadata) does not count.
+      checkArgument(
+          totalChars <= TAGCONTEXT_SERIALIZED_SIZE_LIMIT,
+          "Size of TagContext exceeds the maximum serialized size "
+              + TAGCONTEXT_SERIALIZED_SIZE_LIMIT);
+      setter.put(carrier, CORRELATION_CONTEXT, stringBuilder.toString());
+    } catch (IllegalArgumentException e) {
+      throw new TagContextSerializationException("Failed to serialize TagContext", e);
+    }
+  }
+
+  // Encodes the tag to the given string builder, and returns the length of encoded key-value pair.
+  private static int encodeTag(Tag tag, StringBuilder stringBuilder) {
+    String key = tag.getKey().getName();
+    String value = tag.getValue().asString();
+    int charsOfTag = key.length() + value.length();
+    // This should never happen with our current constraints (<= 255 chars) on tags.
+    checkArgument(
+        charsOfTag <= TAG_SERIALIZED_SIZE_LIMIT,
+        "Serialized size of tag " + tag + " exceeds limit " + TAG_SERIALIZED_SIZE_LIMIT);
+
+    // TODO(songy23): do we want to encode TagMetadata?
+    stringBuilder.append(key).append(TAG_KEY_VALUE_DELIMITER).append(value);
+    return charsOfTag;
+  }
+
+  @Override
+  public <C /*>>> extends @NonNull Object*/> TagContext extract(C carrier, Getter<C> getter)
+      throws TagContextDeserializationException {
+    checkNotNull(carrier, "carrier");
+    checkNotNull(getter, "getter");
+    if (State.DISABLED.equals(state.getInternal())) {
+      return TagMapImpl.EMPTY;
+    }
+
+    @Nullable String correlationContext = getter.get(carrier, CORRELATION_CONTEXT);
+    if (correlationContext == null) {
+      throw new TagContextDeserializationException(CORRELATION_CONTEXT + " not present.");
+    }
+    try {
+      if (correlationContext.isEmpty()) {
+        return TagMapImpl.EMPTY;
+      }
+      Map<TagKey, TagValueWithMetadata> tags = new HashMap<>();
+      List<String> stringTags = TAG_SPLITTER.splitToList(correlationContext);
+      for (String stringTag : stringTags) {
+        decodeTag(stringTag, tags);
+      }
+      return new TagMapImpl(tags);
+    } catch (IllegalArgumentException e) {
+      throw new TagContextDeserializationException("Invalid TagContext: " + correlationContext, e);
+    }
+  }
+
+  // Decodes the tag key, value and metadata from the encoded string tag, then puts them into the
+  // tag map. The format of an encoded string tag is name1=value1;properties1=p1;properties2=p2.
+  private static void decodeTag(String stringTag, Map<TagKey, TagValueWithMetadata> tags) {
+    String keyWithValue;
+    int firstPropertyIndex = stringTag.indexOf(TAG_PROPERTIES_DELIMITER);
+    if (firstPropertyIndex != -1) { // Tag with properties.
+      keyWithValue = stringTag.substring(0, firstPropertyIndex);
+      // TODO(songya): support decoding tag properties.
+    } else { // Tag without properties.
+      keyWithValue = stringTag;
+    }
+    List<String> keyValuePair = TAG_KEY_VALUE_SPLITTER.splitToList(keyWithValue);
+    checkArgument(keyValuePair.size() == 2, "Malformed tag " + stringTag);
+    TagKey key = TagKey.create(keyValuePair.get(0).trim());
+    TagValue value = TagValue.create(keyValuePair.get(1).trim());
+    TagValueWithMetadata valueWithMetadata =
+        TagValueWithMetadata.create(value, METADATA_UNLIMITED_PROPAGATION);
+    tags.put(key, valueWithMetadata);
+  }
+}
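Editor's note: a sketch of exercising the Correlation-Context text format above end to end, using a plain Map as the carrier. The Setter/Getter subclasses and the CorrelationContextSketch class are illustrative; the TagContextTextFormat API is assumed from opencensus-api.

    import io.opencensus.tags.TagContext;
    import io.opencensus.tags.Tags;
    import io.opencensus.tags.propagation.TagContextTextFormat;
    import java.util.HashMap;
    import java.util.Map;

    final class CorrelationContextSketch {
      private static final TagContextTextFormat TEXT_FORMAT =
          Tags.getTagPropagationComponent().getCorrelationContextFormat();

      // Illustrative carrier adapters, e.g. for a map of HTTP headers.
      private static final TagContextTextFormat.Setter<Map<String, String>> SETTER =
          new TagContextTextFormat.Setter<Map<String, String>>() {
            @Override
            public void put(Map<String, String> carrier, String key, String value) {
              carrier.put(key, value);
            }
          };

      private static final TagContextTextFormat.Getter<Map<String, String>> GETTER =
          new TagContextTextFormat.Getter<Map<String, String>>() {
            @Override
            public String get(Map<String, String> carrier, String key) {
              return carrier.get(key);
            }
          };

      static TagContext roundTrip(TagContext tags) throws Exception {
        Map<String, String> headers = new HashMap<>();
        // Writes "Correlation-Context: key1=value1,key2=value2" into the carrier.
        TEXT_FORMAT.inject(tags, headers, SETTER);
        // Parses it back; decoded tags carry UNLIMITED_PROPAGATION metadata.
        return TEXT_FORMAT.extract(headers, GETTER);
      }
    }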
diff --git a/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/TagContextBinarySerializerImpl.java b/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/TagContextBinarySerializerImpl.java
index 5a25da5..e4128f5 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/TagContextBinarySerializerImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/TagContextBinarySerializerImpl.java
@@ -18,7 +18,7 @@
 
 import io.opencensus.implcore.internal.CurrentState;
 import io.opencensus.implcore.internal.CurrentState.State;
-import io.opencensus.implcore.tags.TagContextImpl;
+import io.opencensus.implcore.tags.TagMapImpl;
 import io.opencensus.tags.TagContext;
 import io.opencensus.tags.propagation.TagContextBinarySerializer;
 import io.opencensus.tags.propagation.TagContextDeserializationException;
@@ -37,13 +37,13 @@
   public byte[] toByteArray(TagContext tags) throws TagContextSerializationException {
     return state.getInternal() == State.DISABLED
         ? EMPTY_BYTE_ARRAY
-        : SerializationUtils.serializeBinary(tags);
+        : BinarySerializationUtils.serializeBinary(tags);
   }
 
   @Override
   public TagContext fromByteArray(byte[] bytes) throws TagContextDeserializationException {
     return state.getInternal() == State.DISABLED
-        ? TagContextImpl.EMPTY
-        : SerializationUtils.deserializeBinary(bytes);
+        ? TagMapImpl.EMPTY
+        : BinarySerializationUtils.deserializeBinary(bytes);
   }
 }
diff --git a/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/TagPropagationComponentImpl.java b/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/TagPropagationComponentImpl.java
index 9ba0da4..227032e 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/TagPropagationComponentImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/tags/propagation/TagPropagationComponentImpl.java
@@ -18,18 +18,26 @@
 
 import io.opencensus.implcore.internal.CurrentState;
 import io.opencensus.tags.propagation.TagContextBinarySerializer;
+import io.opencensus.tags.propagation.TagContextTextFormat;
 import io.opencensus.tags.propagation.TagPropagationComponent;
 
 /** Implementation of {@link TagPropagationComponent}. */
 public final class TagPropagationComponentImpl extends TagPropagationComponent {
   private final TagContextBinarySerializer tagContextBinarySerializer;
+  private final TagContextTextFormat tagContextTextFormat;
 
   public TagPropagationComponentImpl(CurrentState state) {
     tagContextBinarySerializer = new TagContextBinarySerializerImpl(state);
+    tagContextTextFormat = new CorrelationContextFormat(state);
   }
 
   @Override
   public TagContextBinarySerializer getBinarySerializer() {
     return tagContextBinarySerializer;
   }
+
+  @Override
+  public TagContextTextFormat getCorrelationContextFormat() {
+    return tagContextTextFormat;
+  }
 }
diff --git a/impl_core/src/main/java/io/opencensus/implcore/trace/RecordEventsSpanImpl.java b/impl_core/src/main/java/io/opencensus/implcore/trace/RecordEventsSpanImpl.java
index af3545b..eb979eb 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/trace/RecordEventsSpanImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/trace/RecordEventsSpanImpl.java
@@ -23,7 +23,6 @@
 import com.google.common.base.Preconditions;
 import com.google.common.collect.EvictingQueue;
 import io.opencensus.common.Clock;
-import io.opencensus.implcore.internal.CheckerFrameworkUtils;
 import io.opencensus.implcore.internal.TimestampConverter;
 import io.opencensus.implcore.trace.internal.ConcurrentIntrusiveList.Element;
 import io.opencensus.trace.Annotation;
@@ -75,7 +74,7 @@
   private final Clock clock;
   // The time converter used to convert nano time to Timestamp. This is needed because Java has
   // millisecond granularity for Timestamp and tracing events are recorded more often.
-  @Nullable private final TimestampConverter timestampConverter;
+  private final TimestampConverter timestampConverter;
   // The start time of the span.
   private final long startNanoTime;
   // Set of recorded attributes. DO NOT CALL any other method that changes the ordering of events.
@@ -94,6 +93,9 @@
   @GuardedBy("this")
   @Nullable
   private TraceEvents<Link> links;
+  // The number of children.
+  @GuardedBy("this")
+  private int numberOfChildren;
   // The status of the span.
   @GuardedBy("this")
   @Nullable
@@ -259,16 +261,14 @@
           hasRemoteParent,
           name,
           kind,
-          CheckerFrameworkUtils.castNonNull(timestampConverter).convertNanoTime(startNanoTime),
+          timestampConverter.convertNanoTime(startNanoTime),
           attributesSpanData,
           annotationsSpanData,
           messageEventsSpanData,
           linksSpanData,
-          null, // Not supported yet.
+          numberOfChildren,
           hasBeenEnded ? getStatusWithDefault() : null,
-          hasBeenEnded
-              ? CheckerFrameworkUtils.castNonNull(timestampConverter).convertNanoTime(endNanoTime)
-              : null);
+          hasBeenEnded ? timestampConverter.convertNanoTime(endNanoTime) : null);
     }
   }
 
@@ -384,6 +384,16 @@
     startEndHandler.onEnd(this);
   }
 
+  void addChild() {
+    synchronized (this) {
+      if (hasBeenEnded) {
+        logger.log(Level.FINE, "Calling addChild() on an ended Span.");
+        return;
+      }
+      numberOfChildren++;
+    }
+  }
+
   @GuardedBy("this")
   private AttributesWithCapacity getInitializedAttributes() {
     if (attributes == null) {
@@ -426,14 +436,13 @@
   }
 
   private static <T> SpanData.TimedEvents<T> createTimedEvents(
-      TraceEvents<EventWithNanoTime<T>> events, @Nullable TimestampConverter timestampConverter) {
+      TraceEvents<EventWithNanoTime<T>> events, TimestampConverter timestampConverter) {
     if (events == null) {
       return SpanData.TimedEvents.create(Collections.<TimedEvent<T>>emptyList(), 0);
     }
     List<TimedEvent<T>> eventsList = new ArrayList<TimedEvent<T>>(events.events.size());
     for (EventWithNanoTime<T> networkEvent : events.events) {
-      eventsList.add(
-          networkEvent.toSpanDataTimedEvent(CheckerFrameworkUtils.castNonNull(timestampConverter)));
+      eventsList.add(networkEvent.toSpanDataTimedEvent(timestampConverter));
     }
     return SpanData.TimedEvents.create(eventsList, events.getNumberOfDroppedEvents());
   }
@@ -572,6 +581,7 @@
     this.clock = clock;
     this.hasBeenEnded = false;
     this.sampleToLocalSpanStore = false;
+    this.numberOfChildren = 0;
     this.timestampConverter =
         timestampConverter != null ? timestampConverter : TimestampConverter.now(clock);
     startNanoTime = clock.nowNanos();
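Editor's note: with the counter above, exported spans now report how many direct children they started instead of a fixed null. A consumer-side sketch, assuming the SpanData accessor from opencensus-api; the class name is illustrative.

    import io.opencensus.trace.export.SpanData;

    final class ChildCountSketch {
      static int childCountOrZero(SpanData spanData) {
        // Previously always null for spans produced by this implementation.
        Integer count = spanData.getChildSpanCount();
        return count == null ? 0 : count;
      }
    }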
diff --git a/impl_core/src/main/java/io/opencensus/implcore/trace/SpanBuilderImpl.java b/impl_core/src/main/java/io/opencensus/implcore/trace/SpanBuilderImpl.java
index 5565e9d..bd258b3 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/trace/SpanBuilderImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/trace/SpanBuilderImpl.java
@@ -58,14 +58,14 @@
   @Nullable private Kind kind;
 
   private Span startSpanInternal(
-      @Nullable SpanContext parent,
+      @Nullable SpanContext parentContext,
       @Nullable Boolean hasRemoteParent,
       String name,
       @Nullable Sampler sampler,
       List<Span> parentLinks,
       @Nullable Boolean recordEvents,
       @Nullable Kind kind,
-      @Nullable TimestampConverter timestampConverter) {
+      @Nullable Span parentSpan) {
     TraceParams activeTraceParams = options.traceConfig.getActiveTraceParams();
     Random random = options.randomHandler.current();
     TraceId traceId;
@@ -73,20 +73,20 @@
     SpanId parentSpanId = null;
     // TODO(bdrutu): Handle tracestate correctly not just propagate.
     Tracestate tracestate = TRACESTATE_DEFAULT;
-    if (parent == null || !parent.isValid()) {
+    if (parentContext == null || !parentContext.isValid()) {
       // New root span.
       traceId = TraceId.generateRandomId(random);
       // This is a root span so no remote or local parent.
       hasRemoteParent = null;
     } else {
       // New child span.
-      traceId = parent.getTraceId();
-      parentSpanId = parent.getSpanId();
-      tracestate = parent.getTracestate();
+      traceId = parentContext.getTraceId();
+      parentSpanId = parentContext.getSpanId();
+      tracestate = parentContext.getTracestate();
     }
     TraceOptions traceOptions =
         makeSamplingDecision(
-                parent,
+                parentContext,
                 hasRemoteParent,
                 name,
                 sampler,
@@ -96,22 +96,33 @@
                 activeTraceParams)
             ? SAMPLED_TRACE_OPTIONS
             : NOT_SAMPLED_TRACE_OPTIONS;
-    Span span =
-        (traceOptions.isSampled() || Boolean.TRUE.equals(recordEvents))
-            ? RecordEventsSpanImpl.startSpan(
-                SpanContext.create(traceId, spanId, traceOptions, tracestate),
-                name,
-                kind,
-                parentSpanId,
-                hasRemoteParent,
-                activeTraceParams,
-                options.startEndHandler,
-                timestampConverter,
-                options.clock)
-            : NoRecordEventsSpanImpl.create(
-                SpanContext.create(traceId, spanId, traceOptions, tracestate));
-    linkSpans(span, parentLinks);
-    return span;
+
+    if (traceOptions.isSampled() || Boolean.TRUE.equals(recordEvents)) {
+      // Pass the timestamp converter from the parent to ensure that the recorded events are in
+      // the right order. Implementation uses System.nanoTime() which is monotonically increasing.
+      TimestampConverter timestampConverter = null;
+      if (parentSpan instanceof RecordEventsSpanImpl) {
+        RecordEventsSpanImpl parentRecordEventsSpan = (RecordEventsSpanImpl) parentSpan;
+        timestampConverter = parentRecordEventsSpan.getTimestampConverter();
+        parentRecordEventsSpan.addChild();
+      }
+      Span span =
+          RecordEventsSpanImpl.startSpan(
+              SpanContext.create(traceId, spanId, traceOptions, tracestate),
+              name,
+              kind,
+              parentSpanId,
+              hasRemoteParent,
+              activeTraceParams,
+              options.startEndHandler,
+              timestampConverter,
+              options.clock);
+      linkSpans(span, parentLinks);
+      return span;
+    } else {
+      return NoRecordEventsSpanImpl.create(
+          SpanContext.create(traceId, spanId, traceOptions, tracestate));
+    }
   }
 
   private static boolean makeSamplingDecision(
@@ -179,34 +190,28 @@
 
   @Override
   public Span startSpan() {
-    SpanContext parentContext = remoteParentSpanContext;
-    Boolean hasRemoteParent = Boolean.TRUE;
-    TimestampConverter timestampConverter = null;
-    if (remoteParentSpanContext == null) {
+    if (remoteParentSpanContext != null) {
+      return startSpanInternal(
+          remoteParentSpanContext,
+          Boolean.TRUE,
+          name,
+          sampler,
+          parentLinks,
+          recordEvents,
+          kind,
+          null);
+    } else {
       // This is not a child of a remote Span. Get the parent SpanContext from the parent Span if
       // any.
-      Span parent = this.parent;
-      hasRemoteParent = Boolean.FALSE;
+      SpanContext parentContext = null;
+      Boolean hasRemoteParent = null;
       if (parent != null) {
         parentContext = parent.getContext();
-        // Pass the timestamp converter from the parent to ensure that the recorded events are in
-        // the right order. Implementation uses System.nanoTime() which is monotonically increasing.
-        if (parent instanceof RecordEventsSpanImpl) {
-          timestampConverter = ((RecordEventsSpanImpl) parent).getTimestampConverter();
-        }
-      } else {
-        hasRemoteParent = null;
+        hasRemoteParent = Boolean.FALSE;
       }
+      return startSpanInternal(
+          parentContext, hasRemoteParent, name, sampler, parentLinks, recordEvents, kind, parent);
     }
-    return startSpanInternal(
-        parentContext,
-        hasRemoteParent,
-        name,
-        sampler,
-        parentLinks,
-        recordEvents,
-        kind,
-        timestampConverter);
   }
 
   static final class Options {
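Editor's note: the builder now hands the parent Span itself to startSpanInternal, so a recorded child reuses the parent's TimestampConverter (keeping event timestamps ordered) and bumps the parent's child count. A usage sketch with the public Tracer API; the class name is illustrative.

    import io.opencensus.trace.Span;
    import io.opencensus.trace.Tracer;
    import io.opencensus.trace.Tracing;

    final class ParentChildSketch {
      private static final Tracer TRACER = Tracing.getTracer();

      static void run() {
        Span parent = TRACER.spanBuilder("parent").startSpan();
        // When both spans record events, the child shares the parent's timestamp converter
        // and the parent's numberOfChildren is incremented via addChild().
        Span child = TRACER.spanBuilderWithExplicitParent("child", parent).startSpan();
        child.end();
        parent.end();
      }
    }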
diff --git a/impl_core/src/main/java/io/opencensus/implcore/trace/StartEndHandlerImpl.java b/impl_core/src/main/java/io/opencensus/implcore/trace/StartEndHandlerImpl.java
index 6adaa20..9533183 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/trace/StartEndHandlerImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/trace/StartEndHandlerImpl.java
@@ -18,7 +18,7 @@
 
 import io.opencensus.implcore.internal.EventQueue;
 import io.opencensus.implcore.trace.RecordEventsSpanImpl.StartEndHandler;
-import io.opencensus.implcore.trace.export.RunningSpanStoreImpl;
+import io.opencensus.implcore.trace.export.InProcessRunningSpanStore;
 import io.opencensus.implcore.trace.export.SampledSpanStoreImpl;
 import io.opencensus.implcore.trace.export.SpanExporterImpl;
 import io.opencensus.trace.Span.Options;
@@ -33,80 +33,77 @@
 @ThreadSafe
 public final class StartEndHandlerImpl implements StartEndHandler {
   private final SpanExporterImpl spanExporter;
-  @Nullable private final RunningSpanStoreImpl runningSpanStore;
-  @Nullable private final SampledSpanStoreImpl sampledSpanStore;
+  private final InProcessRunningSpanStore inProcessRunningSpanStore;
+  private final SampledSpanStoreImpl sampledSpanStore;
   private final EventQueue eventQueue;
-  // true if any of (runningSpanStore OR sampledSpanStore) are different than null, which
-  // means the spans with RECORD_EVENTS should be enqueued in the queue.
-  private final boolean enqueueEventForNonSampledSpans;
 
   /**
    * Constructs a new {@code StartEndHandlerImpl}.
    *
    * @param spanExporter the {@code SpanExporter} implementation.
-   * @param runningSpanStore the {@code RunningSpanStore} implementation.
+   * @param inProcessRunningSpanStore the {@code RunningSpanStore} implementation.
    * @param sampledSpanStore the {@code SampledSpanStore} implementation.
    * @param eventQueue the event queue where all the events are enqueued.
    */
   public StartEndHandlerImpl(
       SpanExporterImpl spanExporter,
-      @Nullable RunningSpanStoreImpl runningSpanStore,
-      @Nullable SampledSpanStoreImpl sampledSpanStore,
+      InProcessRunningSpanStore inProcessRunningSpanStore,
+      SampledSpanStoreImpl sampledSpanStore,
       EventQueue eventQueue) {
     this.spanExporter = spanExporter;
-    this.runningSpanStore = runningSpanStore;
+    this.inProcessRunningSpanStore = inProcessRunningSpanStore;
     this.sampledSpanStore = sampledSpanStore;
-    this.enqueueEventForNonSampledSpans = runningSpanStore != null || sampledSpanStore != null;
     this.eventQueue = eventQueue;
   }
 
   @Override
   public void onStart(RecordEventsSpanImpl span) {
-    if (span.getOptions().contains(Options.RECORD_EVENTS) && enqueueEventForNonSampledSpans) {
-      eventQueue.enqueue(new SpanStartEvent(span, runningSpanStore));
+    if (span.getOptions().contains(Options.RECORD_EVENTS)
+        && inProcessRunningSpanStore.getEnabled()) {
+      eventQueue.enqueue(new SpanStartEvent(span, inProcessRunningSpanStore));
     }
   }
 
   @Override
   public void onEnd(RecordEventsSpanImpl span) {
-    if ((span.getOptions().contains(Options.RECORD_EVENTS) && enqueueEventForNonSampledSpans)
+    if ((span.getOptions().contains(Options.RECORD_EVENTS)
+            && (inProcessRunningSpanStore.getEnabled() || sampledSpanStore.getEnabled()))
         || span.getContext().getTraceOptions().isSampled()) {
-      eventQueue.enqueue(new SpanEndEvent(span, spanExporter, runningSpanStore, sampledSpanStore));
+      eventQueue.enqueue(
+          new SpanEndEvent(span, spanExporter, inProcessRunningSpanStore, sampledSpanStore));
     }
   }
 
   // An EventQueue entry that records the start of the span event.
   private static final class SpanStartEvent implements EventQueue.Entry {
     private final RecordEventsSpanImpl span;
-    @Nullable private final RunningSpanStoreImpl activeSpansExporter;
+    private final InProcessRunningSpanStore inProcessRunningSpanStore;
 
-    SpanStartEvent(RecordEventsSpanImpl span, @Nullable RunningSpanStoreImpl activeSpansExporter) {
+    SpanStartEvent(RecordEventsSpanImpl span, InProcessRunningSpanStore inProcessRunningSpanStore) {
       this.span = span;
-      this.activeSpansExporter = activeSpansExporter;
+      this.inProcessRunningSpanStore = inProcessRunningSpanStore;
     }
 
     @Override
     public void process() {
-      if (activeSpansExporter != null) {
-        activeSpansExporter.onStart(span);
-      }
+      inProcessRunningSpanStore.onStart(span);
     }
   }
 
   // An EventQueue entry that records the end of the span event.
   private static final class SpanEndEvent implements EventQueue.Entry {
     private final RecordEventsSpanImpl span;
-    @Nullable private final RunningSpanStoreImpl runningSpanStore;
+    private final InProcessRunningSpanStore inProcessRunningSpanStore;
     private final SpanExporterImpl spanExporter;
     @Nullable private final SampledSpanStoreImpl sampledSpanStore;
 
     SpanEndEvent(
         RecordEventsSpanImpl span,
         SpanExporterImpl spanExporter,
-        @Nullable RunningSpanStoreImpl runningSpanStore,
+        InProcessRunningSpanStore inProcessRunningSpanStore,
         @Nullable SampledSpanStoreImpl sampledSpanStore) {
       this.span = span;
-      this.runningSpanStore = runningSpanStore;
+      this.inProcessRunningSpanStore = inProcessRunningSpanStore;
       this.spanExporter = spanExporter;
       this.sampledSpanStore = sampledSpanStore;
     }
@@ -116,9 +113,7 @@
       if (span.getContext().getTraceOptions().isSampled()) {
         spanExporter.addSpan(span);
       }
-      if (runningSpanStore != null) {
-        runningSpanStore.onEnd(span);
-      }
+      inProcessRunningSpanStore.onEnd(span);
       if (sampledSpanStore != null) {
         sampledSpanStore.considerForSampling(span);
       }
diff --git a/impl_core/src/main/java/io/opencensus/implcore/trace/TracerImpl.java b/impl_core/src/main/java/io/opencensus/implcore/trace/TracerImpl.java
index 48df805..1768361 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/trace/TracerImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/trace/TracerImpl.java
@@ -29,7 +29,7 @@
 public final class TracerImpl extends Tracer {
   private final SpanBuilderImpl.Options spanBuilderOptions;
 
-  TracerImpl(
+  public TracerImpl(
       RandomHandler randomHandler,
       RecordEventsSpanImpl.StartEndHandler startEndHandler,
       Clock clock,
diff --git a/impl_core/src/main/java/io/opencensus/implcore/trace/export/ExportComponentImpl.java b/impl_core/src/main/java/io/opencensus/implcore/trace/export/ExportComponentImpl.java
index 1981738..7eae184 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/trace/export/ExportComponentImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/trace/export/ExportComponentImpl.java
@@ -24,12 +24,12 @@
 
 /** Implementation of the {@link ExportComponent}. */
 public final class ExportComponentImpl extends ExportComponent {
-  private static final int EXPORTER_BUFFER_SIZE = 32;
+  private static final int EXPORTER_BUFFER_SIZE = 2500;
   // Enforces that trace export exports data at least once every 5 seconds.
   private static final Duration EXPORTER_SCHEDULE_DELAY = Duration.create(5, 0);
 
   private final SpanExporterImpl spanExporter;
-  private final RunningSpanStoreImpl runningSpanStore;
+  private final InProcessRunningSpanStore inProcessRunningSpanStore;
   private final SampledSpanStoreImpl sampledSpanStore;
 
   @Override
@@ -38,8 +38,8 @@
   }
 
   @Override
-  public RunningSpanStoreImpl getRunningSpanStore() {
-    return runningSpanStore;
+  public InProcessRunningSpanStore getRunningSpanStore() {
+    return inProcessRunningSpanStore;
   }
 
   @Override
@@ -60,7 +60,7 @@
    * @return a new {@code ExportComponentImpl}.
    */
   public static ExportComponentImpl createWithInProcessStores(EventQueue eventQueue) {
-    return new ExportComponentImpl(true, eventQueue);
+    return new ExportComponentImpl(/* supportInProcessStores= */ true, eventQueue);
   }
 
   /**
@@ -70,7 +70,7 @@
    * @return a new {@code ExportComponentImpl}.
    */
   public static ExportComponentImpl createWithoutInProcessStores(EventQueue eventQueue) {
-    return new ExportComponentImpl(false, eventQueue);
+    return new ExportComponentImpl(/* supportInProcessStores= */ false, eventQueue);
   }
 
   /**
@@ -81,10 +81,7 @@
    */
   private ExportComponentImpl(boolean supportInProcessStores, EventQueue eventQueue) {
     this.spanExporter = SpanExporterImpl.create(EXPORTER_BUFFER_SIZE, EXPORTER_SCHEDULE_DELAY);
-    this.runningSpanStore =
-        supportInProcessStores
-            ? new InProcessRunningSpanStoreImpl()
-            : RunningSpanStoreImpl.getNoopRunningSpanStoreImpl();
+    this.inProcessRunningSpanStore = InProcessRunningSpanStore.create();
     this.sampledSpanStore =
         supportInProcessStores
             ? new InProcessSampledSpanStoreImpl(eventQueue)
diff --git a/impl_core/src/main/java/io/opencensus/implcore/trace/export/InProcessRunningSpanStore.java b/impl_core/src/main/java/io/opencensus/implcore/trace/export/InProcessRunningSpanStore.java
new file mode 100644
index 0000000..ffc9ae1
--- /dev/null
+++ b/impl_core/src/main/java/io/opencensus/implcore/trace/export/InProcessRunningSpanStore.java
@@ -0,0 +1,163 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.trace.export;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+import io.opencensus.implcore.trace.RecordEventsSpanImpl;
+import io.opencensus.implcore.trace.internal.ConcurrentIntrusiveList;
+import io.opencensus.trace.export.RunningSpanStore;
+import io.opencensus.trace.export.SpanData;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.ThreadSafe;
+
+/** In-process implementation of the {@link RunningSpanStore}. */
+@ThreadSafe
+public final class InProcessRunningSpanStore extends RunningSpanStore {
+  private static final Summary EMPTY_SUMMARY =
+      RunningSpanStore.Summary.create(Collections.<String, PerSpanNameSummary>emptyMap());
+
+  @Nullable private volatile InProcessRunningSpanStoreImpl impl = null;
+
+  static InProcessRunningSpanStore create() {
+    return new InProcessRunningSpanStore();
+  }
+
+  /**
+   * Adds the {@code Span} into the running spans list when the {@code Span} starts.
+   *
+   * @param span the {@code Span} that started.
+   */
+  public void onStart(RecordEventsSpanImpl span) {
+    InProcessRunningSpanStoreImpl impl = this.impl;
+    if (impl != null) {
+      impl.onStart(span);
+    }
+  }
+
+  /**
+   * Removes the {@code Span} from the running spans list when the {@code Span} ends.
+   *
+   * @param span the {@code Span} that ended.
+   */
+  public void onEnd(RecordEventsSpanImpl span) {
+    InProcessRunningSpanStoreImpl impl = this.impl;
+    if (impl != null) {
+      impl.onEnd(span);
+    }
+  }
+
+  /**
+   * Returns {@code true} if the RunningSpanStore is enabled.
+   *
+   * @return {@code true} if the RunningSpanStore is enabled.
+   */
+  public boolean getEnabled() {
+    return this.impl != null;
+  }
+
+  @Override
+  public Summary getSummary() {
+    InProcessRunningSpanStoreImpl impl = this.impl;
+    if (impl != null) {
+      return impl.getSummary();
+    }
+    return EMPTY_SUMMARY;
+  }
+
+  @Override
+  public Collection<SpanData> getRunningSpans(Filter filter) {
+    InProcessRunningSpanStoreImpl impl = this.impl;
+    if (impl != null) {
+      return impl.getRunningSpans(filter);
+    }
+    return Collections.emptyList();
+  }
+
+  @Override
+  public void setMaxNumberOfSpans(int maxNumberOfSpans) {
+    checkArgument(maxNumberOfSpans >= 0, "Invalid negative maxNumberOfSpans");
+    synchronized (this) {
+      InProcessRunningSpanStoreImpl currentImpl = this.impl;
+      if (currentImpl != null) {
+        currentImpl.clear();
+      }
+      this.impl = null;
+      if (maxNumberOfSpans > 0) {
+        impl = new InProcessRunningSpanStoreImpl(maxNumberOfSpans);
+      }
+    }
+  }
+
+  private static final class InProcessRunningSpanStoreImpl {
+    private final ConcurrentIntrusiveList<RecordEventsSpanImpl> runningSpans;
+
+    private InProcessRunningSpanStoreImpl(int maxNumberOfElements) {
+      runningSpans = new ConcurrentIntrusiveList<>(maxNumberOfElements);
+    }
+
+    private void onStart(RecordEventsSpanImpl span) {
+      runningSpans.addElement(span);
+    }
+
+    private void onEnd(RecordEventsSpanImpl span) {
+      // TODO: Count and display when try to remove span that was not present.
+      runningSpans.removeElement(span);
+    }
+
+    private Summary getSummary() {
+      Collection<RecordEventsSpanImpl> allRunningSpans = runningSpans.getAll();
+      Map<String, Integer> numSpansPerName = new HashMap<String, Integer>();
+      for (RecordEventsSpanImpl span : allRunningSpans) {
+        Integer prevValue = numSpansPerName.get(span.getName());
+        numSpansPerName.put(span.getName(), prevValue != null ? prevValue + 1 : 1);
+      }
+      Map<String, PerSpanNameSummary> perSpanNameSummary =
+          new HashMap<String, PerSpanNameSummary>();
+      for (Map.Entry<String, Integer> it : numSpansPerName.entrySet()) {
+        perSpanNameSummary.put(it.getKey(), PerSpanNameSummary.create(it.getValue()));
+      }
+      return Summary.create(perSpanNameSummary);
+    }
+
+    private Collection<SpanData> getRunningSpans(Filter filter) {
+      Collection<RecordEventsSpanImpl> allRunningSpans = runningSpans.getAll();
+      int maxSpansToReturn =
+          filter.getMaxSpansToReturn() == 0 ? allRunningSpans.size() : filter.getMaxSpansToReturn();
+      List<SpanData> ret = new ArrayList<SpanData>(maxSpansToReturn);
+      for (RecordEventsSpanImpl span : allRunningSpans) {
+        if (ret.size() == maxSpansToReturn) {
+          break;
+        }
+        if (span.getName().equals(filter.getSpanName())) {
+          ret.add(span.toSpanData());
+        }
+      }
+      return ret;
+    }
+
+    private void clear() {
+      runningSpans.clear();
+    }
+  }
+}
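Editor's note: a sketch of how the lazily enabled store above is driven through the export API: setMaxNumberOfSpans(n) creates the inner impl with capacity n, and setMaxNumberOfSpans(0) clears and disables it again. The accessors are assumed from opencensus-api; the span name and limits are illustrative.

    import io.opencensus.trace.Tracing;
    import io.opencensus.trace.export.RunningSpanStore;
    import io.opencensus.trace.export.SpanData;
    import java.util.Collection;

    final class RunningSpanStoreSketch {
      static Collection<SpanData> runningWorkSpans() {
        RunningSpanStore store = Tracing.getExportComponent().getRunningSpanStore();
        // Enable the in-process store with a bounded capacity (0 would disable it).
        store.setMaxNumberOfSpans(100);
        // Return up to 10 currently running spans named "work".
        return store.getRunningSpans(RunningSpanStore.Filter.create("work", 10));
      }
    }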
diff --git a/impl_core/src/main/java/io/opencensus/implcore/trace/export/InProcessRunningSpanStoreImpl.java b/impl_core/src/main/java/io/opencensus/implcore/trace/export/InProcessRunningSpanStoreImpl.java
deleted file mode 100644
index f7aeac7..0000000
--- a/impl_core/src/main/java/io/opencensus/implcore/trace/export/InProcessRunningSpanStoreImpl.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright 2018, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.implcore.trace.export;
-
-import io.opencensus.implcore.trace.RecordEventsSpanImpl;
-import io.opencensus.implcore.trace.internal.ConcurrentIntrusiveList;
-import io.opencensus.trace.export.RunningSpanStore;
-import io.opencensus.trace.export.SpanData;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import javax.annotation.concurrent.ThreadSafe;
-
-/** In-process implementation of the {@link RunningSpanStore}. */
-@ThreadSafe
-public final class InProcessRunningSpanStoreImpl extends RunningSpanStoreImpl {
-  private final ConcurrentIntrusiveList<RecordEventsSpanImpl> runningSpans;
-
-  public InProcessRunningSpanStoreImpl() {
-    runningSpans = new ConcurrentIntrusiveList<RecordEventsSpanImpl>();
-  }
-
-  @Override
-  public void onStart(RecordEventsSpanImpl span) {
-    runningSpans.addElement(span);
-  }
-
-  @Override
-  public void onEnd(RecordEventsSpanImpl span) {
-    runningSpans.removeElement(span);
-  }
-
-  @Override
-  public Summary getSummary() {
-    Collection<RecordEventsSpanImpl> allRunningSpans = runningSpans.getAll();
-    Map<String, Integer> numSpansPerName = new HashMap<String, Integer>();
-    for (RecordEventsSpanImpl span : allRunningSpans) {
-      Integer prevValue = numSpansPerName.get(span.getName());
-      numSpansPerName.put(span.getName(), prevValue != null ? prevValue + 1 : 1);
-    }
-    Map<String, PerSpanNameSummary> perSpanNameSummary = new HashMap<String, PerSpanNameSummary>();
-    for (Map.Entry<String, Integer> it : numSpansPerName.entrySet()) {
-      perSpanNameSummary.put(it.getKey(), PerSpanNameSummary.create(it.getValue()));
-    }
-    Summary summary = Summary.create(perSpanNameSummary);
-    return summary;
-  }
-
-  @Override
-  public Collection<SpanData> getRunningSpans(Filter filter) {
-    Collection<RecordEventsSpanImpl> allRunningSpans = runningSpans.getAll();
-    int maxSpansToReturn =
-        filter.getMaxSpansToReturn() == 0 ? allRunningSpans.size() : filter.getMaxSpansToReturn();
-    List<SpanData> ret = new ArrayList<SpanData>(maxSpansToReturn);
-    for (RecordEventsSpanImpl span : allRunningSpans) {
-      if (ret.size() == maxSpansToReturn) {
-        break;
-      }
-      if (span.getName().equals(filter.getSpanName())) {
-        ret.add(span.toSpanData());
-      }
-    }
-    return ret;
-  }
-}
diff --git a/impl_core/src/main/java/io/opencensus/implcore/trace/export/InProcessSampledSpanStoreImpl.java b/impl_core/src/main/java/io/opencensus/implcore/trace/export/InProcessSampledSpanStoreImpl.java
index 0d8e493..951d3d2 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/trace/export/InProcessSampledSpanStoreImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/trace/export/InProcessSampledSpanStoreImpl.java
@@ -279,6 +279,12 @@
   }
 
   @Override
+  public boolean getEnabled() {
+    return true;
+  }
+
+  @Override
+  @SuppressWarnings("deprecation")
   public void registerSpanNamesForCollection(Collection<String> spanNames) {
     eventQueue.enqueue(new RegisterSpanNameEvent(this, spanNames));
   }
@@ -315,6 +321,7 @@
   }
 
   @Override
+  @SuppressWarnings("deprecation")
   public void unregisterSpanNamesForCollection(Collection<String> spanNames) {
     eventQueue.enqueue(new UnregisterSpanNameEvent(this, spanNames));
   }
diff --git a/impl_core/src/main/java/io/opencensus/implcore/trace/export/RunningSpanStoreImpl.java b/impl_core/src/main/java/io/opencensus/implcore/trace/export/RunningSpanStoreImpl.java
deleted file mode 100644
index 962f5b0..0000000
--- a/impl_core/src/main/java/io/opencensus/implcore/trace/export/RunningSpanStoreImpl.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright 2018, OpenCensus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.opencensus.implcore.trace.export;
-
-import io.opencensus.implcore.trace.RecordEventsSpanImpl;
-import io.opencensus.trace.export.RunningSpanStore;
-import io.opencensus.trace.export.SpanData;
-import java.util.Collection;
-import java.util.Collections;
-
-/** Abstract implementation of the {@link RunningSpanStore}. */
-public abstract class RunningSpanStoreImpl extends RunningSpanStore {
-
-  private static final RunningSpanStoreImpl NOOP_RUNNING_SPAN_STORE_IMPL =
-      new NoopRunningSpanStoreImpl();
-
-  /** Returns the no-op implementation of the {@link RunningSpanStoreImpl}. */
-  static RunningSpanStoreImpl getNoopRunningSpanStoreImpl() {
-    return NOOP_RUNNING_SPAN_STORE_IMPL;
-  }
-
-  /**
-   * Adds the {@code Span} into the running spans list when the {@code Span} starts.
-   *
-   * @param span the {@code Span} that started.
-   */
-  public abstract void onStart(RecordEventsSpanImpl span);
-
-  /**
-   * Removes the {@code Span} from the running spans list when the {@code Span} ends.
-   *
-   * @param span the {@code Span} that ended.
-   */
-  public abstract void onEnd(RecordEventsSpanImpl span);
-
-  private static final class NoopRunningSpanStoreImpl extends RunningSpanStoreImpl {
-
-    private static final Summary EMPTY_SUMMARY =
-        RunningSpanStore.Summary.create(Collections.<String, PerSpanNameSummary>emptyMap());
-
-    @Override
-    public void onStart(RecordEventsSpanImpl span) {}
-
-    @Override
-    public void onEnd(RecordEventsSpanImpl span) {}
-
-    @Override
-    public Summary getSummary() {
-      return EMPTY_SUMMARY;
-    }
-
-    @Override
-    public Collection<SpanData> getRunningSpans(Filter filter) {
-      return Collections.<SpanData>emptyList();
-    }
-  }
-}
diff --git a/impl_core/src/main/java/io/opencensus/implcore/trace/export/SampledSpanStoreImpl.java b/impl_core/src/main/java/io/opencensus/implcore/trace/export/SampledSpanStoreImpl.java
index e67c2f8..3a882a1 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/trace/export/SampledSpanStoreImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/trace/export/SampledSpanStoreImpl.java
@@ -41,6 +41,13 @@
    */
   public abstract void considerForSampling(RecordEventsSpanImpl span);
 
+  /**
+   * Returns {@code true} if the SampledSpanStore is enabled.
+   *
+   * @return {@code true} if the SampledSpanStore is enabled.
+   */
+  public abstract boolean getEnabled();
+
   protected void shutdown() {}
 
   private static final class NoopSampledSpanStoreImpl extends SampledSpanStoreImpl {
@@ -58,9 +65,16 @@
     public void considerForSampling(RecordEventsSpanImpl span) {}
 
     @Override
+    public boolean getEnabled() {
+      return false;
+    }
+
+    @Override
+    @SuppressWarnings("deprecation")
     public void registerSpanNamesForCollection(Collection<String> spanNames) {}
 
     @Override
+    @SuppressWarnings("deprecation")
     public void unregisterSpanNamesForCollection(Collection<String> spanNames) {}
 
     @Override
diff --git a/impl_core/src/main/java/io/opencensus/implcore/trace/export/SpanExporterImpl.java b/impl_core/src/main/java/io/opencensus/implcore/trace/export/SpanExporterImpl.java
index 51a7b05..32fb6c5 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/trace/export/SpanExporterImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/trace/export/SpanExporterImpl.java
@@ -18,8 +18,15 @@
 
 import com.google.common.annotations.VisibleForTesting;
 import io.opencensus.common.Duration;
+import io.opencensus.common.ToLongFunction;
+import io.opencensus.implcore.internal.CheckerFrameworkUtils;
 import io.opencensus.implcore.internal.DaemonThreadFactory;
 import io.opencensus.implcore.trace.RecordEventsSpanImpl;
+import io.opencensus.metrics.DerivedLongCumulative;
+import io.opencensus.metrics.DerivedLongGauge;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.MetricOptions;
+import io.opencensus.metrics.Metrics;
 import io.opencensus.trace.export.ExportComponent;
 import io.opencensus.trace.export.SpanData;
 import io.opencensus.trace.export.SpanExporter;
@@ -32,9 +39,37 @@
 import java.util.logging.Logger;
 import javax.annotation.concurrent.GuardedBy;
 
+/*>>>
+import org.checkerframework.checker.nullness.qual.Nullable;
+*/
+
 /** Implementation of the {@link SpanExporter}. */
 public final class SpanExporterImpl extends SpanExporter {
   private static final Logger logger = Logger.getLogger(ExportComponent.class.getName());
+  private static final DerivedLongCumulative droppedSpans =
+      Metrics.getMetricRegistry()
+          .addDerivedLongCumulative(
+              "oc_worker_spans_dropped",
+              MetricOptions.builder()
+                  .setDescription("Number of spans dropped by the exporter thread.")
+                  .setUnit("1")
+                  .build());
+  private static final DerivedLongCumulative pushedSpans =
+      Metrics.getMetricRegistry()
+          .addDerivedLongCumulative(
+              "oc_worker_spans_pushed",
+              MetricOptions.builder()
+                  .setDescription("Number of spans pushed by the exporter thread to the exporter.")
+                  .setUnit("1")
+                  .build());
+  private static final DerivedLongGauge referencedSpans =
+      Metrics.getMetricRegistry()
+          .addDerivedLongGauge(
+              "oc_worker_spans_referenced",
+              MetricOptions.builder()
+                  .setDescription("Current number of spans referenced by the exporter thread.")
+                  .setUnit("1")
+                  .build());
 
   private final Worker worker;
   private final Thread workerThread;
@@ -88,6 +123,42 @@
         new DaemonThreadFactory("ExportComponent.ServiceExporterThread").newThread(worker);
     this.workerThread.start();
     this.worker = worker;
+    droppedSpans.createTimeSeries(
+        Collections.<LabelValue>emptyList(), this.worker, new ReportDroppedSpans());
+    referencedSpans.createTimeSeries(
+        Collections.<LabelValue>emptyList(), this.worker, new ReportReferencedSpans());
+    pushedSpans.createTimeSeries(
+        Collections.<LabelValue>emptyList(), this.worker, new ReportPushedSpans());
+  }
+
+  private static class ReportDroppedSpans implements ToLongFunction</*@Nullable*/ Worker> {
+    @Override
+    public long applyAsLong(/*@Nullable*/ Worker worker) {
+      if (worker == null) {
+        return 0;
+      }
+      return worker.getDroppedSpans();
+    }
+  }
+
+  private static class ReportReferencedSpans implements ToLongFunction</*@Nullable*/ Worker> {
+    @Override
+    public long applyAsLong(/*@Nullable*/ Worker worker) {
+      if (worker == null) {
+        return 0;
+      }
+      return worker.getReferencedSpans();
+    }
+  }
+
+  private static class ReportPushedSpans implements ToLongFunction</*@Nullable*/ Worker> {
+    @Override
+    public long applyAsLong(/*@Nullable*/ Worker worker) {
+      if (worker == null) {
+        return 0;
+      }
+      return worker.getPushedSpans();
+    }
   }
 
   @VisibleForTesting
@@ -95,6 +166,21 @@
     return workerThread;
   }
 
+  @VisibleForTesting
+  long getDroppedSpans() {
+    return worker.getDroppedSpans();
+  }
+
+  @VisibleForTesting
+  long getReferencedSpans() {
+    return worker.getReferencedSpans();
+  }
+
+  @VisibleForTesting
+  long getPushedSpans() {
+    return worker.getPushedSpans();
+  }
+
   // Worker in a thread that batches multiple span data and calls the registered services to export
   // that data.
   //
@@ -110,15 +196,30 @@
     @GuardedBy("monitor")
     private final List<RecordEventsSpanImpl> spans;
 
-    private final Map<String, Handler> serviceHandlers = new ConcurrentHashMap<String, Handler>();
+    @GuardedBy("monitor")
+    private long referencedSpans = 0;
+
+    @GuardedBy("monitor")
+    private long droppedSpans = 0;
+
+    @GuardedBy("monitor")
+    private long pushedSpans = 0;
+
+    private final Map<String, Handler> serviceHandlers = new ConcurrentHashMap<>();
     private final int bufferSize;
+    private final long maxReferencedSpans;
     private final long scheduleDelayMillis;
 
     // See SpanExporterImpl#addSpan.
     private void addSpan(RecordEventsSpanImpl span) {
       synchronized (monitor) {
+        if (referencedSpans == maxReferencedSpans) {
+          droppedSpans++;
+          return;
+        }
         this.spans.add(span);
-        if (spans.size() > bufferSize) {
+        referencedSpans++;
+        if (spans.size() >= bufferSize) {
           monitor.notifyAll();
         }
       }
@@ -152,27 +253,22 @@
     }
 
     private Worker(int bufferSize, Duration scheduleDelay) {
-      spans = new ArrayList<RecordEventsSpanImpl>(bufferSize);
+      spans = new ArrayList<>(bufferSize);
       this.bufferSize = bufferSize;
+      // We notify the worker thread when bufferSize elements are in the queue, so we will most
+      // likely have to process more than bufferSize elements, but fewer than 2 * bufferSize, in
+      // that cycle. During the processing time we want to allow the same number of elements to be
+      // queued, so we estimate a maximum of 4 * bufferSize referenced elements.
+      this.maxReferencedSpans = 4L * bufferSize;
       this.scheduleDelayMillis = scheduleDelay.toMillis();
     }
 
-    // Returns an unmodifiable list of all buffered spans data to ensure that any registered
-    // service handler cannot modify the list.
-    private static List<SpanData> fromSpanImplToSpanData(List<RecordEventsSpanImpl> spans) {
-      List<SpanData> spanDatas = new ArrayList<SpanData>(spans.size());
-      for (RecordEventsSpanImpl span : spans) {
-        spanDatas.add(span.toSpanData());
-      }
-      return Collections.unmodifiableList(spanDatas);
-    }
-
     @Override
     public void run() {
       while (true) {
         // Copy all the batched spans in a separate list to release the monitor lock asap to
         // avoid blocking the producer thread.
-        List<RecordEventsSpanImpl> spansCopy;
+        ArrayList<RecordEventsSpanImpl> spansCopy;
         synchronized (monitor) {
           if (spans.size() < bufferSize) {
             do {
@@ -187,27 +283,77 @@
               }
             } while (spans.isEmpty());
           }
-          spansCopy = new ArrayList<RecordEventsSpanImpl>(spans);
+          spansCopy = new ArrayList<>(spans);
           spans.clear();
         }
         // Execute the batch export outside the synchronized to not block all producers.
-        final List<SpanData> spanDataList = fromSpanImplToSpanData(spansCopy);
-        if (!spanDataList.isEmpty()) {
-          onBatchExport(spanDataList);
-        }
+        exportBatches(spansCopy);
       }
     }
 
-    void flush() {
-      List<RecordEventsSpanImpl> spansCopy;
+    private void flush() {
+      ArrayList<RecordEventsSpanImpl> spansCopy;
       synchronized (monitor) {
-        spansCopy = new ArrayList<RecordEventsSpanImpl>(spans);
+        spansCopy = new ArrayList<>(spans);
         spans.clear();
       }
+      // Execute the batch export outside the synchronized to not block all producers.
+      exportBatches(spansCopy);
+    }
 
-      final List<SpanData> spanDataList = fromSpanImplToSpanData(spansCopy);
+    private long getDroppedSpans() {
+      synchronized (monitor) {
+        return droppedSpans;
+      }
+    }
+
+    private long getReferencedSpans() {
+      synchronized (monitor) {
+        return referencedSpans;
+      }
+    }
+
+    private long getPushedSpans() {
+      synchronized (monitor) {
+        return pushedSpans;
+      }
+    }
+
+    private void exportBatches(ArrayList<RecordEventsSpanImpl> spansCopy) {
+      ArrayList<SpanData> spanDataList = new ArrayList<>(bufferSize);
+      for (int i = 0; i < spansCopy.size(); i++) {
+        spanDataList.add(spansCopy.get(i).toSpanData());
+
+        // Remove the reference to the RecordEventsSpanImpl to allow GC to free the memory.
+        // TODO: Refactor the handling of the spans list to remove this call to castNonNull. Setting
+        // the elements to null after they are read is safe because 'exportBatches' is only ever
+        // called on a copy of 'spans', and the copy is not reused.
+        spansCopy.set(i, (RecordEventsSpanImpl) CheckerFrameworkUtils.castNonNull(null));
+
+        if (spanDataList.size() == bufferSize) {
+          // One full batch, export it now. Wrap the list with unmodifiableList to ensure exporter
+          // does not change the list.
+          onBatchExport(Collections.unmodifiableList(spanDataList));
+          // Cannot clear because the exporter may still have a reference to this list (e.g. async
+          // scheduled work), so just create a new list.
+          spanDataList = new ArrayList<>(bufferSize);
+          // We removed reference for bufferSize Spans.
+          synchronized (monitor) {
+            referencedSpans -= bufferSize;
+            pushedSpans += bufferSize;
+          }
+        }
+      }
+      // Last incomplete batch, send this as well.
       if (!spanDataList.isEmpty()) {
-        onBatchExport(spanDataList);
+        // Wrap the list with unmodifiableList to ensure exporter does not change the list.
+        onBatchExport(Collections.unmodifiableList(spanDataList));
+        // We removed reference for spanDataList.size() Spans.
+        synchronized (monitor) {
+          referencedSpans -= spanDataList.size();
+          pushedSpans += spanDataList.size();
+        }
+        spanDataList.clear();
       }
     }
   }
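Editor's note: the dropped/referenced/pushed counters above use the derived-metric pattern from the opencensus metrics API: each time series keeps a reference to an object plus a ToLongFunction that reads the current value at collection time. A standalone sketch of the same pattern; the metric name and the DerivedMetricSketch class are illustrative.

    import io.opencensus.common.ToLongFunction;
    import io.opencensus.metrics.DerivedLongCumulative;
    import io.opencensus.metrics.LabelValue;
    import io.opencensus.metrics.MetricOptions;
    import io.opencensus.metrics.Metrics;
    import java.util.Collections;
    import java.util.concurrent.atomic.AtomicLong;

    final class DerivedMetricSketch {
      private final AtomicLong processedItems = new AtomicLong();

      void register() {
        DerivedLongCumulative processed =
            Metrics.getMetricRegistry()
                .addDerivedLongCumulative(
                    "example_items_processed",
                    MetricOptions.builder()
                        .setDescription("Items processed by the example worker.")
                        .setUnit("1")
                        .build());
        // The value is pulled from this object every time metrics are collected.
        processed.createTimeSeries(
            Collections.<LabelValue>emptyList(),
            this,
            new ToLongFunction<DerivedMetricSketch>() {
              @Override
              public long applyAsLong(DerivedMetricSketch sketch) {
                return sketch == null ? 0 : sketch.processedItems.get();
              }
            });
      }
    }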
diff --git a/impl_core/src/main/java/io/opencensus/implcore/trace/internal/ConcurrentIntrusiveList.java b/impl_core/src/main/java/io/opencensus/implcore/trace/internal/ConcurrentIntrusiveList.java
index 22d8e41..5f195dd 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/trace/internal/ConcurrentIntrusiveList.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/trace/internal/ConcurrentIntrusiveList.java
@@ -18,7 +18,6 @@
 
 import static com.google.common.base.Preconditions.checkArgument;
 
-import io.opencensus.implcore.internal.CheckerFrameworkUtils;
 import io.opencensus.implcore.trace.internal.ConcurrentIntrusiveList.Element;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -61,21 +60,36 @@
  */
 @ThreadSafe
 public final class ConcurrentIntrusiveList<T extends Element<T>> {
+  private final int capacity;
   private int size = 0;
   @Nullable private T head = null;
 
-  public ConcurrentIntrusiveList() {}
+  /**
+   * Constructs a new {@code ConcurrentIntrusiveList}.
+   *
+   * @param capacity must be greater than {@code 0}.
+   */
+  public ConcurrentIntrusiveList(int capacity) {
+    checkArgument(capacity > 0, "Capacity needs to be greater than 0.");
+    this.capacity = capacity;
+  }
 
   /**
-   * Adds the given {@code element} to the list.
+   * Adds the given {@code element} to the list, unless the element is already in a list or the
+   * list has reached its capacity.
    *
    * @param element the element to add.
-   * @throws IllegalArgumentException if the element is already in a list.
+   * @return {@code true} if the element was added; {@code false} if the element is already in a
+   *     list or the list is at capacity.
    */
-  public synchronized void addElement(T element) {
-    checkArgument(
-        element.getNext() == null && element.getPrev() == null && element != head,
-        "Element already in a list.");
+  public synchronized boolean addElement(T element) {
+    if (element.getNext() != null
+        || element.getPrev() != null
+        || element == head
+        || size >= capacity) {
+      // Element already in a list, or this list is at capacity.
+      return false;
+    }
     size++;
     if (head == null) {
       head = element;
@@ -84,38 +98,42 @@
       element.setNext(head);
       head = element;
     }
+    return true;
   }
 
   /**
    * Removes the given {@code element} from the list.
    *
    * @param element the element to remove.
-   * @throws IllegalArgumentException if the element is not in the list.
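+   * @return {@code false} if the element was not in the list, {@code true} otherwise.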
    */
-  public synchronized void removeElement(T element) {
-    checkArgument(
-        element.getNext() != null || element.getPrev() != null || element == head,
-        "Element not in the list.");
+  public synchronized boolean removeElement(T element) {
+    if (element.getNext() == null && element.getPrev() == null && element != head) {
+      // Element not in the list.
+      return false;
+    }
     size--;
-    if (element.getPrev() == null) {
+    T prev = element.getPrev();
+    T next = element.getNext();
+    if (prev == null) {
       // This is the first element
-      head = element.getNext();
+      head = next;
       if (head != null) {
         // If more than one element in the list.
         head.setPrev(null);
         element.setNext(null);
       }
-    } else if (element.getNext() == null) {
+    } else if (next == null) {
       // This is the last element, and there is at least another element because
       // element.getPrev() != null.
-      CheckerFrameworkUtils.castNonNull(element.getPrev()).setNext(null);
+      prev.setNext(null);
       element.setPrev(null);
     } else {
-      CheckerFrameworkUtils.castNonNull(element.getPrev()).setNext(element.getNext());
-      CheckerFrameworkUtils.castNonNull(element.getNext()).setPrev(element.getPrev());
+      prev.setNext(next);
+      next.setPrev(prev);
       element.setNext(null);
       element.setPrev(null);
     }
+    return true;
   }
 
   /**
@@ -127,6 +145,19 @@
     return size;
   }
 
+  /** Clears all the elements from the list. */
+  public synchronized void clear() {
+    while (true) {
+      T currentHead = head;
+      if (currentHead == null) {
+        // No more elements in the list.
+        return;
+      }
+      // This will move the head.
+      removeElement(currentHead);
+    }
+  }
+
   /**
    * Returns all the elements from this list.
    *
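
With these changes the intrusive list is constructed with a fixed capacity, addElement and removeElement report success with a boolean instead of throwing, and a new clear() method empties the list. A minimal usage sketch, assuming a hypothetical SampleElement that implements the nested Element contract (the real element type in this tree is RecordEventsSpanImpl):

import io.opencensus.implcore.trace.internal.ConcurrentIntrusiveList;
import io.opencensus.implcore.trace.internal.ConcurrentIntrusiveList.Element;

final class IntrusiveListSketch {
  // Hypothetical element: it stores its own prev/next links, so it belongs to one list at a time.
  static final class SampleElement implements Element<SampleElement> {
    private SampleElement next;
    private SampleElement prev;

    @Override
    public SampleElement getNext() {
      return next;
    }

    @Override
    public void setNext(SampleElement element) {
      next = element;
    }

    @Override
    public SampleElement getPrev() {
      return prev;
    }

    @Override
    public void setPrev(SampleElement element) {
      prev = element;
    }
  }

  public static void main(String[] args) {
    // Capacity is now required and must be greater than 0.
    ConcurrentIntrusiveList<SampleElement> list = new ConcurrentIntrusiveList<>(2);
    System.out.println(list.addElement(new SampleElement())); // true
    System.out.println(list.addElement(new SampleElement())); // true
    System.out.println(list.addElement(new SampleElement())); // false: list is at capacity
    list.clear();
    System.out.println(list.size()); // 0
  }
}

Returning false instead of throwing presumably keeps exception handling off the span-recording hot path and lets callers count rejected elements.
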
diff --git a/impl_core/src/main/java/io/opencensus/implcore/trace/propagation/PropagationComponentImpl.java b/impl_core/src/main/java/io/opencensus/implcore/trace/propagation/PropagationComponentImpl.java
index f608543..acd9ecf 100644
--- a/impl_core/src/main/java/io/opencensus/implcore/trace/propagation/PropagationComponentImpl.java
+++ b/impl_core/src/main/java/io/opencensus/implcore/trace/propagation/PropagationComponentImpl.java
@@ -23,7 +23,8 @@
 /** Implementation of the {@link PropagationComponent}. */
 public class PropagationComponentImpl extends PropagationComponent {
   private final BinaryFormat binaryFormat = new BinaryFormatImpl();
-  private final B3Format b3Format = new B3Format();
+  private final TextFormat b3Format = new B3Format();
+  private final TextFormat traceContextFormat = new TraceContextFormat();
 
   @Override
   public BinaryFormat getBinaryFormat() {
@@ -34,4 +35,9 @@
   public TextFormat getB3Format() {
     return b3Format;
   }
+
+  @Override
+  public TextFormat getTraceContextFormat() {
+    return traceContextFormat;
+  }
 }
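
The new accessor exposes the W3C trace-context format through the same TextFormat surface as the existing B3 format. A short sketch of injecting and extracting a SpanContext through it, using a plain Map as the carrier (illustrative only; it assumes an OpenCensus implementation such as opencensus-impl is registered):

import io.opencensus.trace.SpanContext;
import io.opencensus.trace.Tracing;
import io.opencensus.trace.propagation.SpanContextParseException;
import io.opencensus.trace.propagation.TextFormat;
import java.util.HashMap;
import java.util.Map;

final class TraceContextUsageSketch {
  // Obtained the same way as getB3Format(), but returning the W3C format added above.
  private static final TextFormat FORMAT =
      Tracing.getPropagationComponent().getTraceContextFormat();

  private static final TextFormat.Setter<Map<String, String>> SETTER =
      new TextFormat.Setter<Map<String, String>>() {
        @Override
        public void put(Map<String, String> carrier, String key, String value) {
          carrier.put(key, value);
        }
      };

  private static final TextFormat.Getter<Map<String, String>> GETTER =
      new TextFormat.Getter<Map<String, String>>() {
        @Override
        public String get(Map<String, String> carrier, String key) {
          return carrier.get(key);
        }
      };

  public static void main(String[] args) throws SpanContextParseException {
    Map<String, String> headers = new HashMap<>();
    // Writes the "traceparent" header (and "tracestate" when there are entries).
    FORMAT.inject(Tracing.getTracer().getCurrentSpan().getContext(), headers, SETTER);
    // On the receiving side, the headers are parsed back into a SpanContext.
    SpanContext extracted = FORMAT.extract(headers, GETTER);
    System.out.println(headers.get("traceparent") + " -> " + extracted);
  }
}
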
diff --git a/impl_core/src/main/java/io/opencensus/implcore/trace/propagation/TraceContextFormat.java b/impl_core/src/main/java/io/opencensus/implcore/trace/propagation/TraceContextFormat.java
new file mode 100644
index 0000000..003cf61
--- /dev/null
+++ b/impl_core/src/main/java/io/opencensus/implcore/trace/propagation/TraceContextFormat.java
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.trace.propagation;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Splitter;
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.SpanId;
+import io.opencensus.trace.TraceId;
+import io.opencensus.trace.TraceOptions;
+import io.opencensus.trace.Tracestate;
+import io.opencensus.trace.propagation.SpanContextParseException;
+import io.opencensus.trace.propagation.TextFormat;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.regex.Pattern;
+
+/*>>>
+import org.checkerframework.checker.nullness.qual.NonNull;
+*/
+
+/**
+ * Implementation of the TraceContext propagation protocol. See <a
+ * href="https://github.com/w3c/distributed-tracing">w3c/distributed-tracing</a>.
+ */
+public class TraceContextFormat extends TextFormat {
+  private static final Tracestate TRACESTATE_DEFAULT = Tracestate.builder().build();
+  @VisibleForTesting static final String TRACEPARENT = "traceparent";
+  @VisibleForTesting static final String TRACESTATE = "tracestate";
+  private static final List<String> FIELDS =
+      Collections.unmodifiableList(Arrays.asList(TRACEPARENT, TRACESTATE));
+
+  private static final String VERSION = "00";
+  private static final int VERSION_SIZE = 2;
+  private static final char TRACEPARENT_DELIMITER = '-';
+  private static final int TRACEPARENT_DELIMITER_SIZE = 1;
+  private static final int TRACE_ID_HEX_SIZE = 2 * TraceId.SIZE;
+  private static final int SPAN_ID_HEX_SIZE = 2 * SpanId.SIZE;
+  private static final int TRACE_OPTION_HEX_SIZE = 2 * TraceOptions.SIZE;
+  private static final int TRACE_ID_OFFSET = VERSION_SIZE + TRACEPARENT_DELIMITER_SIZE;
+  private static final int SPAN_ID_OFFSET =
+      TRACE_ID_OFFSET + TRACE_ID_HEX_SIZE + TRACEPARENT_DELIMITER_SIZE;
+  private static final int TRACE_OPTION_OFFSET =
+      SPAN_ID_OFFSET + SPAN_ID_HEX_SIZE + TRACEPARENT_DELIMITER_SIZE;
+  private static final int TRACEPARENT_HEADER_SIZE = TRACE_OPTION_OFFSET + TRACE_OPTION_HEX_SIZE;
+  private static final int TRACESTATE_MAX_SIZE = 512;
+  private static final int TRACESTATE_MAX_MEMBERS = 32;
+  private static final char TRACESTATE_KEY_VALUE_DELIMITER = '=';
+  private static final char TRACESTATE_ENTRY_DELIMITER = ',';
+  private static final Splitter TRACESTATE_ENTRY_DELIMITER_SPLITTER =
+      Splitter.on(Pattern.compile("[ \t]*" + TRACESTATE_ENTRY_DELIMITER + "[ \t]*"));
+
+  @Override
+  public List<String> fields() {
+    return FIELDS;
+  }
+
+  @Override
+  public <C /*>>> extends @NonNull Object*/> void inject(
+      SpanContext spanContext, C carrier, Setter<C> setter) {
+    checkNotNull(spanContext, "spanContext");
+    checkNotNull(setter, "setter");
+    checkNotNull(carrier, "carrier");
+    char[] chars = new char[TRACEPARENT_HEADER_SIZE];
+    chars[0] = VERSION.charAt(0);
+    chars[1] = VERSION.charAt(1);
+    chars[2] = TRACEPARENT_DELIMITER;
+    spanContext.getTraceId().copyLowerBase16To(chars, TRACE_ID_OFFSET);
+    chars[SPAN_ID_OFFSET - 1] = TRACEPARENT_DELIMITER;
+    spanContext.getSpanId().copyLowerBase16To(chars, SPAN_ID_OFFSET);
+    chars[TRACE_OPTION_OFFSET - 1] = TRACEPARENT_DELIMITER;
+    spanContext.getTraceOptions().copyLowerBase16To(chars, TRACE_OPTION_OFFSET);
+    setter.put(carrier, TRACEPARENT, new String(chars));
+    List<Tracestate.Entry> entries = spanContext.getTracestate().getEntries();
+    if (entries.isEmpty()) {
+      // No need to add an empty "tracestate" header.
+      return;
+    }
+    StringBuilder stringBuilder = new StringBuilder(TRACESTATE_MAX_SIZE);
+    for (Tracestate.Entry entry : entries) {
+      if (stringBuilder.length() != 0) {
+        stringBuilder.append(TRACESTATE_ENTRY_DELIMITER);
+      }
+      stringBuilder
+          .append(entry.getKey())
+          .append(TRACESTATE_KEY_VALUE_DELIMITER)
+          .append(entry.getValue());
+    }
+    setter.put(carrier, TRACESTATE, stringBuilder.toString());
+  }
+
+  @Override
+  public <C /*>>> extends @NonNull Object*/> SpanContext extract(C carrier, Getter<C> getter)
+      throws SpanContextParseException {
+    checkNotNull(carrier, "carrier");
+    checkNotNull(getter, "getter");
+    TraceId traceId;
+    SpanId spanId;
+    TraceOptions traceOptions;
+    String traceparent = getter.get(carrier, TRACEPARENT);
+    if (traceparent == null) {
+      throw new SpanContextParseException("Traceparent not present");
+    }
+    try {
+      // TODO(bdrutu): Do we need to verify that the version is hex and that the header length
+      // matches the one expected for that version?
+      checkArgument(
+          traceparent.charAt(TRACE_OPTION_OFFSET - 1) == TRACEPARENT_DELIMITER
+              && (traceparent.length() == TRACEPARENT_HEADER_SIZE
+                  || (traceparent.length() > TRACEPARENT_HEADER_SIZE
+                      && traceparent.charAt(TRACEPARENT_HEADER_SIZE) == TRACEPARENT_DELIMITER))
+              && traceparent.charAt(SPAN_ID_OFFSET - 1) == TRACEPARENT_DELIMITER
+              && traceparent.charAt(TRACE_OPTION_OFFSET - 1) == TRACEPARENT_DELIMITER,
+          "Missing or malformed TRACEPARENT.");
+
+      traceId = TraceId.fromLowerBase16(traceparent, TRACE_ID_OFFSET);
+      spanId = SpanId.fromLowerBase16(traceparent, SPAN_ID_OFFSET);
+      traceOptions = TraceOptions.fromLowerBase16(traceparent, TRACE_OPTION_OFFSET);
+    } catch (IllegalArgumentException e) {
+      throw new SpanContextParseException("Invalid traceparent: " + traceparent, e);
+    }
+
+    String tracestate = getter.get(carrier, TRACESTATE);
+    try {
+      if (tracestate == null || tracestate.isEmpty()) {
+        return SpanContext.create(traceId, spanId, traceOptions, TRACESTATE_DEFAULT);
+      }
+      Tracestate.Builder tracestateBuilder = Tracestate.builder();
+      List<String> listMembers = TRACESTATE_ENTRY_DELIMITER_SPLITTER.splitToList(tracestate);
+      checkArgument(
+          listMembers.size() <= TRACESTATE_MAX_MEMBERS, "Tracestate has too many elements.");
+      // Iterate in reverse order because the builder prepends each entry that is set on it, so
+      // this preserves the original ordering of the tracestate members.
+      for (int i = listMembers.size() - 1; i >= 0; i--) {
+        String listMember = listMembers.get(i);
+        int index = listMember.indexOf(TRACESTATE_KEY_VALUE_DELIMITER);
+        checkArgument(index != -1, "Invalid tracestate list-member format.");
+        tracestateBuilder.set(
+            listMember.substring(0, index), listMember.substring(index + 1, listMember.length()));
+      }
+      return SpanContext.create(traceId, spanId, traceOptions, tracestateBuilder.build());
+    } catch (IllegalArgumentException e) {
+      throw new SpanContextParseException("Invalid tracestate: " + tracestate, e);
+    }
+  }
+}
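
Given the constants above, the traceparent header written by inject is a fixed 55-character string: the 2-character version "00", then the 32-character lower-base16 trace id at offset 3, the 16-character span id at offset 36, and the 2-character trace options at offset 53, each field preceded by '-'. As a worked example (header values borrowed from the W3C trace-context examples, not from this change), a sampled context would travel as:

traceparent: 00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01
tracestate: congo=t61rcWkgMzE,rojo=00f067aa0ba902b7

extract reverses this: it reads the three fields at the fixed offsets, then splits tracestate on commas (rejecting more than 32 list members) and on the first '=' of each member, iterating in reverse so the rebuilt Tracestate keeps the original ordering.
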
diff --git a/impl_core/src/test/java/io/opencensus/implcore/metrics/DerivedDoubleCumulativeImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/metrics/DerivedDoubleCumulativeImplTest.java
new file mode 100644
index 0000000..7e08052
--- /dev/null
+++ b/impl_core/src/test/java/io/opencensus/implcore/metrics/DerivedDoubleCumulativeImplTest.java
@@ -0,0 +1,311 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.metrics;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.common.Duration;
+import io.opencensus.common.Timestamp;
+import io.opencensus.common.ToDoubleFunction;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.TimeSeries;
+import io.opencensus.metrics.export.Value;
+import io.opencensus.testing.common.TestClock;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link DerivedDoubleCumulativeImpl}. */
+@RunWith(JUnit4.class)
+public class DerivedDoubleCumulativeImplTest {
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  private static final String METRIC_NAME = "name";
+  private static final String METRIC_DESCRIPTION = "description";
+  private static final String METRIC_UNIT = "1";
+  private static final List<LabelKey> LABEL_KEY =
+      Collections.singletonList(LabelKey.create("key", "key description"));
+  private static final List<LabelValue> LABEL_VALUES =
+      Collections.singletonList(LabelValue.create("value"));
+  private static final List<LabelValue> LABEL_VALUES_1 =
+      Collections.singletonList(LabelValue.create("value1"));
+  private static final Timestamp START_TIME = Timestamp.create(60, 0);
+  private static final Duration ONE_MINUTE = Duration.create(60, 0);
+  private static final Map<LabelKey, LabelValue> EMPTY_CONSTANT_LABELS =
+      Collections.<LabelKey, LabelValue>emptyMap();
+
+  private final TestClock testClock = TestClock.create();
+  private static final MetricDescriptor METRIC_DESCRIPTOR =
+      MetricDescriptor.create(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.CUMULATIVE_DOUBLE, LABEL_KEY);
+
+  private final DerivedDoubleCumulativeImpl derivedDoubleCumulative =
+      new DerivedDoubleCumulativeImpl(
+          METRIC_NAME,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          LABEL_KEY,
+          EMPTY_CONSTANT_LABELS,
+          START_TIME);
+
+  // helper class
+  public static class QueueManager {
+    public double size() {
+      return 2.5;
+    }
+  }
+
+  private static final ToDoubleFunction<Object> doubleFunction =
+      new ToDoubleFunction<Object>() {
+        @Override
+        public double applyAsDouble(Object value) {
+          return 5.5;
+        }
+      };
+  private static final ToDoubleFunction<Object> negativeDoubleFunction =
+      new ToDoubleFunction<Object>() {
+        @Override
+        public double applyAsDouble(Object value) {
+          return -200.5;
+        }
+      };
+  private static final ToDoubleFunction<QueueManager> queueManagerFunction =
+      new ToDoubleFunction<QueueManager>() {
+        @Override
+        public double applyAsDouble(QueueManager queue) {
+          return queue.size();
+        }
+      };
+
+  @Before
+  public void setUp() {
+    testClock.setTime(START_TIME);
+  }
+
+  @Test
+  public void createTimeSeries_WithNullLabelValues() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    derivedDoubleCumulative.createTimeSeries(null, null, doubleFunction);
+  }
+
+  @Test
+  public void createTimeSeries_WithNullElement() {
+    List<LabelKey> labelKeys =
+        Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
+    List<LabelValue> labelValues = Arrays.asList(LabelValue.create("value1"), null);
+    DerivedDoubleCumulativeImpl derivedDoubleCumulative =
+        new DerivedDoubleCumulativeImpl(
+            METRIC_NAME,
+            METRIC_DESCRIPTION,
+            METRIC_UNIT,
+            labelKeys,
+            EMPTY_CONSTANT_LABELS,
+            START_TIME);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValue");
+    derivedDoubleCumulative.createTimeSeries(labelValues, null, doubleFunction);
+  }
+
+  @Test
+  public void createTimeSeries_WithInvalidLabelSize() {
+    List<LabelValue> labelValues =
+        Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
+    derivedDoubleCumulative.createTimeSeries(labelValues, null, doubleFunction);
+  }
+
+  @Test
+  public void createTimeSeries_WithNullFunction() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("function");
+    derivedDoubleCumulative.createTimeSeries(LABEL_VALUES, null, null);
+  }
+
+  @Test
+  public void createTimeSeries_WithObjFunction() {
+    derivedDoubleCumulative.createTimeSeries(
+        LABEL_VALUES, new QueueManager(), queueManagerFunction);
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    Metric metric = derivedDoubleCumulative.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric)
+        .isEqualTo(
+            Metric.createWithOneTimeSeries(
+                METRIC_DESCRIPTOR,
+                TimeSeries.createWithOnePoint(
+                    LABEL_VALUES, Point.create(Value.doubleValue(2.5), endTime), START_TIME)));
+  }
+
+  @Test
+  public void createTimeSeries_WithSameLabel() {
+    derivedDoubleCumulative.createTimeSeries(
+        LABEL_VALUES, new QueueManager(), queueManagerFunction);
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("A different time series with the same labels already exists.");
+    derivedDoubleCumulative.createTimeSeries(LABEL_VALUES, null, queueManagerFunction);
+  }
+
+  @Test
+  public void addTimeSeries_WithNullObj() {
+    derivedDoubleCumulative.createTimeSeries(LABEL_VALUES, null, doubleFunction);
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    Metric metric = derivedDoubleCumulative.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric)
+        .isEqualTo(
+            Metric.createWithOneTimeSeries(
+                METRIC_DESCRIPTOR,
+                TimeSeries.createWithOnePoint(
+                    LABEL_VALUES, Point.create(Value.doubleValue(5.5), endTime), START_TIME)));
+  }
+
+  @Test
+  public void addTimeSeries_IgnoreNegativeValue() {
+    derivedDoubleCumulative.createTimeSeries(LABEL_VALUES, null, negativeDoubleFunction);
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    Metric metric = derivedDoubleCumulative.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric)
+        .isEqualTo(
+            Metric.createWithOneTimeSeries(
+                METRIC_DESCRIPTOR,
+                TimeSeries.createWithOnePoint(
+                    LABEL_VALUES, Point.create(Value.doubleValue(0), endTime), START_TIME)));
+  }
+
+  @Test
+  public void withConstantLabels() {
+    List<LabelKey> labelKeys =
+        Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
+    List<LabelValue> labelValues =
+        Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
+    LabelKey constantKey = LabelKey.create("constant_key", "desc");
+    LabelValue constantValue = LabelValue.create("constant_value");
+    Map<LabelKey, LabelValue> constantLabels =
+        Collections.<LabelKey, LabelValue>singletonMap(constantKey, constantValue);
+    DerivedDoubleCumulativeImpl derivedDoubleCumulative2 =
+        new DerivedDoubleCumulativeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, constantLabels, START_TIME);
+
+    derivedDoubleCumulative2.createTimeSeries(
+        labelValues, new QueueManager(), queueManagerFunction);
+
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    allKeys.add(constantKey);
+    MetricDescriptor expectedDescriptor =
+        MetricDescriptor.create(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.CUMULATIVE_DOUBLE, allKeys);
+
+    List<LabelValue> allValues = new ArrayList<>(labelValues);
+    allValues.add(constantValue);
+    TimeSeries expectedTimeSeries =
+        TimeSeries.createWithOnePoint(
+            allValues, Point.create(Value.doubleValue(2.5), endTime), START_TIME);
+
+    Metric metric = derivedDoubleCumulative2.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(expectedDescriptor);
+    assertThat(metric.getTimeSeriesList()).containsExactly(expectedTimeSeries);
+
+    derivedDoubleCumulative2.removeTimeSeries(labelValues);
+    Metric metric2 = derivedDoubleCumulative2.getMetric(testClock);
+    assertThat(metric2).isNull();
+  }
+
+  @Test
+  public void removeTimeSeries() {
+    derivedDoubleCumulative.createTimeSeries(LABEL_VALUES, null, doubleFunction);
+    Metric metric = derivedDoubleCumulative.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(METRIC_DESCRIPTOR);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(1);
+    derivedDoubleCumulative.removeTimeSeries(LABEL_VALUES);
+    assertThat(derivedDoubleCumulative.getMetric(testClock)).isNull();
+  }
+
+  @Test
+  public void removeTimeSeries_WithNullLabelValues() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    derivedDoubleCumulative.removeTimeSeries(null);
+  }
+
+  @Test
+  public void multipleMetrics_GetMetric() {
+    derivedDoubleCumulative.createTimeSeries(LABEL_VALUES, null, doubleFunction);
+    derivedDoubleCumulative.createTimeSeries(
+        LABEL_VALUES_1, new QueueManager(), queueManagerFunction);
+    List<TimeSeries> expectedTimeSeriesList = new ArrayList<TimeSeries>();
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    expectedTimeSeriesList.add(
+        TimeSeries.createWithOnePoint(
+            LABEL_VALUES, Point.create(Value.doubleValue(5.5), endTime), START_TIME));
+    expectedTimeSeriesList.add(
+        TimeSeries.createWithOnePoint(
+            LABEL_VALUES_1, Point.create(Value.doubleValue(2.5), endTime), START_TIME));
+    Metric metric = derivedDoubleCumulative.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(METRIC_DESCRIPTOR);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(2);
+    assertThat(metric.getTimeSeriesList()).containsExactlyElementsIn(expectedTimeSeriesList);
+    assertThat(metric.getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(1);
+    assertThat(metric.getTimeSeriesList().get(0).getLabelValues().get(0))
+        .isEqualTo(LabelValue.create("value"));
+    assertThat(metric.getTimeSeriesList().get(1).getLabelValues().size()).isEqualTo(1);
+    assertThat(metric.getTimeSeriesList().get(1).getLabelValues().get(0))
+        .isEqualTo(LabelValue.create("value1"));
+  }
+
+  @Test
+  public void clear() {
+    derivedDoubleCumulative.createTimeSeries(LABEL_VALUES, null, doubleFunction);
+    derivedDoubleCumulative.createTimeSeries(
+        LABEL_VALUES_1, new QueueManager(), queueManagerFunction);
+    Metric metric = derivedDoubleCumulative.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(METRIC_DESCRIPTOR);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(2);
+    derivedDoubleCumulative.clear();
+    assertThat(derivedDoubleCumulative.getMetric(testClock)).isNull();
+  }
+
+  @Test
+  public void empty_GetMetrics() {
+    assertThat(derivedDoubleCumulative.getMetric(testClock)).isNull();
+  }
+}
diff --git a/impl_core/src/test/java/io/opencensus/implcore/metrics/DerivedDoubleGaugeImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/metrics/DerivedDoubleGaugeImplTest.java
index e69a284..b141348 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/metrics/DerivedDoubleGaugeImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/metrics/DerivedDoubleGaugeImplTest.java
@@ -33,6 +33,7 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -54,13 +55,17 @@
   private static final List<LabelValue> LABEL_VALUES_1 =
       Collections.singletonList(LabelValue.create("value1"));
   private static final Timestamp TEST_TIME = Timestamp.create(1234, 123);
+  private static final Map<LabelKey, LabelValue> EMPTY_CONSTANT_LABELS =
+      Collections.<LabelKey, LabelValue>emptyMap();
+
   private final TestClock testClock = TestClock.create(TEST_TIME);
   private static final MetricDescriptor METRIC_DESCRIPTOR =
       MetricDescriptor.create(
           METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.GAUGE_DOUBLE, LABEL_KEY);
 
   private final DerivedDoubleGaugeImpl derivedDoubleGauge =
-      new DerivedDoubleGaugeImpl(METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, LABEL_KEY);
+      new DerivedDoubleGaugeImpl(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, LABEL_KEY, EMPTY_CONSTANT_LABELS);
 
   // helper class
   public static class QueueManager {
@@ -104,9 +109,10 @@
         Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
     List<LabelValue> labelValues = Arrays.asList(LabelValue.create("value1"), null);
     DerivedDoubleGaugeImpl derivedDoubleGauge =
-        new DerivedDoubleGaugeImpl(METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys);
+        new DerivedDoubleGaugeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, EMPTY_CONSTANT_LABELS);
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelValue element should not be null.");
+    thrown.expectMessage("labelValue");
     derivedDoubleGauge.createTimeSeries(labelValues, null, doubleFunction);
   }
 
@@ -115,7 +121,7 @@
     List<LabelValue> labelValues =
         Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Incorrect number of labels.");
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
     derivedDoubleGauge.createTimeSeries(labelValues, null, doubleFunction);
   }
 
@@ -161,6 +167,44 @@
   }
 
   @Test
+  public void withConstantLabels() {
+    List<LabelKey> labelKeys =
+        Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
+    List<LabelValue> labelValues =
+        Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
+    LabelKey constantKey = LabelKey.create("constant_key", "desc");
+    LabelValue constantValue = LabelValue.create("constant_value");
+    Map<LabelKey, LabelValue> constantLabels =
+        Collections.<LabelKey, LabelValue>singletonMap(constantKey, constantValue);
+    DerivedDoubleGaugeImpl derivedDoubleGauge2 =
+        new DerivedDoubleGaugeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, constantLabels);
+
+    derivedDoubleGauge2.createTimeSeries(labelValues, new QueueManager(), queueManagerFunction);
+
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    allKeys.add(constantKey);
+    MetricDescriptor expectedDescriptor =
+        MetricDescriptor.create(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.GAUGE_DOUBLE, allKeys);
+
+    List<LabelValue> allValues = new ArrayList<>(labelValues);
+    allValues.add(constantValue);
+    TimeSeries expectedTimeSeries =
+        TimeSeries.createWithOnePoint(
+            allValues, Point.create(Value.doubleValue(2.5), TEST_TIME), null);
+
+    Metric metric = derivedDoubleGauge2.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(expectedDescriptor);
+    assertThat(metric.getTimeSeriesList()).containsExactly(expectedTimeSeries);
+
+    derivedDoubleGauge2.removeTimeSeries(labelValues);
+    Metric metric2 = derivedDoubleGauge2.getMetric(testClock);
+    assertThat(metric2).isNull();
+  }
+
+  @Test
   public void removeTimeSeries() {
     derivedDoubleGauge.createTimeSeries(LABEL_VALUES, null, doubleFunction);
     Metric metric = derivedDoubleGauge.getMetric(testClock);
diff --git a/impl_core/src/test/java/io/opencensus/implcore/metrics/DerivedLongCumulativeImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/metrics/DerivedLongCumulativeImplTest.java
new file mode 100644
index 0000000..11464ff
--- /dev/null
+++ b/impl_core/src/test/java/io/opencensus/implcore/metrics/DerivedLongCumulativeImplTest.java
@@ -0,0 +1,308 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.metrics;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import io.opencensus.common.Duration;
+import io.opencensus.common.Timestamp;
+import io.opencensus.common.ToLongFunction;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.TimeSeries;
+import io.opencensus.metrics.export.Value;
+import io.opencensus.testing.common.TestClock;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link DerivedLongCumulativeImpl}. */
+@RunWith(JUnit4.class)
+public class DerivedLongCumulativeImplTest {
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  private static final String METRIC_NAME = "name";
+  private static final String METRIC_DESCRIPTION = "description";
+  private static final String METRIC_UNIT = "1";
+  private static final List<LabelKey> LABEL_KEY =
+      Collections.singletonList(LabelKey.create("key", "key description"));
+  private static final List<LabelValue> LABEL_VALUES =
+      Collections.singletonList(LabelValue.create("value"));
+  private static final List<LabelValue> LABEL_VALUES_1 =
+      Collections.singletonList(LabelValue.create("value1"));
+  private static final Timestamp START_TIME = Timestamp.create(60, 0);
+  private static final Duration ONE_MINUTE = Duration.create(60, 0);
+  private static final Map<LabelKey, LabelValue> EMPTY_CONSTANT_LABELS =
+      Collections.<LabelKey, LabelValue>emptyMap();
+
+  private final TestClock testClock = TestClock.create();
+  private static final MetricDescriptor METRIC_DESCRIPTOR =
+      MetricDescriptor.create(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.CUMULATIVE_INT64, LABEL_KEY);
+
+  private final DerivedLongCumulativeImpl derivedLongCumulative =
+      new DerivedLongCumulativeImpl(
+          METRIC_NAME,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          LABEL_KEY,
+          EMPTY_CONSTANT_LABELS,
+          START_TIME);
+
+  // helper class
+  public static class QueueManager {
+    public long size() {
+      return 3;
+    }
+  }
+
+  private static final ToLongFunction<Object> longFunction =
+      new ToLongFunction<Object>() {
+        @Override
+        public long applyAsLong(Object value) {
+          return 15;
+        }
+      };
+  private static final ToLongFunction<Object> negativeLongFunction =
+      new ToLongFunction<Object>() {
+        @Override
+        public long applyAsLong(Object value) {
+          return -200;
+        }
+      };
+  private static final ToLongFunction<QueueManager> queueManagerFunction =
+      new ToLongFunction<QueueManager>() {
+        @Override
+        public long applyAsLong(QueueManager queue) {
+          return queue.size();
+        }
+      };
+
+  @Before
+  public void setUp() {
+    testClock.setTime(START_TIME);
+  }
+
+  @Test
+  public void createTimeSeries_WithNullLabelValues() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    derivedLongCumulative.createTimeSeries(null, null, longFunction);
+  }
+
+  @Test
+  public void createTimeSeries_WithNullElement() {
+    List<LabelKey> labelKeys =
+        Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
+    List<LabelValue> labelValues = Arrays.asList(LabelValue.create("value1"), null);
+    DerivedLongCumulativeImpl derivedLongCumulative =
+        new DerivedLongCumulativeImpl(
+            METRIC_NAME,
+            METRIC_DESCRIPTION,
+            METRIC_UNIT,
+            labelKeys,
+            EMPTY_CONSTANT_LABELS,
+            START_TIME);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValue");
+    derivedLongCumulative.createTimeSeries(labelValues, null, longFunction);
+  }
+
+  @Test
+  public void createTimeSeries_WithInvalidLabelSize() {
+    List<LabelValue> labelValues =
+        Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
+    derivedLongCumulative.createTimeSeries(labelValues, null, longFunction);
+  }
+
+  @Test
+  public void createTimeSeries_WithNullFunction() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("function");
+    derivedLongCumulative.createTimeSeries(LABEL_VALUES, null, null);
+  }
+
+  @Test
+  public void createTimeSeries_WithObjFunction() {
+    derivedLongCumulative.createTimeSeries(LABEL_VALUES, new QueueManager(), queueManagerFunction);
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    Metric metric = derivedLongCumulative.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric)
+        .isEqualTo(
+            Metric.createWithOneTimeSeries(
+                METRIC_DESCRIPTOR,
+                TimeSeries.createWithOnePoint(
+                    LABEL_VALUES, Point.create(Value.longValue(3), endTime), START_TIME)));
+  }
+
+  @Test
+  public void createTimeSeries_WithSameLabel() {
+    derivedLongCumulative.createTimeSeries(LABEL_VALUES, new QueueManager(), queueManagerFunction);
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("A different time series with the same labels already exists.");
+    derivedLongCumulative.createTimeSeries(LABEL_VALUES, null, queueManagerFunction);
+  }
+
+  @Test
+  public void addTimeSeries_WithNullObj() {
+    derivedLongCumulative.createTimeSeries(LABEL_VALUES, null, longFunction);
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    Metric metric = derivedLongCumulative.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric)
+        .isEqualTo(
+            Metric.createWithOneTimeSeries(
+                METRIC_DESCRIPTOR,
+                TimeSeries.createWithOnePoint(
+                    LABEL_VALUES, Point.create(Value.longValue(15), endTime), START_TIME)));
+  }
+
+  @Test
+  public void addTimeSeries_IgnoreNegativeValue() {
+    derivedLongCumulative.createTimeSeries(LABEL_VALUES, null, negativeLongFunction);
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    Metric metric = derivedLongCumulative.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric)
+        .isEqualTo(
+            Metric.createWithOneTimeSeries(
+                METRIC_DESCRIPTOR,
+                TimeSeries.createWithOnePoint(
+                    LABEL_VALUES, Point.create(Value.longValue(0), endTime), START_TIME)));
+  }
+
+  @Test
+  public void withConstantLabels() {
+    List<LabelKey> labelKeys =
+        Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
+    List<LabelValue> labelValues =
+        Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
+    LabelKey constantKey = LabelKey.create("constant_key", "desc");
+    LabelValue constantValue = LabelValue.create("constant_value");
+    Map<LabelKey, LabelValue> constantLabels =
+        Collections.<LabelKey, LabelValue>singletonMap(constantKey, constantValue);
+    DerivedLongCumulativeImpl derivedLongCumulative2 =
+        new DerivedLongCumulativeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, constantLabels, START_TIME);
+
+    derivedLongCumulative2.createTimeSeries(labelValues, new QueueManager(), queueManagerFunction);
+
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    allKeys.add(constantKey);
+    MetricDescriptor expectedDescriptor =
+        MetricDescriptor.create(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.CUMULATIVE_INT64, allKeys);
+
+    List<LabelValue> allValues = new ArrayList<>(labelValues);
+    allValues.add(constantValue);
+    TimeSeries expectedTimeSeries =
+        TimeSeries.createWithOnePoint(
+            allValues, Point.create(Value.longValue(3), endTime), START_TIME);
+
+    Metric metric = derivedLongCumulative2.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(expectedDescriptor);
+    assertThat(metric.getTimeSeriesList()).containsExactly(expectedTimeSeries);
+
+    derivedLongCumulative2.removeTimeSeries(labelValues);
+    Metric metric2 = derivedLongCumulative2.getMetric(testClock);
+    assertThat(metric2).isNull();
+  }
+
+  @Test
+  public void removeTimeSeries() {
+    derivedLongCumulative.createTimeSeries(LABEL_VALUES, null, longFunction);
+    Metric metric = derivedLongCumulative.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(METRIC_DESCRIPTOR);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(1);
+    derivedLongCumulative.removeTimeSeries(LABEL_VALUES);
+    assertThat(derivedLongCumulative.getMetric(testClock)).isNull();
+  }
+
+  @Test
+  public void removeTimeSeries_WithNullLabelValues() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    derivedLongCumulative.removeTimeSeries(null);
+  }
+
+  @Test
+  public void multipleMetrics_GetMetric() {
+    derivedLongCumulative.createTimeSeries(LABEL_VALUES, null, longFunction);
+    derivedLongCumulative.createTimeSeries(
+        LABEL_VALUES_1, new QueueManager(), queueManagerFunction);
+    List<TimeSeries> expectedTimeSeriesList = new ArrayList<TimeSeries>();
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    expectedTimeSeriesList.add(
+        TimeSeries.createWithOnePoint(
+            LABEL_VALUES, Point.create(Value.longValue(15), endTime), START_TIME));
+    expectedTimeSeriesList.add(
+        TimeSeries.createWithOnePoint(
+            LABEL_VALUES_1, Point.create(Value.longValue(3), endTime), START_TIME));
+    Metric metric = derivedLongCumulative.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(METRIC_DESCRIPTOR);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(2);
+    assertThat(metric.getTimeSeriesList()).containsExactlyElementsIn(expectedTimeSeriesList);
+    assertThat(metric.getTimeSeriesList().get(0).getLabelValues().size()).isEqualTo(1);
+    assertThat(metric.getTimeSeriesList().get(0).getLabelValues().get(0))
+        .isEqualTo(LabelValue.create("value"));
+    assertThat(metric.getTimeSeriesList().get(1).getLabelValues().size()).isEqualTo(1);
+    assertThat(metric.getTimeSeriesList().get(1).getLabelValues().get(0))
+        .isEqualTo(LabelValue.create("value1"));
+  }
+
+  @Test
+  public void clear() {
+    derivedLongCumulative.createTimeSeries(LABEL_VALUES, null, longFunction);
+    derivedLongCumulative.createTimeSeries(
+        LABEL_VALUES_1, new QueueManager(), queueManagerFunction);
+    Metric metric = derivedLongCumulative.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(METRIC_DESCRIPTOR);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(2);
+    derivedLongCumulative.clear();
+    assertThat(derivedLongCumulative.getMetric(testClock)).isNull();
+  }
+
+  @Test
+  public void empty_GetMetrics() {
+    assertThat(derivedLongCumulative.getMetric(testClock)).isNull();
+  }
+}
diff --git a/impl_core/src/test/java/io/opencensus/implcore/metrics/DerivedLongGaugeImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/metrics/DerivedLongGaugeImplTest.java
index ec9cad6..819aaac 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/metrics/DerivedLongGaugeImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/metrics/DerivedLongGaugeImplTest.java
@@ -33,6 +33,7 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -53,6 +54,8 @@
       Collections.singletonList(LabelValue.create("value"));
   private static final List<LabelValue> LABEL_VALUES_1 =
       Collections.singletonList(LabelValue.create("value1"));
+  private static final Map<LabelKey, LabelValue> EMPTY_CONSTANT_LABELS =
+      Collections.<LabelKey, LabelValue>emptyMap();
 
   private static final Timestamp TEST_TIME = Timestamp.create(1234, 123);
   private final TestClock testClock = TestClock.create(TEST_TIME);
@@ -62,7 +65,8 @@
           METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.GAUGE_INT64, LABEL_KEY);
 
   private final DerivedLongGaugeImpl derivedLongGauge =
-      new DerivedLongGaugeImpl(METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, LABEL_KEY);
+      new DerivedLongGaugeImpl(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, LABEL_KEY, EMPTY_CONSTANT_LABELS);
 
   // helper class
   public static class QueueManager {
@@ -107,9 +111,10 @@
     List<LabelValue> labelValues = Arrays.asList(LabelValue.create("value1"), null);
 
     DerivedLongGaugeImpl derivedLongGauge =
-        new DerivedLongGaugeImpl(METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys);
+        new DerivedLongGaugeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, EMPTY_CONSTANT_LABELS);
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelValue element should not be null.");
+    thrown.expectMessage("labelValue");
     derivedLongGauge.createTimeSeries(labelValues, null, longFunction);
   }
 
@@ -119,7 +124,7 @@
         Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
 
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Incorrect number of labels.");
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
     derivedLongGauge.createTimeSeries(labelValues, null, longFunction);
   }
 
@@ -159,6 +164,43 @@
   }
 
   @Test
+  public void withConstantLabels() {
+    List<LabelKey> labelKeys =
+        Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
+    List<LabelValue> labelValues =
+        Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
+    LabelKey constantKey = LabelKey.create("constant_key", "desc");
+    LabelValue constantValue = LabelValue.create("constant_value");
+    Map<LabelKey, LabelValue> constantLabels =
+        Collections.<LabelKey, LabelValue>singletonMap(constantKey, constantValue);
+    DerivedLongGaugeImpl derivedLongGauge2 =
+        new DerivedLongGaugeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, constantLabels);
+
+    derivedLongGauge2.createTimeSeries(labelValues, new QueueManager(), queueManagerFunction);
+
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    allKeys.add(constantKey);
+    MetricDescriptor expectedDescriptor =
+        MetricDescriptor.create(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.GAUGE_INT64, allKeys);
+
+    List<LabelValue> allValues = new ArrayList<>(labelValues);
+    allValues.add(constantValue);
+    TimeSeries expectedTimeSeries =
+        TimeSeries.createWithOnePoint(allValues, Point.create(Value.longValue(2), TEST_TIME), null);
+
+    Metric metric = derivedLongGauge2.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(expectedDescriptor);
+    assertThat(metric.getTimeSeriesList()).containsExactly(expectedTimeSeries);
+
+    derivedLongGauge2.removeTimeSeries(labelValues);
+    Metric metric2 = derivedLongGauge2.getMetric(testClock);
+    assertThat(metric2).isNull();
+  }
+
+  @Test
   public void removeTimeSeries() {
     derivedLongGauge.createTimeSeries(LABEL_VALUES, null, longFunction);
     Metric metric = derivedLongGauge.getMetric(testClock);
diff --git a/impl_core/src/test/java/io/opencensus/implcore/metrics/DoubleCumulativeImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/metrics/DoubleCumulativeImplTest.java
new file mode 100644
index 0000000..992c9b3
--- /dev/null
+++ b/impl_core/src/test/java/io/opencensus/implcore/metrics/DoubleCumulativeImplTest.java
@@ -0,0 +1,357 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.metrics;
+
+import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.implcore.metrics.DoubleCumulativeImpl.UNSET_VALUE;
+
+import com.google.common.testing.EqualsTester;
+import io.opencensus.common.Duration;
+import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.DoubleCumulative.DoublePoint;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.TimeSeries;
+import io.opencensus.metrics.export.Value;
+import io.opencensus.testing.common.TestClock;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link DoubleCumulativeImpl}. */
+@RunWith(JUnit4.class)
+public class DoubleCumulativeImplTest {
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  private static final String METRIC_NAME = "name";
+  private static final String METRIC_DESCRIPTION = "description";
+  private static final String METRIC_UNIT = "1";
+  private static final List<LabelKey> LABEL_KEY =
+      Collections.singletonList(LabelKey.create("key", "key description"));
+  private static final List<LabelValue> LABEL_VALUES =
+      Collections.singletonList(LabelValue.create("value"));
+  private static final List<LabelValue> LABEL_VALUES1 =
+      Collections.singletonList(LabelValue.create("value1"));
+  private static final List<LabelValue> DEFAULT_LABEL_VALUES =
+      Collections.singletonList(UNSET_VALUE);
+  private static final Map<LabelKey, LabelValue> EMPTY_CONSTANT_LABELS =
+      Collections.<LabelKey, LabelValue>emptyMap();
+
+  private static final Timestamp START_TIME = Timestamp.create(60, 0);
+  private static final Duration ONE_MINUTE = Duration.create(60, 0);
+  private final TestClock testClock = TestClock.create();
+  private static final MetricDescriptor METRIC_DESCRIPTOR =
+      MetricDescriptor.create(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.CUMULATIVE_DOUBLE, LABEL_KEY);
+  private final DoubleCumulativeImpl doubleCumulativeMetric =
+      new DoubleCumulativeImpl(
+          METRIC_NAME,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          LABEL_KEY,
+          EMPTY_CONSTANT_LABELS,
+          START_TIME);
+
+  @Before
+  public void setUp() {
+    testClock.setTime(START_TIME);
+  }
+
+  @Test
+  public void getOrCreateTimeSeries_WithNullLabelValues() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    doubleCumulativeMetric.getOrCreateTimeSeries(null);
+  }
+
+  @Test
+  public void getOrCreateTimeSeries_WithNullElement() {
+    List<LabelKey> labelKeys =
+        Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
+    List<LabelValue> labelValues = Arrays.asList(LabelValue.create("value1"), null);
+
+    DoubleCumulativeImpl doubleCumulative =
+        new DoubleCumulativeImpl(
+            METRIC_NAME,
+            METRIC_DESCRIPTION,
+            METRIC_UNIT,
+            labelKeys,
+            EMPTY_CONSTANT_LABELS,
+            START_TIME);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValue");
+    doubleCumulative.getOrCreateTimeSeries(labelValues);
+  }
+
+  @Test
+  public void getOrCreateTimeSeries_WithInvalidLabelSize() {
+    List<LabelValue> labelValues =
+        Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
+
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
+    doubleCumulativeMetric.getOrCreateTimeSeries(labelValues);
+  }
+
+  @Test
+  public void getOrCreateTimeSeries() {
+    DoublePoint point = doubleCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES);
+    point.add(100);
+    DoublePoint point1 = doubleCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES);
+    point1.add(500);
+    assertThat(point).isSameInstanceAs(point1);
+
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    Metric metric = doubleCumulativeMetric.getMetric(testClock);
+    assertThat(metric)
+        .isEqualTo(
+            Metric.createWithOneTimeSeries(
+                METRIC_DESCRIPTOR,
+                TimeSeries.createWithOnePoint(
+                    LABEL_VALUES, Point.create(Value.doubleValue(600), endTime), START_TIME)));
+  }
+
+  @Test
+  public void getOrCreateTimeSeries_IgnoreNegativePointValues() {
+    DoublePoint point = doubleCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES);
+    point.add(-100);
+    point.add(25);
+    point.add(-33);
+
+    testClock.advanceTime(ONE_MINUTE);
+    Metric metric = doubleCumulativeMetric.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(METRIC_DESCRIPTOR);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metric.getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metric.getTimeSeriesList().get(0).getPoints().get(0).getValue())
+        .isEqualTo(Value.doubleValue(25));
+  }
+
+  @Test
+  public void getDefaultTimeSeries() {
+    DoublePoint point = doubleCumulativeMetric.getDefaultTimeSeries();
+    point.add(100);
+
+    DoublePoint point1 = doubleCumulativeMetric.getDefaultTimeSeries();
+    point1.add(-100);
+
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    Metric metric = doubleCumulativeMetric.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric)
+        .isEqualTo(
+            Metric.createWithOneTimeSeries(
+                METRIC_DESCRIPTOR,
+                TimeSeries.createWithOnePoint(
+                    DEFAULT_LABEL_VALUES,
+                    Point.create(Value.doubleValue(100), endTime),
+                    START_TIME)));
+    assertThat(point).isSameInstanceAs(point1);
+  }
+
+  @Test
+  public void removeTimeSeries() {
+    doubleCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES);
+    assertThat(doubleCumulativeMetric.getMetric(testClock))
+        .isEqualTo(
+            Metric.createWithOneTimeSeries(
+                METRIC_DESCRIPTOR,
+                TimeSeries.createWithOnePoint(
+                    LABEL_VALUES, Point.create(Value.doubleValue(0), START_TIME), START_TIME)));
+
+    doubleCumulativeMetric.removeTimeSeries(LABEL_VALUES);
+    assertThat(doubleCumulativeMetric.getMetric(testClock)).isNull();
+  }
+
+  @Test
+  public void removeTimeSeries_WithNullLabelValues() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    doubleCumulativeMetric.removeTimeSeries(null);
+  }
+
+  @Test
+  public void clear() {
+    DoublePoint doublePoint = doubleCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES);
+    doublePoint.add(100);
+
+    Metric metric = doubleCumulativeMetric.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(METRIC_DESCRIPTOR);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(1);
+
+    doubleCumulativeMetric.clear();
+    assertThat(doubleCumulativeMetric.getMetric(testClock)).isNull();
+  }
+
+  @Test
+  public void withConstantLabels() {
+    List<LabelKey> labelKeys =
+        Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
+    List<LabelValue> labelValues =
+        Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
+    LabelKey constantKey = LabelKey.create("constant_key", "desc");
+    LabelValue constantValue = LabelValue.create("constant_value");
+    Map<LabelKey, LabelValue> constantLabels =
+        Collections.<LabelKey, LabelValue>singletonMap(constantKey, constantValue);
+    DoubleCumulativeImpl doubleCumulative =
+        new DoubleCumulativeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, constantLabels, START_TIME);
+
+    DoublePoint doublePoint = doubleCumulative.getOrCreateTimeSeries(labelValues);
+    doublePoint.add(1);
+    doublePoint.add(2);
+
+    DoublePoint defaultPoint = doubleCumulative.getDefaultTimeSeries();
+    defaultPoint.add(100);
+
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    allKeys.add(constantKey);
+    MetricDescriptor expectedDescriptor =
+        MetricDescriptor.create(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.CUMULATIVE_DOUBLE, allKeys);
+
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    List<LabelValue> allValues = new ArrayList<>(labelValues);
+    allValues.add(constantValue);
+    List<TimeSeries> expectedTimeSeriesList = new ArrayList<TimeSeries>();
+    TimeSeries defaultTimeSeries =
+        TimeSeries.createWithOnePoint(
+            Arrays.asList(UNSET_VALUE, UNSET_VALUE, constantValue),
+            Point.create(Value.doubleValue(100), endTime),
+            START_TIME);
+    expectedTimeSeriesList.add(
+        TimeSeries.createWithOnePoint(
+            allValues, Point.create(Value.doubleValue(3), endTime), START_TIME));
+    expectedTimeSeriesList.add(defaultTimeSeries);
+
+    Metric metric = doubleCumulative.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(expectedDescriptor);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(2);
+    assertThat(metric.getTimeSeriesList()).containsExactlyElementsIn(expectedTimeSeriesList);
+
+    doubleCumulative.removeTimeSeries(labelValues);
+    Metric metric2 = doubleCumulative.getMetric(testClock);
+    assertThat(metric2).isNotNull();
+    assertThat(metric2.getTimeSeriesList()).containsExactly(defaultTimeSeries);
+  }
+
+  @Test
+  public void pointImpl_InstanceOf() {
+    DoublePoint doublePoint = doubleCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES);
+    assertThat(doublePoint).isInstanceOf(DoubleCumulativeImpl.PointImpl.class);
+  }
+
+  @Test
+  public void multipleMetrics_GetMetric() {
+    DoublePoint doublePoint = doubleCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES);
+    doublePoint.add(1);
+    doublePoint.add(2);
+
+    DoublePoint defaultPoint = doubleCumulativeMetric.getDefaultTimeSeries();
+    defaultPoint.add(100);
+
+    DoublePoint doublePoint1 = doubleCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES1);
+    doublePoint1.add(-100);
+    doublePoint1.add(-20);
+
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+
+    List<TimeSeries> expectedTimeSeriesList = new ArrayList<TimeSeries>();
+    expectedTimeSeriesList.add(
+        TimeSeries.createWithOnePoint(
+            LABEL_VALUES, Point.create(Value.doubleValue(3), endTime), START_TIME));
+    expectedTimeSeriesList.add(
+        TimeSeries.createWithOnePoint(
+            DEFAULT_LABEL_VALUES, Point.create(Value.doubleValue(100), endTime), START_TIME));
+    expectedTimeSeriesList.add(
+        TimeSeries.createWithOnePoint(
+            LABEL_VALUES1, Point.create(Value.doubleValue(0), endTime), START_TIME));
+
+    Metric metric = doubleCumulativeMetric.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(METRIC_DESCRIPTOR);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(3);
+    assertThat(metric.getTimeSeriesList()).containsExactlyElementsIn(expectedTimeSeriesList);
+  }
+
+  @Test
+  public void empty_GetMetrics() {
+    assertThat(doubleCumulativeMetric.getMetric(testClock)).isNull();
+  }
+
+  @Test
+  public void testEquals() {
+    List<LabelKey> labelKeys =
+        Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
+    List<LabelValue> labelValues =
+        Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
+
+    DoubleCumulativeImpl doubleCumulative =
+        new DoubleCumulativeImpl(
+            METRIC_NAME,
+            METRIC_DESCRIPTION,
+            METRIC_UNIT,
+            labelKeys,
+            EMPTY_CONSTANT_LABELS,
+            START_TIME);
+
+    DoublePoint defaultPoint1 = doubleCumulative.getDefaultTimeSeries();
+    DoublePoint defaultPoint2 = doubleCumulative.getDefaultTimeSeries();
+    DoublePoint doublePoint1 = doubleCumulative.getOrCreateTimeSeries(labelValues);
+    DoublePoint doublePoint2 = doubleCumulative.getOrCreateTimeSeries(labelValues);
+
+    new EqualsTester()
+        .addEqualityGroup(defaultPoint1, defaultPoint2)
+        .addEqualityGroup(doublePoint1, doublePoint2)
+        .testEquals();
+
+    doubleCumulative.clear();
+
+    DoublePoint newDefaultPointAfterClear = doubleCumulative.getDefaultTimeSeries();
+    DoublePoint newDoublePointAfterClear = doubleCumulative.getOrCreateTimeSeries(labelValues);
+
+    doubleCumulative.removeTimeSeries(labelValues);
+    DoublePoint newDoublePointAfterRemove = doubleCumulative.getOrCreateTimeSeries(labelValues);
+
+    new EqualsTester()
+        .addEqualityGroup(defaultPoint1, defaultPoint2)
+        .addEqualityGroup(doublePoint1, doublePoint2)
+        .addEqualityGroup(newDefaultPointAfterClear)
+        .addEqualityGroup(newDoublePointAfterClear)
+        .addEqualityGroup(newDoublePointAfterRemove)
+        .testEquals();
+  }
+}
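
A minimal sketch of the behaviour the DoubleCumulativeImplTest above asserts: cumulative points accumulate non-negative increments and silently drop negative ones. The constructor arguments mirror the test setup; the metric name and unit below are illustrative only, and the sketch is assumed to sit in the implementation package so the constructor used by the tests is reachable.

    package io.opencensus.implcore.metrics;

    import io.opencensus.common.Timestamp;
    import io.opencensus.metrics.DoubleCumulative.DoublePoint;
    import io.opencensus.metrics.LabelKey;
    import io.opencensus.metrics.LabelValue;
    import java.util.Collections;

    final class DoubleCumulativeSketch {
      static void demo() {
        DoubleCumulativeImpl cumulative =
            new DoubleCumulativeImpl(
                "queue_bytes_total", // illustrative name, not from the patch
                "example description",
                "By",
                Collections.singletonList(LabelKey.create("key", "desc")),
                Collections.<LabelKey, LabelValue>emptyMap(),
                Timestamp.create(60, 0));

        DoublePoint point =
            cumulative.getOrCreateTimeSeries(
                Collections.singletonList(LabelValue.create("value")));
        point.add(25);   // accumulated
        point.add(-100); // ignored: cumulative points drop negative increments
        // getMetric(clock) now reports 25 for this series, matching the test above.
      }
    }
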
diff --git a/impl_core/src/test/java/io/opencensus/implcore/metrics/DoubleGaugeImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/metrics/DoubleGaugeImplTest.java
index b089908..b373a12 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/metrics/DoubleGaugeImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/metrics/DoubleGaugeImplTest.java
@@ -35,6 +35,7 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -57,6 +58,8 @@
       Collections.singletonList(LabelValue.create("value1"));
   private static final List<LabelValue> DEFAULT_LABEL_VALUES =
       Collections.singletonList(UNSET_VALUE);
+  private static final Map<LabelKey, LabelValue> EMPTY_CONSTANT_LABELS =
+      Collections.<LabelKey, LabelValue>emptyMap();
 
   private static final Timestamp TEST_TIME = Timestamp.create(1234, 123);
   private final TestClock testClock = TestClock.create(TEST_TIME);
@@ -64,7 +67,8 @@
       MetricDescriptor.create(
           METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.GAUGE_DOUBLE, LABEL_KEY);
   private final DoubleGaugeImpl doubleGauge =
-      new DoubleGaugeImpl(METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, LABEL_KEY);
+      new DoubleGaugeImpl(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, LABEL_KEY, EMPTY_CONSTANT_LABELS);
 
   @Test
   public void getOrCreateTimeSeries_WithNullLabelValues() {
@@ -80,9 +84,10 @@
     List<LabelValue> labelValues = Arrays.asList(LabelValue.create("value1"), null);
 
     DoubleGaugeImpl doubleGauge =
-        new DoubleGaugeImpl(METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys);
+        new DoubleGaugeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, EMPTY_CONSTANT_LABELS);
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelValue element should not be null.");
+    thrown.expectMessage("labelValue");
     doubleGauge.getOrCreateTimeSeries(labelValues);
   }
 
@@ -92,7 +97,7 @@
         Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
 
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Incorrect number of labels.");
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
     doubleGauge.getOrCreateTimeSeries(labelValues);
   }
 
@@ -112,7 +117,7 @@
                 Collections.singletonList(
                     TimeSeries.createWithOnePoint(
                         LABEL_VALUES, Point.create(Value.doubleValue(500), TEST_TIME), null))));
-    assertThat(point).isSameAs(point1);
+    assertThat(point).isSameInstanceAs(point1);
   }
 
   @Test
@@ -153,7 +158,7 @@
                         DEFAULT_LABEL_VALUES,
                         Point.create(Value.doubleValue(400), TEST_TIME),
                         null))));
-    assertThat(point).isSameAs(point1);
+    assertThat(point).isSameInstanceAs(point1);
   }
 
   @Test
@@ -199,7 +204,8 @@
     List<LabelKey> labelKeys =
         Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
     DoubleGaugeImpl doubleGauge =
-        new DoubleGaugeImpl(METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys);
+        new DoubleGaugeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, EMPTY_CONSTANT_LABELS);
     DoublePoint defaultPoint = doubleGauge.getDefaultTimeSeries();
     defaultPoint.set(-230);
 
@@ -212,6 +218,58 @@
   }
 
   @Test
+  public void withConstantLabels() {
+    List<LabelKey> labelKeys =
+        Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
+    List<LabelValue> labelValues =
+        Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
+    LabelKey constantKey = LabelKey.create("constant_key", "desc");
+    LabelValue constantValue = LabelValue.create("constant_value");
+    Map<LabelKey, LabelValue> constantLabels =
+        Collections.<LabelKey, LabelValue>singletonMap(constantKey, constantValue);
+    DoubleGaugeImpl doubleGauge =
+        new DoubleGaugeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, constantLabels);
+
+    DoublePoint doublePoint = doubleGauge.getOrCreateTimeSeries(labelValues);
+    doublePoint.add(1);
+    doublePoint.add(2);
+
+    DoublePoint defaultPoint = doubleGauge.getDefaultTimeSeries();
+    defaultPoint.set(100);
+
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    allKeys.add(constantKey);
+    MetricDescriptor expectedDescriptor =
+        MetricDescriptor.create(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.GAUGE_DOUBLE, allKeys);
+
+    List<LabelValue> allValues = new ArrayList<>(labelValues);
+    allValues.add(constantValue);
+    List<TimeSeries> expectedTimeSeriesList = new ArrayList<TimeSeries>();
+    TimeSeries defaultTimeSeries =
+        TimeSeries.createWithOnePoint(
+            Arrays.asList(UNSET_VALUE, UNSET_VALUE, constantValue),
+            Point.create(Value.doubleValue(100), TEST_TIME),
+            null);
+    expectedTimeSeriesList.add(
+        TimeSeries.createWithOnePoint(
+            allValues, Point.create(Value.doubleValue(3), TEST_TIME), null));
+    expectedTimeSeriesList.add(defaultTimeSeries);
+
+    Metric metric = doubleGauge.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(expectedDescriptor);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(2);
+    assertThat(metric.getTimeSeriesList()).containsExactlyElementsIn(expectedTimeSeriesList);
+
+    doubleGauge.removeTimeSeries(labelValues);
+    Metric metric2 = doubleGauge.getMetric(testClock);
+    assertThat(metric2).isNotNull();
+    assertThat(metric2.getTimeSeriesList()).containsExactly(defaultTimeSeries);
+  }
+
+  @Test
   public void pointImpl_InstanceOf() {
     DoublePoint doublePoint = doubleGauge.getOrCreateTimeSeries(LABEL_VALUES);
     assertThat(doublePoint).isInstanceOf(DoubleGaugeImpl.PointImpl.class);
@@ -261,7 +319,8 @@
         Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
 
     DoubleGaugeImpl doubleGauge =
-        new DoubleGaugeImpl(METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys);
+        new DoubleGaugeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, EMPTY_CONSTANT_LABELS);
 
     DoublePoint defaultPoint1 = doubleGauge.getDefaultTimeSeries();
     DoublePoint defaultPoint2 = doubleGauge.getDefaultTimeSeries();
diff --git a/impl_core/src/test/java/io/opencensus/implcore/metrics/LongCumulativeImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/metrics/LongCumulativeImplTest.java
new file mode 100644
index 0000000..e0421c5
--- /dev/null
+++ b/impl_core/src/test/java/io/opencensus/implcore/metrics/LongCumulativeImplTest.java
@@ -0,0 +1,357 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.metrics;
+
+import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.implcore.metrics.LongCumulativeImpl.UNSET_VALUE;
+
+import com.google.common.testing.EqualsTester;
+import io.opencensus.common.Duration;
+import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.LabelKey;
+import io.opencensus.metrics.LabelValue;
+import io.opencensus.metrics.LongCumulative.LongPoint;
+import io.opencensus.metrics.export.Metric;
+import io.opencensus.metrics.export.MetricDescriptor;
+import io.opencensus.metrics.export.MetricDescriptor.Type;
+import io.opencensus.metrics.export.Point;
+import io.opencensus.metrics.export.TimeSeries;
+import io.opencensus.metrics.export.Value;
+import io.opencensus.testing.common.TestClock;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link LongCumulativeImpl}. */
+@RunWith(JUnit4.class)
+public class LongCumulativeImplTest {
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  private static final String METRIC_NAME = "name";
+  private static final String METRIC_DESCRIPTION = "description";
+  private static final String METRIC_UNIT = "1";
+  private static final List<LabelKey> LABEL_KEY =
+      Collections.singletonList(LabelKey.create("key", "key description"));
+  private static final List<LabelValue> LABEL_VALUES =
+      Collections.singletonList(LabelValue.create("value"));
+  private static final List<LabelValue> LABEL_VALUES1 =
+      Collections.singletonList(LabelValue.create("value1"));
+  private static final List<LabelValue> DEFAULT_LABEL_VALUES =
+      Collections.singletonList(UNSET_VALUE);
+  private static final Map<LabelKey, LabelValue> EMPTY_CONSTANT_LABELS =
+      Collections.<LabelKey, LabelValue>emptyMap();
+
+  private static final Timestamp START_TIME = Timestamp.create(60, 0);
+  private static final Duration ONE_MINUTE = Duration.create(60, 0);
+  private final TestClock testClock = TestClock.create();
+  private static final MetricDescriptor METRIC_DESCRIPTOR =
+      MetricDescriptor.create(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.CUMULATIVE_INT64, LABEL_KEY);
+  private final LongCumulativeImpl longCumulativeMetric =
+      new LongCumulativeImpl(
+          METRIC_NAME,
+          METRIC_DESCRIPTION,
+          METRIC_UNIT,
+          LABEL_KEY,
+          EMPTY_CONSTANT_LABELS,
+          START_TIME);
+
+  @Before
+  public void setUp() {
+    testClock.setTime(START_TIME);
+  }
+
+  @Test
+  public void getOrCreateTimeSeries_WithNullLabelValues() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    longCumulativeMetric.getOrCreateTimeSeries(null);
+  }
+
+  @Test
+  public void getOrCreateTimeSeries_WithNullElement() {
+    List<LabelKey> labelKeys =
+        Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
+    List<LabelValue> labelValues = Arrays.asList(LabelValue.create("value1"), null);
+
+    LongCumulativeImpl longCumulative =
+        new LongCumulativeImpl(
+            METRIC_NAME,
+            METRIC_DESCRIPTION,
+            METRIC_UNIT,
+            labelKeys,
+            EMPTY_CONSTANT_LABELS,
+            START_TIME);
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValue");
+    longCumulative.getOrCreateTimeSeries(labelValues);
+  }
+
+  @Test
+  public void getOrCreateTimeSeries_WithInvalidLabelSize() {
+    List<LabelValue> labelValues =
+        Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
+
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
+    longCumulativeMetric.getOrCreateTimeSeries(labelValues);
+  }
+
+  @Test
+  public void getOrCreateTimeSeries() {
+    LongPoint point = longCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES);
+    point.add(100);
+    LongPoint point1 = longCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES);
+    point1.add(500);
+    assertThat(point).isSameInstanceAs(point1);
+
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    Metric metric = longCumulativeMetric.getMetric(testClock);
+    assertThat(metric)
+        .isEqualTo(
+            Metric.createWithOneTimeSeries(
+                METRIC_DESCRIPTOR,
+                TimeSeries.createWithOnePoint(
+                    LABEL_VALUES, Point.create(Value.longValue(600), endTime), START_TIME)));
+  }
+
+  @Test
+  public void getOrCreateTimeSeries_IgnoreNegativePointValues() {
+    LongPoint point = longCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES);
+    point.add(-100);
+    point.add(25);
+    point.add(-33);
+
+    testClock.advanceTime(ONE_MINUTE);
+    Metric metric = longCumulativeMetric.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(METRIC_DESCRIPTOR);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(1);
+    assertThat(metric.getTimeSeriesList().get(0).getPoints().size()).isEqualTo(1);
+    assertThat(metric.getTimeSeriesList().get(0).getPoints().get(0).getValue())
+        .isEqualTo(Value.longValue(25));
+  }
+
+  @Test
+  public void getDefaultTimeSeries() {
+    LongPoint point = longCumulativeMetric.getDefaultTimeSeries();
+    point.add(100);
+
+    LongPoint point1 = longCumulativeMetric.getDefaultTimeSeries();
+    point1.add(-100);
+
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    Metric metric = longCumulativeMetric.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric)
+        .isEqualTo(
+            Metric.createWithOneTimeSeries(
+                METRIC_DESCRIPTOR,
+                TimeSeries.createWithOnePoint(
+                    DEFAULT_LABEL_VALUES,
+                    Point.create(Value.longValue(100), endTime),
+                    START_TIME)));
+    assertThat(point).isSameInstanceAs(point1);
+  }
+
+  @Test
+  public void removeTimeSeries() {
+    longCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES);
+    assertThat(longCumulativeMetric.getMetric(testClock))
+        .isEqualTo(
+            Metric.createWithOneTimeSeries(
+                METRIC_DESCRIPTOR,
+                TimeSeries.createWithOnePoint(
+                    LABEL_VALUES, Point.create(Value.longValue(0), START_TIME), START_TIME)));
+
+    longCumulativeMetric.removeTimeSeries(LABEL_VALUES);
+    assertThat(longCumulativeMetric.getMetric(testClock)).isNull();
+  }
+
+  @Test
+  public void removeTimeSeries_WithNullLabelValues() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("labelValues");
+    longCumulativeMetric.removeTimeSeries(null);
+  }
+
+  @Test
+  public void clear() {
+    LongPoint longPoint = longCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES);
+    longPoint.add(100);
+
+    Metric metric = longCumulativeMetric.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(METRIC_DESCRIPTOR);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(1);
+
+    longCumulativeMetric.clear();
+    assertThat(longCumulativeMetric.getMetric(testClock)).isNull();
+  }
+
+  @Test
+  public void withConstantLabels() {
+    List<LabelKey> labelKeys =
+        Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
+    List<LabelValue> labelValues =
+        Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
+    LabelKey constantKey = LabelKey.create("constant_key", "desc");
+    LabelValue constantValue = LabelValue.create("constant_value");
+    Map<LabelKey, LabelValue> constantLabels =
+        Collections.<LabelKey, LabelValue>singletonMap(constantKey, constantValue);
+    LongCumulativeImpl longCumulative =
+        new LongCumulativeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, constantLabels, START_TIME);
+
+    LongPoint longPoint = longCumulative.getOrCreateTimeSeries(labelValues);
+    longPoint.add(1);
+    longPoint.add(2);
+
+    LongPoint defaultPoint = longCumulative.getDefaultTimeSeries();
+    defaultPoint.add(100);
+
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    allKeys.add(constantKey);
+    MetricDescriptor expectedDescriptor =
+        MetricDescriptor.create(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.CUMULATIVE_INT64, allKeys);
+
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+    List<LabelValue> allValues = new ArrayList<>(labelValues);
+    allValues.add(constantValue);
+    List<TimeSeries> expectedTimeSeriesList = new ArrayList<TimeSeries>();
+    TimeSeries defaultTimeSeries =
+        TimeSeries.createWithOnePoint(
+            Arrays.asList(UNSET_VALUE, UNSET_VALUE, constantValue),
+            Point.create(Value.longValue(100), endTime),
+            START_TIME);
+    expectedTimeSeriesList.add(
+        TimeSeries.createWithOnePoint(
+            allValues, Point.create(Value.longValue(3), endTime), START_TIME));
+    expectedTimeSeriesList.add(defaultTimeSeries);
+
+    Metric metric = longCumulative.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(expectedDescriptor);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(2);
+    assertThat(metric.getTimeSeriesList()).containsExactlyElementsIn(expectedTimeSeriesList);
+
+    longCumulative.removeTimeSeries(labelValues);
+    Metric metric2 = longCumulative.getMetric(testClock);
+    assertThat(metric2).isNotNull();
+    assertThat(metric2.getTimeSeriesList()).containsExactly(defaultTimeSeries);
+  }
+
+  @Test
+  public void pointImpl_InstanceOf() {
+    LongPoint longPoint = longCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES);
+    assertThat(longPoint).isInstanceOf(LongCumulativeImpl.PointImpl.class);
+  }
+
+  @Test
+  public void multipleMetrics_GetMetric() {
+    LongPoint longPoint = longCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES);
+    longPoint.add(1);
+    longPoint.add(2);
+
+    LongPoint defaultPoint = longCumulativeMetric.getDefaultTimeSeries();
+    defaultPoint.add(100);
+
+    LongPoint longPoint1 = longCumulativeMetric.getOrCreateTimeSeries(LABEL_VALUES1);
+    longPoint1.add(-100);
+    longPoint1.add(-20);
+
+    testClock.advanceTime(ONE_MINUTE);
+    Timestamp endTime = testClock.now();
+
+    List<TimeSeries> expectedTimeSeriesList = new ArrayList<TimeSeries>();
+    expectedTimeSeriesList.add(
+        TimeSeries.createWithOnePoint(
+            LABEL_VALUES, Point.create(Value.longValue(3), endTime), START_TIME));
+    expectedTimeSeriesList.add(
+        TimeSeries.createWithOnePoint(
+            DEFAULT_LABEL_VALUES, Point.create(Value.longValue(100), endTime), START_TIME));
+    expectedTimeSeriesList.add(
+        TimeSeries.createWithOnePoint(
+            LABEL_VALUES1, Point.create(Value.longValue(0), endTime), START_TIME));
+
+    Metric metric = longCumulativeMetric.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(METRIC_DESCRIPTOR);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(3);
+    assertThat(metric.getTimeSeriesList()).containsExactlyElementsIn(expectedTimeSeriesList);
+  }
+
+  @Test
+  public void empty_GetMetrics() {
+    assertThat(longCumulativeMetric.getMetric(testClock)).isNull();
+  }
+
+  @Test
+  public void testEquals() {
+    List<LabelKey> labelKeys =
+        Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
+    List<LabelValue> labelValues =
+        Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
+
+    LongCumulativeImpl longCumulative =
+        new LongCumulativeImpl(
+            METRIC_NAME,
+            METRIC_DESCRIPTION,
+            METRIC_UNIT,
+            labelKeys,
+            EMPTY_CONSTANT_LABELS,
+            START_TIME);
+
+    LongPoint defaultPoint1 = longCumulative.getDefaultTimeSeries();
+    LongPoint defaultPoint2 = longCumulative.getDefaultTimeSeries();
+    LongPoint longPoint1 = longCumulative.getOrCreateTimeSeries(labelValues);
+    LongPoint longPoint2 = longCumulative.getOrCreateTimeSeries(labelValues);
+
+    new EqualsTester()
+        .addEqualityGroup(defaultPoint1, defaultPoint2)
+        .addEqualityGroup(longPoint1, longPoint2)
+        .testEquals();
+
+    longCumulative.clear();
+
+    LongPoint newDefaultPointAfterClear = longCumulative.getDefaultTimeSeries();
+    LongPoint newLongPointAfterClear = longCumulative.getOrCreateTimeSeries(labelValues);
+
+    longCumulative.removeTimeSeries(labelValues);
+    LongPoint newLongPointAfterRemove = longCumulative.getOrCreateTimeSeries(labelValues);
+
+    new EqualsTester()
+        .addEqualityGroup(defaultPoint1, defaultPoint2)
+        .addEqualityGroup(longPoint1, longPoint2)
+        .addEqualityGroup(newDefaultPointAfterClear)
+        .addEqualityGroup(newLongPointAfterClear)
+        .addEqualityGroup(newLongPointAfterRemove)
+        .testEquals();
+  }
+}
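
A minimal sketch of the constant-labels behaviour covered by the withConstantLabels tests above: constant label keys are appended to the descriptor and their values to every exported series, including the default one. Metric and label names below are illustrative, and the sketch is assumed to live in the implementation package.

    package io.opencensus.implcore.metrics;

    import io.opencensus.common.Timestamp;
    import io.opencensus.metrics.LabelKey;
    import io.opencensus.metrics.LabelValue;
    import io.opencensus.metrics.LongCumulative.LongPoint;
    import java.util.Collections;

    final class LongCumulativeConstantLabelsSketch {
      static void demo() {
        LabelKey constantKey = LabelKey.create("constant_key", "desc");
        LabelValue constantValue = LabelValue.create("constant_value");
        LongCumulativeImpl cumulative =
            new LongCumulativeImpl(
                "requests_total", // illustrative name, not from the patch
                "example description",
                "1",
                Collections.singletonList(LabelKey.create("key1", "desc")),
                Collections.singletonMap(constantKey, constantValue),
                Timestamp.create(60, 0));

        LongPoint point =
            cumulative.getOrCreateTimeSeries(Collections.singletonList(LabelValue.create("v1")));
        point.add(3);
        // The exported descriptor lists [key1, constant_key]; this series carries
        // [v1, constant_value], while the default series substitutes the unset
        // placeholder for key1 and still appends constant_value.
      }
    }
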
diff --git a/impl_core/src/test/java/io/opencensus/implcore/metrics/LongGaugeImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/metrics/LongGaugeImplTest.java
index e83bb64..27e75f5 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/metrics/LongGaugeImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/metrics/LongGaugeImplTest.java
@@ -35,6 +35,7 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -57,6 +58,8 @@
       Collections.singletonList(LabelValue.create("value1"));
   private static final List<LabelValue> DEFAULT_LABEL_VALUES =
       Collections.singletonList(UNSET_VALUE);
+  private static final Map<LabelKey, LabelValue> EMPTY_CONSTANT_LABELS =
+      Collections.<LabelKey, LabelValue>emptyMap();
 
   private static final Timestamp TEST_TIME = Timestamp.create(1234, 123);
   private final TestClock testClock = TestClock.create(TEST_TIME);
@@ -64,7 +67,8 @@
       MetricDescriptor.create(
           METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.GAUGE_INT64, LABEL_KEY);
   private final LongGaugeImpl longGaugeMetric =
-      new LongGaugeImpl(METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, LABEL_KEY);
+      new LongGaugeImpl(
+          METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, LABEL_KEY, EMPTY_CONSTANT_LABELS);
 
   @Test
   public void getOrCreateTimeSeries_WithNullLabelValues() {
@@ -80,9 +84,10 @@
     List<LabelValue> labelValues = Arrays.asList(LabelValue.create("value1"), null);
 
     LongGaugeImpl longGauge =
-        new LongGaugeImpl(METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys);
+        new LongGaugeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, EMPTY_CONSTANT_LABELS);
     thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelValue element should not be null.");
+    thrown.expectMessage("labelValue");
     longGauge.getOrCreateTimeSeries(labelValues);
   }
 
@@ -92,7 +97,7 @@
         Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
 
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Incorrect number of labels.");
+    thrown.expectMessage("Label Keys and Label Values don't have same size.");
     longGaugeMetric.getOrCreateTimeSeries(labelValues);
   }
 
@@ -111,7 +116,7 @@
                 METRIC_DESCRIPTOR,
                 TimeSeries.createWithOnePoint(
                     LABEL_VALUES, Point.create(Value.longValue(500), TEST_TIME), null)));
-    assertThat(point).isSameAs(point1);
+    assertThat(point).isSameInstanceAs(point1);
   }
 
   @Test
@@ -149,7 +154,7 @@
                 METRIC_DESCRIPTOR,
                 TimeSeries.createWithOnePoint(
                     DEFAULT_LABEL_VALUES, Point.create(Value.longValue(400), TEST_TIME), null)));
-    assertThat(point).isSameAs(point1);
+    assertThat(point).isSameInstanceAs(point1);
   }
 
   @Test
@@ -194,7 +199,8 @@
     List<LabelKey> labelKeys =
         Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
     LongGaugeImpl longGauge =
-        new LongGaugeImpl(METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys);
+        new LongGaugeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, EMPTY_CONSTANT_LABELS);
     LongPoint defaultPoint = longGauge.getDefaultTimeSeries();
     defaultPoint.set(-230);
 
@@ -207,6 +213,57 @@
   }
 
   @Test
+  public void withConstantLabels() {
+    List<LabelKey> labelKeys =
+        Arrays.asList(LabelKey.create("key1", "desc"), LabelKey.create("key2", "desc"));
+    List<LabelValue> labelValues =
+        Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
+    LabelKey constantKey = LabelKey.create("constant_key", "desc");
+    LabelValue constantValue = LabelValue.create("constant_value");
+    Map<LabelKey, LabelValue> constantLabels =
+        Collections.<LabelKey, LabelValue>singletonMap(constantKey, constantValue);
+    LongGaugeImpl longGauge =
+        new LongGaugeImpl(METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, constantLabels);
+
+    LongPoint longPoint = longGauge.getOrCreateTimeSeries(labelValues);
+    longPoint.add(1);
+    longPoint.add(2);
+
+    LongPoint defaultPoint = longGauge.getDefaultTimeSeries();
+    defaultPoint.set(100);
+
+    List<LabelKey> allKeys = new ArrayList<>(labelKeys);
+    allKeys.add(constantKey);
+    MetricDescriptor expectedDescriptor =
+        MetricDescriptor.create(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, Type.GAUGE_INT64, allKeys);
+
+    List<LabelValue> allValues = new ArrayList<>(labelValues);
+    allValues.add(constantValue);
+    List<TimeSeries> expectedTimeSeriesList = new ArrayList<TimeSeries>();
+    TimeSeries defaultTimeSeries =
+        TimeSeries.createWithOnePoint(
+            Arrays.asList(UNSET_VALUE, UNSET_VALUE, constantValue),
+            Point.create(Value.longValue(100), TEST_TIME),
+            null);
+    expectedTimeSeriesList.add(
+        TimeSeries.createWithOnePoint(
+            allValues, Point.create(Value.longValue(3), TEST_TIME), null));
+    expectedTimeSeriesList.add(defaultTimeSeries);
+
+    Metric metric = longGauge.getMetric(testClock);
+    assertThat(metric).isNotNull();
+    assertThat(metric.getMetricDescriptor()).isEqualTo(expectedDescriptor);
+    assertThat(metric.getTimeSeriesList().size()).isEqualTo(2);
+    assertThat(metric.getTimeSeriesList()).containsExactlyElementsIn(expectedTimeSeriesList);
+
+    longGauge.removeTimeSeries(labelValues);
+    Metric metric2 = longGauge.getMetric(testClock);
+    assertThat(metric2).isNotNull();
+    assertThat(metric2.getTimeSeriesList()).containsExactly(defaultTimeSeries);
+  }
+
+  @Test
   public void pointImpl_InstanceOf() {
     LongPoint longPoint = longGaugeMetric.getOrCreateTimeSeries(LABEL_VALUES);
     assertThat(longPoint).isInstanceOf(LongGaugeImpl.PointImpl.class);
@@ -256,7 +313,8 @@
         Arrays.asList(LabelValue.create("value1"), LabelValue.create("value2"));
 
     LongGaugeImpl longGauge =
-        new LongGaugeImpl(METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys);
+        new LongGaugeImpl(
+            METRIC_NAME, METRIC_DESCRIPTION, METRIC_UNIT, labelKeys, EMPTY_CONSTANT_LABELS);
 
     LongPoint defaultPoint1 = longGauge.getDefaultTimeSeries();
     LongPoint defaultPoint2 = longGauge.getDefaultTimeSeries();
diff --git a/impl_core/src/test/java/io/opencensus/implcore/metrics/MetricRegistryImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/metrics/MetricRegistryImplTest.java
index 68bfda3..ff9c5c8 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/metrics/MetricRegistryImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/metrics/MetricRegistryImplTest.java
@@ -29,6 +29,7 @@
 import io.opencensus.metrics.LabelValue;
 import io.opencensus.metrics.LongGauge;
 import io.opencensus.metrics.LongGauge.LongPoint;
+import io.opencensus.metrics.MetricOptions;
 import io.opencensus.metrics.export.Metric;
 import io.opencensus.metrics.export.MetricDescriptor;
 import io.opencensus.metrics.export.MetricDescriptor.Type;
@@ -36,9 +37,11 @@
 import io.opencensus.metrics.export.TimeSeries;
 import io.opencensus.metrics.export.Value;
 import io.opencensus.testing.common.TestClock;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -50,29 +53,42 @@
 public class MetricRegistryImplTest {
   @Rule public ExpectedException thrown = ExpectedException.none();
 
-  private static final String NAME = "name";
-  private static final String NAME_2 = "name2";
-  private static final String NAME_3 = "name3";
-  private static final String NAME_4 = "name4";
-  private static final String DESCRIPTION = "description";
+  private static final String NAME = "test_name";
+  private static final String NAME_2 = "test_name2";
+  private static final String NAME_3 = "test_name3";
+  private static final String NAME_4 = "test_name4";
+  private static final String DESCRIPTION = "test_description";
   private static final String UNIT = "1";
-  private static final List<LabelKey> LABEL_KEY =
-      Collections.singletonList(LabelKey.create("key", "key description"));
-  private static final List<LabelValue> LABEL_VALUES =
-      Collections.singletonList(LabelValue.create("value"));
+  private static final LabelKey LABEL_KEY = LabelKey.create("test_key", "test key description");
+  private static final LabelKey LABEL_KEY_2 = LabelKey.create("constant_key", "constant label key");
+  private static final List<LabelKey> LABEL_KEYS = Collections.singletonList(LABEL_KEY);
+  private static final List<LabelKey> ALL_KEYS = Arrays.asList(LABEL_KEY, LABEL_KEY_2);
+  private static final LabelValue LABEL_VALUE = LabelValue.create("test_value");
+  private static final LabelValue LABEL_VALUE_2 = LabelValue.create("constant_value");
+  private static final List<LabelValue> LABEL_VALUES = Collections.singletonList(LABEL_VALUE);
+  private static final List<LabelValue> ALL_VALUES = Arrays.asList(LABEL_VALUE, LABEL_VALUE_2);
+  private static final Map<LabelKey, LabelValue> CONSTANT_LABELS =
+      Collections.singletonMap(LABEL_KEY_2, LABEL_VALUE_2);
+  private static final MetricOptions METRIC_OPTIONS =
+      MetricOptions.builder()
+          .setDescription(DESCRIPTION)
+          .setUnit(UNIT)
+          .setLabelKeys(LABEL_KEYS)
+          .setConstantLabels(CONSTANT_LABELS)
+          .build();
 
   private static final Timestamp TEST_TIME = Timestamp.create(1234, 123);
   private final TestClock testClock = TestClock.create(TEST_TIME);
   private final MetricRegistryImpl metricRegistry = new MetricRegistryImpl(testClock);
 
   private static final MetricDescriptor LONG_METRIC_DESCRIPTOR =
-      MetricDescriptor.create(NAME, DESCRIPTION, UNIT, Type.GAUGE_INT64, LABEL_KEY);
+      MetricDescriptor.create(NAME, DESCRIPTION, UNIT, Type.GAUGE_INT64, ALL_KEYS);
   private static final MetricDescriptor DOUBLE_METRIC_DESCRIPTOR =
-      MetricDescriptor.create(NAME_2, DESCRIPTION, UNIT, Type.GAUGE_DOUBLE, LABEL_KEY);
+      MetricDescriptor.create(NAME_2, DESCRIPTION, UNIT, Type.GAUGE_DOUBLE, ALL_KEYS);
   private static final MetricDescriptor DERIVED_LONG_METRIC_DESCRIPTOR =
-      MetricDescriptor.create(NAME_3, DESCRIPTION, UNIT, Type.GAUGE_INT64, LABEL_KEY);
+      MetricDescriptor.create(NAME_3, DESCRIPTION, UNIT, Type.GAUGE_INT64, ALL_KEYS);
   private static final MetricDescriptor DERIVED_DOUBLE_METRIC_DESCRIPTOR =
-      MetricDescriptor.create(NAME_4, DESCRIPTION, UNIT, Type.GAUGE_DOUBLE, LABEL_KEY);
+      MetricDescriptor.create(NAME_4, DESCRIPTION, UNIT, Type.GAUGE_DOUBLE, ALL_KEYS);
 
   private static final ToLongFunction<Object> longFunction =
       new ToLongFunction<Object>() {
@@ -93,149 +109,33 @@
   public void addLongGauge_NullName() {
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("name");
-    metricRegistry.addLongGauge(null, DESCRIPTION, UNIT, LABEL_KEY);
-  }
-
-  @Test
-  public void addLongGauge_NullDescription() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("description");
-    metricRegistry.addLongGauge(NAME, null, UNIT, LABEL_KEY);
-  }
-
-  @Test
-  public void addLongGauge_NullUnit() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("unit");
-    metricRegistry.addLongGauge(NAME, DESCRIPTION, null, LABEL_KEY);
-  }
-
-  @Test
-  public void addLongGauge_NullLabels() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKeys");
-    metricRegistry.addLongGauge(NAME, DESCRIPTION, UNIT, null);
-  }
-
-  @Test
-  public void addLongGauge_WithNullElement() {
-    List<LabelKey> labelKeys = Collections.singletonList(null);
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKey element should not be null.");
-    metricRegistry.addLongGauge(NAME, DESCRIPTION, UNIT, labelKeys);
+    metricRegistry.addLongGauge(null, METRIC_OPTIONS);
   }
 
   @Test
   public void addDoubleGauge_NullName() {
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("name");
-    metricRegistry.addDoubleGauge(null, DESCRIPTION, UNIT, LABEL_KEY);
-  }
-
-  @Test
-  public void addDoubleGauge_NullDescription() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("description");
-    metricRegistry.addDoubleGauge(NAME_2, null, UNIT, LABEL_KEY);
-  }
-
-  @Test
-  public void addDoubleGauge_NullUnit() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("unit");
-    metricRegistry.addDoubleGauge(NAME_2, DESCRIPTION, null, LABEL_KEY);
-  }
-
-  @Test
-  public void addDoubleGauge_NullLabels() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKeys");
-    metricRegistry.addDoubleGauge(NAME_2, DESCRIPTION, UNIT, null);
-  }
-
-  @Test
-  public void addDoubleGauge_WithNullElement() {
-    List<LabelKey> labelKeys = Collections.singletonList(null);
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKey element should not be null.");
-    metricRegistry.addDoubleGauge(NAME_2, DESCRIPTION, UNIT, labelKeys);
+    metricRegistry.addDoubleGauge(null, METRIC_OPTIONS);
   }
 
   @Test
   public void addDerivedLongGauge_NullName() {
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("name");
-    metricRegistry.addDerivedLongGauge(null, DESCRIPTION, UNIT, LABEL_KEY);
-  }
-
-  @Test
-  public void addDerivedLongGauge_NullDescription() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("description");
-    metricRegistry.addDerivedLongGauge(NAME_3, null, UNIT, LABEL_KEY);
-  }
-
-  @Test
-  public void addDerivedLongGauge_NullUnit() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("unit");
-    metricRegistry.addDerivedLongGauge(NAME_3, DESCRIPTION, null, LABEL_KEY);
-  }
-
-  @Test
-  public void addDerivedLongGauge_NullLabels() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKeys");
-    metricRegistry.addDerivedLongGauge(NAME_3, DESCRIPTION, UNIT, null);
-  }
-
-  @Test
-  public void addDerivedLongGauge_WithNullElement() {
-    List<LabelKey> labelKeys = Collections.singletonList(null);
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKey element should not be null.");
-    metricRegistry.addDerivedLongGauge(NAME_3, DESCRIPTION, UNIT, labelKeys);
+    metricRegistry.addDerivedLongGauge(null, METRIC_OPTIONS);
   }
 
   @Test
   public void addDerivedDoubleGauge_NullName() {
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("name");
-    metricRegistry.addDerivedDoubleGauge(null, DESCRIPTION, UNIT, LABEL_KEY);
-  }
-
-  @Test
-  public void addDerivedDoubleGauge_NullDescription() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("description");
-    metricRegistry.addDerivedDoubleGauge(NAME_4, null, UNIT, LABEL_KEY);
-  }
-
-  @Test
-  public void addDerivedDoubleGauge_NullUnit() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("unit");
-    metricRegistry.addDerivedDoubleGauge(NAME_4, DESCRIPTION, null, LABEL_KEY);
-  }
-
-  @Test
-  public void addDerivedDoubleGauge_NullLabels() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKeys");
-    metricRegistry.addDerivedDoubleGauge(NAME_4, DESCRIPTION, UNIT, null);
-  }
-
-  @Test
-  public void addDerivedDoubleGauge_WithNullElement() {
-    List<LabelKey> labelKeys = Collections.singletonList(null);
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("labelKey element should not be null.");
-    metricRegistry.addDerivedDoubleGauge(NAME_4, DESCRIPTION, UNIT, labelKeys);
+    metricRegistry.addDerivedDoubleGauge(null, METRIC_OPTIONS);
   }
 
   @Test
   public void addLongGauge_GetMetrics() {
-    LongGauge longGauge = metricRegistry.addLongGauge(NAME, DESCRIPTION, UNIT, LABEL_KEY);
+    LongGauge longGauge = metricRegistry.addLongGauge(NAME, METRIC_OPTIONS);
     longGauge.getOrCreateTimeSeries(LABEL_VALUES);
 
     Collection<Metric> metricCollections = metricRegistry.getMetricProducer().getMetrics();
@@ -245,12 +145,12 @@
             Metric.createWithOneTimeSeries(
                 LONG_METRIC_DESCRIPTOR,
                 TimeSeries.createWithOnePoint(
-                    LABEL_VALUES, Point.create(Value.longValue(0), TEST_TIME), null)));
+                    ALL_VALUES, Point.create(Value.longValue(0), TEST_TIME), null)));
   }
 
   @Test
   public void addDoubleGauge_GetMetrics() {
-    DoubleGauge doubleGauge = metricRegistry.addDoubleGauge(NAME_2, DESCRIPTION, UNIT, LABEL_KEY);
+    DoubleGauge doubleGauge = metricRegistry.addDoubleGauge(NAME_2, METRIC_OPTIONS);
     doubleGauge.getOrCreateTimeSeries(LABEL_VALUES);
     Collection<Metric> metricCollections = metricRegistry.getMetricProducer().getMetrics();
     assertThat(metricCollections.size()).isEqualTo(1);
@@ -259,13 +159,12 @@
             Metric.createWithOneTimeSeries(
                 DOUBLE_METRIC_DESCRIPTOR,
                 TimeSeries.createWithOnePoint(
-                    LABEL_VALUES, Point.create(Value.doubleValue(0.0), TEST_TIME), null)));
+                    ALL_VALUES, Point.create(Value.doubleValue(0.0), TEST_TIME), null)));
   }
 
   @Test
   public void addDerivedLongGauge_GetMetrics() {
-    DerivedLongGauge derivedLongGauge =
-        metricRegistry.addDerivedLongGauge(NAME_3, DESCRIPTION, UNIT, LABEL_KEY);
+    DerivedLongGauge derivedLongGauge = metricRegistry.addDerivedLongGauge(NAME_3, METRIC_OPTIONS);
     derivedLongGauge.createTimeSeries(LABEL_VALUES, null, longFunction);
     Collection<Metric> metricCollections = metricRegistry.getMetricProducer().getMetrics();
     assertThat(metricCollections.size()).isEqualTo(1);
@@ -274,13 +173,13 @@
             Metric.createWithOneTimeSeries(
                 DERIVED_LONG_METRIC_DESCRIPTOR,
                 TimeSeries.createWithOnePoint(
-                    LABEL_VALUES, Point.create(Value.longValue(5), TEST_TIME), null)));
+                    ALL_VALUES, Point.create(Value.longValue(5), TEST_TIME), null)));
   }
 
   @Test
   public void addDerivedDoubleGauge_GetMetrics() {
     DerivedDoubleGauge derivedDoubleGauge =
-        metricRegistry.addDerivedDoubleGauge(NAME_4, DESCRIPTION, UNIT, LABEL_KEY);
+        metricRegistry.addDerivedDoubleGauge(NAME_4, METRIC_OPTIONS);
     derivedDoubleGauge.createTimeSeries(LABEL_VALUES, null, doubleFunction);
     Collection<Metric> metricCollections = metricRegistry.getMetricProducer().getMetrics();
     assertThat(metricCollections.size()).isEqualTo(1);
@@ -289,7 +188,7 @@
             Metric.createWithOneTimeSeries(
                 DERIVED_DOUBLE_METRIC_DESCRIPTOR,
                 TimeSeries.createWithOnePoint(
-                    LABEL_VALUES, Point.create(Value.doubleValue(5.0), TEST_TIME), null)));
+                    ALL_VALUES, Point.create(Value.doubleValue(5.0), TEST_TIME), null)));
   }
 
   @Test
@@ -299,29 +198,27 @@
 
   @Test
   public void checkInstanceOf() {
-    assertThat(metricRegistry.addLongGauge(NAME, DESCRIPTION, UNIT, LABEL_KEY))
-        .isInstanceOf(LongGaugeImpl.class);
-    assertThat(metricRegistry.addDoubleGauge(NAME_2, DESCRIPTION, UNIT, LABEL_KEY))
+    assertThat(metricRegistry.addLongGauge(NAME, METRIC_OPTIONS)).isInstanceOf(LongGaugeImpl.class);
+    assertThat(metricRegistry.addDoubleGauge(NAME_2, METRIC_OPTIONS))
         .isInstanceOf(DoubleGaugeImpl.class);
-    assertThat(metricRegistry.addDerivedLongGauge(NAME_3, DESCRIPTION, UNIT, LABEL_KEY))
+    assertThat(metricRegistry.addDerivedLongGauge(NAME_3, METRIC_OPTIONS))
         .isInstanceOf(DerivedLongGaugeImpl.class);
-    assertThat(metricRegistry.addDerivedDoubleGauge(NAME_4, DESCRIPTION, UNIT, LABEL_KEY))
+    assertThat(metricRegistry.addDerivedDoubleGauge(NAME_4, METRIC_OPTIONS))
         .isInstanceOf(DerivedDoubleGaugeImpl.class);
   }
 
   @Test
   public void getMetrics() {
-    LongGauge longGauge = metricRegistry.addLongGauge(NAME, DESCRIPTION, UNIT, LABEL_KEY);
+    LongGauge longGauge = metricRegistry.addLongGauge(NAME, METRIC_OPTIONS);
     LongPoint longPoint = longGauge.getOrCreateTimeSeries(LABEL_VALUES);
     longPoint.set(200);
-    DoubleGauge doubleGauge = metricRegistry.addDoubleGauge(NAME_2, DESCRIPTION, UNIT, LABEL_KEY);
+    DoubleGauge doubleGauge = metricRegistry.addDoubleGauge(NAME_2, METRIC_OPTIONS);
     DoublePoint doublePoint = doubleGauge.getOrCreateTimeSeries(LABEL_VALUES);
     doublePoint.set(-300.13);
-    DerivedLongGauge derivedLongGauge =
-        metricRegistry.addDerivedLongGauge(NAME_3, DESCRIPTION, UNIT, LABEL_KEY);
+    DerivedLongGauge derivedLongGauge = metricRegistry.addDerivedLongGauge(NAME_3, METRIC_OPTIONS);
     derivedLongGauge.createTimeSeries(LABEL_VALUES, null, longFunction);
     DerivedDoubleGauge derivedDoubleGauge =
-        metricRegistry.addDerivedDoubleGauge(NAME_4, DESCRIPTION, UNIT, LABEL_KEY);
+        metricRegistry.addDerivedDoubleGauge(NAME_4, METRIC_OPTIONS);
     derivedDoubleGauge.createTimeSeries(LABEL_VALUES, null, doubleFunction);
 
     Collection<Metric> metricCollections = metricRegistry.getMetricProducer().getMetrics();
@@ -331,26 +228,63 @@
             Metric.createWithOneTimeSeries(
                 LONG_METRIC_DESCRIPTOR,
                 TimeSeries.createWithOnePoint(
-                    LABEL_VALUES, Point.create(Value.longValue(200), TEST_TIME), null)),
+                    ALL_VALUES, Point.create(Value.longValue(200), TEST_TIME), null)),
             Metric.createWithOneTimeSeries(
                 DOUBLE_METRIC_DESCRIPTOR,
                 TimeSeries.createWithOnePoint(
-                    LABEL_VALUES, Point.create(Value.doubleValue(-300.13), TEST_TIME), null)),
+                    ALL_VALUES, Point.create(Value.doubleValue(-300.13), TEST_TIME), null)),
             Metric.createWithOneTimeSeries(
                 DERIVED_LONG_METRIC_DESCRIPTOR,
                 TimeSeries.createWithOnePoint(
-                    LABEL_VALUES, Point.create(Value.longValue(5), TEST_TIME), null)),
+                    ALL_VALUES, Point.create(Value.longValue(5), TEST_TIME), null)),
             Metric.createWithOneTimeSeries(
                 DERIVED_DOUBLE_METRIC_DESCRIPTOR,
                 TimeSeries.createWithOnePoint(
-                    LABEL_VALUES, Point.create(Value.doubleValue(5.0), TEST_TIME), null)));
+                    ALL_VALUES, Point.create(Value.doubleValue(5.0), TEST_TIME), null)));
   }
 
   @Test
-  public void registerDifferentMetricSameName() {
-    metricRegistry.addLongGauge(NAME, DESCRIPTION, UNIT, LABEL_KEY);
+  public void shouldReturnSameObjectOnMultipleRegisterCall() {
+    LongGauge longGauge = metricRegistry.addLongGauge(NAME, METRIC_OPTIONS);
+    LongPoint longPoint = longGauge.getOrCreateTimeSeries(LABEL_VALUES);
+    longPoint.set(200);
+
+    LongGauge longGauge1 = metricRegistry.addLongGauge(NAME, METRIC_OPTIONS);
+    LongPoint longPoint1 =
+        longGauge1.getOrCreateTimeSeries(Collections.singletonList(LABEL_VALUE_2));
+    longPoint1.set(300);
+
+    assertThat(longGauge).isEqualTo(longGauge1);
+    Collection<Metric> metricCollections = metricRegistry.getMetricProducer().getMetrics();
+    assertThat(metricCollections.size()).isEqualTo(1);
+    assertThat(metricCollections)
+        .containsExactly(
+            Metric.create(
+                LONG_METRIC_DESCRIPTOR,
+                Arrays.asList(
+                    TimeSeries.createWithOnePoint(
+                        Arrays.asList(LABEL_VALUE, LABEL_VALUE_2),
+                        Point.create(Value.longValue(200), TEST_TIME),
+                        null),
+                    TimeSeries.createWithOnePoint(
+                        Arrays.asList(LABEL_VALUE_2, LABEL_VALUE_2),
+                        Point.create(Value.longValue(300), TEST_TIME),
+                        null))));
+  }
+
+  @Test
+  public void shouldThrowWhenRegisterExistingMetricWithDiffType() {
+    metricRegistry.addLongGauge(NAME, DESCRIPTION, UNIT, LABEL_KEYS);
     thrown.expect(IllegalArgumentException.class);
     thrown.expectMessage("A different metric with the same name already registered.");
-    metricRegistry.addDoubleGauge(NAME, DESCRIPTION, UNIT, LABEL_KEY);
+    metricRegistry.addDoubleGauge(NAME, DESCRIPTION, UNIT, LABEL_KEYS);
+  }
+
+  @Test
+  public void shouldThrowWhenRegisterExistingMetricWithDiffDesc() {
+    metricRegistry.addLongGauge(NAME, DESCRIPTION, UNIT, LABEL_KEYS);
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("A different metric with the same name already registered.");
+    metricRegistry.addLongGauge(NAME, "duplicate", UNIT, LABEL_KEYS);
   }
 }
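
A minimal sketch of the MetricOptions-based registration path these tests now exercise, assuming the public MetricRegistry exposes addLongGauge(String, MetricOptions) in the same shape as the implementation under test; the metric and label names are illustrative only.

    import io.opencensus.metrics.LabelKey;
    import io.opencensus.metrics.LabelValue;
    import io.opencensus.metrics.LongGauge;
    import io.opencensus.metrics.MetricOptions;
    import io.opencensus.metrics.MetricRegistry;
    import io.opencensus.metrics.Metrics;
    import java.util.Collections;

    final class MetricOptionsSketch {
      static void demo() {
        MetricRegistry registry = Metrics.getMetricRegistry();
        MetricOptions options =
            MetricOptions.builder()
                .setDescription("example description")
                .setUnit("1")
                .setLabelKeys(Collections.singletonList(LabelKey.create("my_key", "desc")))
                .setConstantLabels(
                    Collections.singletonMap(
                        LabelKey.create("host", "desc"), LabelValue.create("host-1")))
                .build();

        // Re-registering the same name with identical options returns the same
        // gauge instance, as shouldReturnSameObjectOnMultipleRegisterCall asserts.
        LongGauge gauge = registry.addLongGauge("my_metric", options);
        gauge.getDefaultTimeSeries().set(42);
      }
    }
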
diff --git a/impl_core/src/test/java/io/opencensus/implcore/metrics/export/MetricProducerManagerImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/metrics/export/MetricProducerManagerImplTest.java
index e549dad..841aa5e 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/metrics/export/MetricProducerManagerImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/metrics/export/MetricProducerManagerImplTest.java
@@ -64,7 +64,7 @@
     assertThat(metricProducerSet).containsExactly(metricProducer);
     metricProducerManager.add(metricProducer);
     // Returns the same object.
-    assertThat(metricProducerManager.getAllMetricProducer()).isSameAs(metricProducerSet);
+    assertThat(metricProducerManager.getAllMetricProducer()).isSameInstanceAs(metricProducerSet);
   }
 
   @Test
@@ -105,7 +105,7 @@
     assertThat(metricProducerSet).containsExactly(metricProducer);
     metricProducerManager.remove(metricProducerOther);
     // Returns the same object.
-    assertThat(metricProducerManager.getAllMetricProducer()).isSameAs(metricProducerSet);
+    assertThat(metricProducerManager.getAllMetricProducer()).isSameInstanceAs(metricProducerSet);
   }
 
   @Test
diff --git a/impl_core/src/test/java/io/opencensus/implcore/stats/IntervalBucketTest.java b/impl_core/src/test/java/io/opencensus/implcore/stats/IntervalBucketTest.java
index 39a53e1..fd9a147 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/stats/IntervalBucketTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/stats/IntervalBucketTest.java
@@ -21,6 +21,7 @@
 import io.opencensus.common.Duration;
 import io.opencensus.common.Timestamp;
 import io.opencensus.implcore.stats.MutableAggregation.MutableMean;
+import io.opencensus.metrics.data.AttachmentValue;
 import io.opencensus.stats.Aggregation.Mean;
 import io.opencensus.stats.Measure.MeasureDouble;
 import io.opencensus.tags.TagValue;
@@ -93,9 +94,9 @@
     IntervalBucket bucket = new IntervalBucket(START, MINUTE, MEAN, MEASURE_DOUBLE);
     List<TagValue> tagValues1 = Arrays.<TagValue>asList(TagValue.create("VALUE1"));
     List<TagValue> tagValues2 = Arrays.<TagValue>asList(TagValue.create("VALUE2"));
-    bucket.record(tagValues1, 5.0, Collections.<String, String>emptyMap(), START);
-    bucket.record(tagValues1, 15.0, Collections.<String, String>emptyMap(), START);
-    bucket.record(tagValues2, 10.0, Collections.<String, String>emptyMap(), START);
+    bucket.record(tagValues1, 5.0, Collections.<String, AttachmentValue>emptyMap(), START);
+    bucket.record(tagValues1, 15.0, Collections.<String, AttachmentValue>emptyMap(), START);
+    bucket.record(tagValues2, 10.0, Collections.<String, AttachmentValue>emptyMap(), START);
     assertThat(bucket.getTagValueAggregationMap().keySet()).containsExactly(tagValues1, tagValues2);
     MutableMean mutableMean1 = (MutableMean) bucket.getTagValueAggregationMap().get(tagValues1);
     MutableMean mutableMean2 = (MutableMean) bucket.getTagValueAggregationMap().get(tagValues2);
diff --git a/impl_core/src/test/java/io/opencensus/implcore/stats/MeasureMapInternalTest.java b/impl_core/src/test/java/io/opencensus/implcore/stats/MeasureMapInternalTest.java
index 19e8a6c..079c67d 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/stats/MeasureMapInternalTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/stats/MeasureMapInternalTest.java
@@ -19,6 +19,8 @@
 import static com.google.common.truth.Truth.assertThat;
 
 import com.google.common.collect.Lists;
+import io.opencensus.metrics.data.AttachmentValue;
+import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
 import io.opencensus.stats.Measure;
 import io.opencensus.stats.Measure.MeasureDouble;
 import io.opencensus.stats.Measure.MeasureLong;
@@ -34,6 +36,10 @@
 @RunWith(JUnit4.class)
 public class MeasureMapInternalTest {
 
+  private static final AttachmentValue ATTACHMENT_VALUE_1 = AttachmentValueString.create("v1");
+  private static final AttachmentValue ATTACHMENT_VALUE_2 = AttachmentValueString.create("v2");
+  private static final AttachmentValue ATTACHMENT_VALUE_3 = AttachmentValueString.create("v3");
+
   @Test
   public void testPutDouble() {
     MeasureMapInternal metrics = MeasureMapInternal.builder().put(M1, 44.4).build();
@@ -50,11 +56,12 @@
   public void testPutAttachment() {
     MeasureMapInternal metrics =
         MeasureMapInternal.builder()
-            .putAttachment("k1", "v1")
-            .putAttachment("k2", "v2")
-            .putAttachment("k1", "v3")
+            .putAttachment("k1", ATTACHMENT_VALUE_1)
+            .putAttachment("k2", ATTACHMENT_VALUE_2)
+            .putAttachment("k1", ATTACHMENT_VALUE_3)
             .build();
-    assertThat(metrics.getAttachments()).containsExactly("k1", "v3", "k2", "v2");
+    assertThat(metrics.getAttachments())
+        .containsExactly("k1", ATTACHMENT_VALUE_3, "k2", ATTACHMENT_VALUE_2);
     assertContains(metrics);
   }
 
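
A minimal sketch of recording a measurement with the typed attachment values the updated assertions expect, assuming the public MeasureMap accepts AttachmentValue the same way the internal builder above does; the measure and attachment key are illustrative only.

    import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
    import io.opencensus.stats.Measure.MeasureDouble;
    import io.opencensus.stats.Stats;

    final class AttachmentValueSketch {
      private static final MeasureDouble LATENCY =
          MeasureDouble.create("my_latency", "example latency", "ms");

      static void demo() {
        Stats.getStatsRecorder()
            .newMeasureMap()
            .put(LATENCY, 17.0)
            // Attachments now carry AttachmentValue instances instead of raw Strings;
            // later puts with the same key overwrite earlier ones.
            .putAttachment("trace_id", AttachmentValueString.create("span-abc"))
            .record();
      }
    }
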
diff --git a/impl_core/src/test/java/io/opencensus/implcore/stats/MetricUtilsTest.java b/impl_core/src/test/java/io/opencensus/implcore/stats/MetricUtilsTest.java
index 66e971f..f5a35d7 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/stats/MetricUtilsTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/stats/MetricUtilsTest.java
@@ -19,7 +19,6 @@
 import static com.google.common.truth.Truth.assertThat;
 
 import io.opencensus.common.Duration;
-import io.opencensus.common.Timestamp;
 import io.opencensus.metrics.LabelKey;
 import io.opencensus.metrics.LabelValue;
 import io.opencensus.metrics.export.MetricDescriptor;
@@ -82,7 +81,9 @@
           MEAN,
           Collections.singletonList(KEY),
           INTERVAL);
-  private static final Timestamp TIMESTAMP = Timestamp.fromMillis(1000);
+  private static final View VIEW_3 =
+      View.create(
+          VIEW_NAME, VIEW_DESCRIPTION, MEASURE_DOUBLE, COUNT, Collections.singletonList(KEY));
 
   @Test
   public void viewToMetricDescriptor() {
@@ -96,6 +97,17 @@
   }
 
   @Test
+  public void viewToMetricDescriptor_Count() {
+    MetricDescriptor metricDescriptor = MetricUtils.viewToMetricDescriptor(VIEW_3);
+    assertThat(metricDescriptor).isNotNull();
+    assertThat(metricDescriptor.getName()).isEqualTo(VIEW_NAME.asString());
+    assertThat(metricDescriptor.getUnit()).isEqualTo(MetricUtils.COUNT_UNIT);
+    assertThat(metricDescriptor.getType()).isEqualTo(Type.CUMULATIVE_INT64);
+    assertThat(metricDescriptor.getDescription()).isEqualTo(VIEW_DESCRIPTION);
+    assertThat(metricDescriptor.getLabelKeys()).containsExactly(LabelKey.create(KEY.getName(), ""));
+  }
+
+  @Test
   public void viewToMetricDescriptor_NoIntervalViews() {
     MetricDescriptor metricDescriptor = MetricUtils.viewToMetricDescriptor(VIEW_2);
     assertThat(metricDescriptor).isNull();
diff --git a/impl_core/src/test/java/io/opencensus/implcore/stats/MutableAggregationTest.java b/impl_core/src/test/java/io/opencensus/implcore/stats/MutableAggregationTest.java
index a6139e5..33be3f3 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/stats/MutableAggregationTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/stats/MutableAggregationTest.java
@@ -28,6 +28,9 @@
 import io.opencensus.implcore.stats.MutableAggregation.MutableMean;
 import io.opencensus.implcore.stats.MutableAggregation.MutableSumDouble;
 import io.opencensus.implcore.stats.MutableAggregation.MutableSumLong;
+import io.opencensus.metrics.data.AttachmentValue;
+import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
+import io.opencensus.metrics.data.Exemplar;
 import io.opencensus.metrics.export.Distribution;
 import io.opencensus.metrics.export.Distribution.Bucket;
 import io.opencensus.metrics.export.Distribution.BucketOptions;
@@ -36,7 +39,6 @@
 import io.opencensus.stats.AggregationData;
 import io.opencensus.stats.AggregationData.CountData;
 import io.opencensus.stats.AggregationData.DistributionData;
-import io.opencensus.stats.AggregationData.DistributionData.Exemplar;
 import io.opencensus.stats.AggregationData.LastValueDataDouble;
 import io.opencensus.stats.AggregationData.LastValueDataLong;
 import io.opencensus.stats.AggregationData.MeanData;
@@ -65,6 +67,11 @@
   private static final BucketBoundaries BUCKET_BOUNDARIES_EMPTY =
       BucketBoundaries.create(Collections.<Double>emptyList());
   private static final Timestamp TIMESTAMP = Timestamp.create(60, 0);
+  private static final AttachmentValue ATTACHMENT_VALUE_1 = AttachmentValueString.create("v1");
+  private static final AttachmentValue ATTACHMENT_VALUE_2 = AttachmentValueString.create("v2");
+  private static final AttachmentValue ATTACHMENT_VALUE_3 = AttachmentValueString.create("v3");
+  private static final AttachmentValue ATTACHMENT_VALUE_4 = AttachmentValueString.create("v4");
+  private static final AttachmentValue ATTACHMENT_VALUE_5 = AttachmentValueString.create("v5");
 
   @Test
   public void testCreateEmpty() {
@@ -79,8 +86,6 @@
     MutableDistribution mutableDistribution = MutableDistribution.create(bucketBoundaries);
     assertThat(mutableDistribution.getMean()).isWithin(TOLERANCE).of(0);
     assertThat(mutableDistribution.getCount()).isEqualTo(0);
-    assertThat(mutableDistribution.getMin()).isPositiveInfinity();
-    assertThat(mutableDistribution.getMax()).isNegativeInfinity();
     assertThat(mutableDistribution.getSumOfSquaredDeviations()).isWithin(TOLERANCE).of(0);
     assertThat(mutableDistribution.getBucketCounts()).isEqualTo(new long[4]);
     assertThat(mutableDistribution.getExemplars()).isEqualTo(new Exemplar[4]);
@@ -99,8 +104,8 @@
 
   @Test
   public void testNoBoundaries() {
-    List<Double> buckets = Arrays.asList();
-    MutableDistribution noBoundaries = MutableDistribution.create(BucketBoundaries.create(buckets));
+    MutableDistribution noBoundaries =
+        MutableDistribution.create(BucketBoundaries.create(Collections.<Double>emptyList()));
     assertThat(noBoundaries.getBucketCounts().length).isEqualTo(1);
     assertThat(noBoundaries.getBucketCounts()[0]).isEqualTo(0);
   }
@@ -121,7 +126,7 @@
 
     for (double value : values) {
       for (MutableAggregation aggregation : aggregations) {
-        aggregation.add(value, Collections.<String, String>emptyMap(), TIMESTAMP);
+        aggregation.add(value, Collections.<String, AttachmentValue>emptyMap(), TIMESTAMP);
       }
     }
 
@@ -139,8 +144,7 @@
         TOLERANCE);
     assertAggregationDataEquals(
         aggregations.get(4).toAggregationData(),
-        AggregationData.DistributionData.create(
-            4.0, 5, -5.0, 20.0, 372, Arrays.asList(0L, 2L, 2L, 1L)),
+        AggregationData.DistributionData.create(4.0, 5, 372, Arrays.asList(4L, 1L)),
         TOLERANCE);
     assertAggregationDataEquals(
         aggregations.get(5).toAggregationData(),
@@ -158,13 +162,13 @@
     MutableDistribution mutableDistributionNoHistogram =
         MutableDistribution.create(BUCKET_BOUNDARIES_EMPTY);
     List<Double> values = Arrays.asList(-1.0, 1.0, -5.0, 20.0, 5.0);
-    List<Map<String, String>> attachmentsList =
-        ImmutableList.<Map<String, String>>of(
-            Collections.<String, String>singletonMap("k1", "v1"),
-            Collections.<String, String>singletonMap("k2", "v2"),
-            Collections.<String, String>singletonMap("k3", "v3"),
-            Collections.<String, String>singletonMap("k4", "v4"),
-            Collections.<String, String>singletonMap("k5", "v5"));
+    List<Map<String, AttachmentValue>> attachmentsList =
+        ImmutableList.<Map<String, AttachmentValue>>of(
+            Collections.<String, AttachmentValue>singletonMap("k1", ATTACHMENT_VALUE_1),
+            Collections.<String, AttachmentValue>singletonMap("k2", ATTACHMENT_VALUE_2),
+            Collections.<String, AttachmentValue>singletonMap("k3", ATTACHMENT_VALUE_3),
+            Collections.<String, AttachmentValue>singletonMap("k4", ATTACHMENT_VALUE_4),
+            Collections.<String, AttachmentValue>singletonMap("k5", ATTACHMENT_VALUE_5));
     List<Timestamp> timestamps =
         Arrays.asList(
             Timestamp.fromMillis(500),
@@ -181,8 +185,6 @@
     // bucket, only the last one will be kept.
     List<Exemplar> expected =
         Arrays.<Exemplar>asList(
-            null,
-            Exemplar.create(values.get(2), timestamps.get(2), attachmentsList.get(2)),
             Exemplar.create(values.get(4), timestamps.get(4), attachmentsList.get(4)),
             Exemplar.create(values.get(3), timestamps.get(3), attachmentsList.get(3)));
     assertThat(mutableDistribution.getExemplars())
@@ -210,12 +212,12 @@
 
     for (double val : Arrays.asList(-1.0, -5.0)) {
       for (MutableAggregation aggregation : aggregations1) {
-        aggregation.add(val, Collections.<String, String>emptyMap(), TIMESTAMP);
+        aggregation.add(val, Collections.<String, AttachmentValue>emptyMap(), TIMESTAMP);
       }
     }
     for (double val : Arrays.asList(10.0, 50.0)) {
       for (MutableAggregation aggregation : aggregations2) {
-        aggregation.add(val, Collections.<String, String>emptyMap(), TIMESTAMP);
+        aggregation.add(val, Collections.<String, AttachmentValue>emptyMap(), TIMESTAMP);
       }
     }
 
@@ -246,25 +248,24 @@
     MutableDistribution distribution3 = MutableDistribution.create(BUCKET_BOUNDARIES);
 
     for (double val : Arrays.asList(5.0, -5.0)) {
-      distribution1.add(val, Collections.<String, String>emptyMap(), TIMESTAMP);
+      distribution1.add(val, Collections.<String, AttachmentValue>emptyMap(), TIMESTAMP);
     }
     for (double val : Arrays.asList(10.0, 20.0)) {
-      distribution2.add(val, Collections.<String, String>emptyMap(), TIMESTAMP);
+      distribution2.add(val, Collections.<String, AttachmentValue>emptyMap(), TIMESTAMP);
     }
     for (double val : Arrays.asList(-10.0, 15.0, -15.0, -20.0)) {
-      distribution3.add(val, Collections.<String, String>emptyMap(), TIMESTAMP);
+      distribution3.add(val, Collections.<String, AttachmentValue>emptyMap(), TIMESTAMP);
     }
 
     MutableDistribution combined = MutableDistribution.create(BUCKET_BOUNDARIES);
     combined.combine(distribution1, 1.0); // distribution1 will be combined
     combined.combine(distribution2, 0.6); // distribution2 will be ignored
-    verifyMutableDistribution(combined, 0, 2, -5, 5, 50.0, new long[] {0, 1, 1, 0}, TOLERANCE);
+    verifyMutableDistribution(combined, 0, 2, 50.0, new long[] {2, 0});
 
     combined.combine(distribution2, 1.0); // distribution2 will be combined
-    verifyMutableDistribution(combined, 7.5, 4, -5, 20, 325.0, new long[] {0, 1, 1, 2}, TOLERANCE);
-
+    verifyMutableDistribution(combined, 7.5, 4, 325.0, new long[] {2, 2});
     combined.combine(distribution3, 1.0); // distribution3 will be combined
-    verifyMutableDistribution(combined, 0, 8, -20, 20, 1500.0, new long[] {2, 2, 1, 3}, TOLERANCE);
+    verifyMutableDistribution(combined, 0, 8, 1500.0, new long[] {5, 3});
   }
 
   @Test
@@ -274,14 +275,7 @@
     assertThat(MutableCount.create().toAggregationData()).isEqualTo(CountData.create(0));
     assertThat(MutableMean.create().toAggregationData()).isEqualTo(MeanData.create(0, 0));
     assertThat(MutableDistribution.create(BUCKET_BOUNDARIES).toAggregationData())
-        .isEqualTo(
-            DistributionData.create(
-                0,
-                0,
-                Double.POSITIVE_INFINITY,
-                Double.NEGATIVE_INFINITY,
-                0,
-                Arrays.asList(0L, 0L, 0L, 0L)));
+        .isEqualTo(DistributionData.create(0, 0, 0, Arrays.asList(0L, 0L)));
     assertThat(MutableLastValueDouble.create().toAggregationData())
         .isEqualTo(LastValueDataDouble.create(Double.NaN));
     assertThat(MutableLastValueLong.create().toAggregationData())
@@ -299,8 +293,6 @@
     assertThat(MutableMean.create().toPoint(TIMESTAMP))
         .isEqualTo(Point.create(Value.doubleValue(0), TIMESTAMP));
 
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("bucket boundary should be > 0");
     assertThat(MutableDistribution.create(BUCKET_BOUNDARIES).toPoint(TIMESTAMP))
         .isEqualTo(
             Point.create(
@@ -310,11 +302,7 @@
                         0,
                         0,
                         BucketOptions.explicitOptions(BUCKET_BOUNDARIES.getBoundaries()),
-                        Arrays.asList(
-                            Bucket.create(0),
-                            Bucket.create(0),
-                            Bucket.create(0),
-                            Bucket.create(0)))),
+                        Arrays.asList(Bucket.create(0), Bucket.create(0)))),
                 TIMESTAMP));
   }
 
@@ -322,17 +310,12 @@
       MutableDistribution mutableDistribution,
       double mean,
       long count,
-      double min,
-      double max,
       double sumOfSquaredDeviations,
-      long[] bucketCounts,
-      double tolerance) {
-    assertThat(mutableDistribution.getMean()).isWithin(tolerance).of(mean);
+      long[] bucketCounts) {
+    assertThat(mutableDistribution.getMean()).isWithin(MutableAggregationTest.TOLERANCE).of(mean);
     assertThat(mutableDistribution.getCount()).isEqualTo(count);
-    assertThat(mutableDistribution.getMin()).isWithin(tolerance).of(min);
-    assertThat(mutableDistribution.getMax()).isWithin(tolerance).of(max);
     assertThat(mutableDistribution.getSumOfSquaredDeviations())
-        .isWithin(tolerance)
+        .isWithin(MutableAggregationTest.TOLERANCE)
         .of(sumOfSquaredDeviations);
     assertThat(mutableDistribution.getBucketCounts()).isEqualTo(bucketCounts);
   }
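For context, a sketch of the add(...) call shape these tests now exercise: a value is recorded together with a Map<String, AttachmentValue> of exemplar attachments and a timestamp. Only calls visible in the hunk above are used; the class name, package placement and boundary values are assumptions for illustration.

package io.opencensus.implcore.stats;

import io.opencensus.common.Timestamp;
import io.opencensus.implcore.stats.MutableAggregation.MutableDistribution;
import io.opencensus.metrics.data.AttachmentValue;
import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
import io.opencensus.stats.BucketBoundaries;
import java.util.Arrays;
import java.util.Collections;

final class MutableDistributionSketch {
  static MutableDistribution recordOne() {
    MutableDistribution distribution =
        MutableDistribution.create(BucketBoundaries.create(Arrays.asList(1.0, 10.0)));
    // The attachment map type changed from Map<String, String> to Map<String, AttachmentValue>.
    distribution.add(
        5.0,
        Collections.<String, AttachmentValue>singletonMap(
            "k1", AttachmentValueString.create("v1")),
        Timestamp.fromMillis(1000));
    return distribution;
  }

  private MutableDistributionSketch() {}
}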
diff --git a/impl_core/src/test/java/io/opencensus/implcore/stats/MutableViewDataTest.java b/impl_core/src/test/java/io/opencensus/implcore/stats/MutableViewDataTest.java
index 06f50fe..03e03d7 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/stats/MutableViewDataTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/stats/MutableViewDataTest.java
@@ -19,6 +19,19 @@
 import static com.google.common.truth.Truth.assertThat;
 
 import io.opencensus.common.Timestamp;
+import io.opencensus.implcore.internal.CurrentState;
+import io.opencensus.implcore.tags.TagMapImpl;
+import io.opencensus.metrics.data.AttachmentValue;
+import io.opencensus.stats.Aggregation;
+import io.opencensus.stats.Aggregation.Count;
+import io.opencensus.stats.Aggregation.Distribution;
+import io.opencensus.stats.BucketBoundaries;
+import io.opencensus.stats.Measure.MeasureDouble;
+import io.opencensus.stats.View;
+import io.opencensus.stats.ViewData;
+import io.opencensus.tags.TagKey;
+import java.util.Arrays;
+import java.util.Collections;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
@@ -31,4 +44,53 @@
   public void testConstants() {
     assertThat(MutableViewData.ZERO_TIMESTAMP).isEqualTo(Timestamp.create(0, 0));
   }
+
+  @Test
+  public void testTimeRewindsOnCountViewNoThrow() {
+    // First we set up some buckets, THEN we rewind time to make sure nothing breaks.
+    View tester =
+        View.create(
+            View.Name.create("view"),
+            "Description",
+            MeasureDouble.create("name", "desc", "us"),
+            Count.create(),
+            Collections.singletonList(TagKey.create("KEY")));
+    Timestamp start = Timestamp.create(10000000, 0);
+    Timestamp validPointTime = Timestamp.create(10000010, 0);
+    CurrentState.State state = CurrentState.State.ENABLED;
+    MutableViewData viewData = MutableViewData.create(tester, start);
+    // Create a data point that will get thrown away.
+    viewData.record(
+        TagMapImpl.EMPTY, 1.0, validPointTime, Collections.<String, AttachmentValue>emptyMap());
+    // Rewind time and verify nothing throws.
+    Timestamp thePast = Timestamp.create(0, 0);
+    ViewData result = viewData.toViewData(thePast, state);
+    assertThat(result.getAggregationMap()).isEmpty();
+  }
+
+  @Test
+  public void testTimeRewindsOnDistributionViewNoThrow() {
+    // First we set up some buckets, THEN we rewind time to make sure nothing breaks.
+    Aggregation latencyDistribution =
+        Distribution.create(
+            BucketBoundaries.create(Arrays.asList(0.0, 25.0, 100.0, 200.0, 400.0, 800.0, 10000.0)));
+    View tester =
+        View.create(
+            View.Name.create("view"),
+            "Description",
+            MeasureDouble.create("name", "desc", "us"),
+            latencyDistribution,
+            Collections.singletonList(TagKey.create("KEY")));
+    Timestamp start = Timestamp.create(10000000, 0);
+    Timestamp validPointTime = Timestamp.create(10000010, 0);
+    CurrentState.State state = CurrentState.State.ENABLED;
+    MutableViewData viewData = MutableViewData.create(tester, start);
+    // Create a data point that will get thrown away.
+    viewData.record(
+        TagMapImpl.EMPTY, 1.0, validPointTime, Collections.<String, AttachmentValue>emptyMap());
+    // Rewind time and verify nothing throws.
+    Timestamp thePast = Timestamp.create(0, 0);
+    ViewData result = viewData.toViewData(thePast, state);
+    assertThat(result.getAggregationMap()).isEmpty();
+  }
 }
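A condensed sketch of the behavior the two new tests pin down: converting a MutableViewData at a timestamp earlier than its start time must not throw, and simply yields an empty aggregation map. The helper class below is illustrative only; it reuses the names and call shapes from the tests above.

package io.opencensus.implcore.stats;

import io.opencensus.common.Timestamp;
import io.opencensus.implcore.internal.CurrentState;
import io.opencensus.stats.ViewData;

final class RewoundClockSketch {
  // viewData is assumed to have been created with a later start time, as in the tests above.
  static boolean aggregationIsEmptyWhenClockRewinds(MutableViewData viewData) {
    ViewData snapshot = viewData.toViewData(Timestamp.create(0, 0), CurrentState.State.ENABLED);
    return snapshot.getAggregationMap().isEmpty();
  }

  private RewoundClockSketch() {}
}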
diff --git a/impl_core/src/test/java/io/opencensus/implcore/stats/RecordUtilsTest.java b/impl_core/src/test/java/io/opencensus/implcore/stats/RecordUtilsTest.java
index 1e22a7a..3328166 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/stats/RecordUtilsTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/stats/RecordUtilsTest.java
@@ -20,6 +20,7 @@
 
 import com.google.common.collect.ImmutableMap;
 import io.opencensus.implcore.stats.MutableAggregation.MutableDistribution;
+import io.opencensus.implcore.tags.TagValueWithMetadata;
 import io.opencensus.stats.Aggregation.Count;
 import io.opencensus.stats.Aggregation.Distribution;
 import io.opencensus.stats.Aggregation.LastValue;
@@ -35,6 +36,8 @@
 import io.opencensus.stats.Measure.MeasureDouble;
 import io.opencensus.stats.Measure.MeasureLong;
 import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagMetadata;
+import io.opencensus.tags.TagMetadata.TagTtl;
 import io.opencensus.tags.TagValue;
 import java.util.Arrays;
 import java.util.List;
@@ -52,11 +55,29 @@
       MeasureDouble.create("measure1", "description", "1");
   private static final MeasureLong MEASURE_LONG =
       MeasureLong.create("measure2", "description", "1");
+  private static final TagMetadata METADATA_UNLIMITED_PROPAGATION =
+      TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION);
   private static final TagKey ORIGINATOR = TagKey.create("originator");
   private static final TagKey CALLER = TagKey.create("caller");
   private static final TagKey METHOD = TagKey.create("method");
   private static final TagValue CALLER_V = TagValue.create("some caller");
   private static final TagValue METHOD_V = TagValue.create("some method");
+  private static final TagValue METHOD_V_2 = TagValue.create("some other method");
+  private static final TagValue METHOD_V_3 = TagValue.create("the third method");
+  private static final TagValue STATUS_V = TagValue.create("ok");
+  private static final TagValue STATUS_V_2 = TagValue.create("error");
+  private static final TagValueWithMetadata CALLER_V_WITH_MD =
+      TagValueWithMetadata.create(CALLER_V, METADATA_UNLIMITED_PROPAGATION);
+  private static final TagValueWithMetadata METHOD_V_WITH_MD =
+      TagValueWithMetadata.create(METHOD_V, METADATA_UNLIMITED_PROPAGATION);
+  private static final TagValueWithMetadata METHOD_V_2_WITH_MD =
+      TagValueWithMetadata.create(METHOD_V_2, METADATA_UNLIMITED_PROPAGATION);
+  private static final TagValueWithMetadata METHOD_V_3_WITH_MD =
+      TagValueWithMetadata.create(METHOD_V_3, METADATA_UNLIMITED_PROPAGATION);
+  private static final TagValueWithMetadata STATUS_V_WITH_MD =
+      TagValueWithMetadata.create(STATUS_V, METADATA_UNLIMITED_PROPAGATION);
+  private static final TagValueWithMetadata STATUS_V_2_WITH_MD =
+      TagValueWithMetadata.create(STATUS_V_2, METADATA_UNLIMITED_PROPAGATION);
 
   @Test
   public void testConstants() {
@@ -66,7 +87,8 @@
   @Test
   public void testGetTagValues() {
     List<TagKey> columns = Arrays.asList(CALLER, METHOD, ORIGINATOR);
-    Map<TagKey, TagValue> tags = ImmutableMap.of(CALLER, CALLER_V, METHOD, METHOD_V);
+    Map<TagKey, TagValueWithMetadata> tags =
+        ImmutableMap.of(CALLER, CALLER_V_WITH_MD, METHOD, METHOD_V_WITH_MD);
 
     assertThat(RecordUtils.getTagValues(tags, columns))
         .containsExactly(CALLER_V, METHOD_V, RecordUtils.UNKNOWN_TAG_VALUE)
@@ -74,6 +96,75 @@
   }
 
   @Test
+  public void testGetTagValues_MapDeprecatedRpcTag() {
+    List<TagKey> columns = Arrays.asList(RecordUtils.RPC_STATUS, RecordUtils.RPC_METHOD);
+    Map<TagKey, TagValueWithMetadata> tags =
+        ImmutableMap.of(
+            RecordUtils.GRPC_CLIENT_METHOD, METHOD_V_WITH_MD,
+            RecordUtils.GRPC_CLIENT_STATUS, STATUS_V_WITH_MD);
+
+    assertThat(RecordUtils.getTagValues(tags, columns))
+        .containsExactly(STATUS_V, METHOD_V)
+        .inOrder();
+  }
+
+  @Test
+  public void testGetTagValues_MapDeprecatedRpcTag_WithServerTag() {
+    List<TagKey> columns = Arrays.asList(RecordUtils.RPC_STATUS, RecordUtils.RPC_METHOD);
+    Map<TagKey, TagValueWithMetadata> tags =
+        ImmutableMap.of(
+            RecordUtils.GRPC_SERVER_METHOD, METHOD_V_WITH_MD,
+            RecordUtils.GRPC_SERVER_STATUS, STATUS_V_WITH_MD);
+
+    assertThat(RecordUtils.getTagValues(tags, columns))
+        .containsExactly(STATUS_V, METHOD_V)
+        .inOrder();
+  }
+
+  @Test
+  public void testGetTagValues_MapDeprecatedRpcTag_PreferClientTag() {
+    List<TagKey> columns = Arrays.asList(RecordUtils.RPC_STATUS, RecordUtils.RPC_METHOD);
+    Map<TagKey, TagValueWithMetadata> tags =
+        ImmutableMap.of(
+            RecordUtils.GRPC_SERVER_METHOD, METHOD_V_WITH_MD,
+            RecordUtils.GRPC_SERVER_STATUS, STATUS_V_WITH_MD,
+            RecordUtils.GRPC_CLIENT_METHOD, METHOD_V_2_WITH_MD,
+            RecordUtils.GRPC_CLIENT_STATUS, STATUS_V_2_WITH_MD);
+
+    // When both client and server new tags are present, client values take precedence.
+    assertThat(RecordUtils.getTagValues(tags, columns))
+        .containsExactly(STATUS_V_2, METHOD_V_2)
+        .inOrder();
+  }
+
+  @Test
+  public void testGetTagValues_WithOldMethodTag() {
+    List<TagKey> columns = Arrays.asList(RecordUtils.RPC_METHOD);
+    Map<TagKey, TagValueWithMetadata> tags =
+        ImmutableMap.of(
+            RecordUtils.GRPC_SERVER_METHOD, METHOD_V_WITH_MD,
+            RecordUtils.GRPC_CLIENT_METHOD, METHOD_V_2_WITH_MD,
+            RecordUtils.RPC_METHOD, METHOD_V_3_WITH_MD);
+
+    // When the old "method" tag is set, it always takes precedence.
+    assertThat(RecordUtils.getTagValues(tags, columns)).containsExactly(METHOD_V_3).inOrder();
+  }
+
+  @Test
+  public void testGetTagValues_WithNewTags() {
+    List<TagKey> columns =
+        Arrays.asList(RecordUtils.GRPC_CLIENT_METHOD, RecordUtils.GRPC_SERVER_METHOD);
+    Map<TagKey, TagValueWithMetadata> tags =
+        ImmutableMap.of(
+            RecordUtils.GRPC_SERVER_METHOD, METHOD_V_WITH_MD,
+            RecordUtils.GRPC_CLIENT_METHOD, METHOD_V_2_WITH_MD);
+
+    assertThat(RecordUtils.getTagValues(tags, columns))
+        .containsExactly(METHOD_V_2, METHOD_V)
+        .inOrder();
+  }
+
+  @Test
   public void createMutableAggregation() {
     BucketBoundaries bucketBoundaries = BucketBoundaries.create(Arrays.asList(-1.0, 0.0, 1.0));
 
@@ -108,9 +199,7 @@
         (MutableDistribution)
             RecordUtils.createMutableAggregation(
                 Distribution.create(bucketBoundaries), MEASURE_DOUBLE);
-    assertThat(mutableDistribution.getMin()).isPositiveInfinity();
-    assertThat(mutableDistribution.getMax()).isNegativeInfinity();
     assertThat(mutableDistribution.getSumOfSquaredDeviations()).isWithin(EPSILON).of(0);
-    assertThat(mutableDistribution.getBucketCounts()).isEqualTo(new long[4]);
+    assertThat(mutableDistribution.getBucketCounts()).isEqualTo(new long[2]);
   }
 }
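A sketch of the deprecated-RPC-tag mapping that the new getTagValues tests describe: when a view still aggregates on the old RPC_METHOD/RPC_STATUS keys, values recorded under the newer gRPC client/server keys are surfaced instead (client winning over server, and the old key winning over both). Everything below uses constants and factories visible in the hunk above; the List return type is inferred from the ordered assertions, and the class name is made up.

package io.opencensus.implcore.stats;

import com.google.common.collect.ImmutableMap;
import io.opencensus.implcore.tags.TagValueWithMetadata;
import io.opencensus.tags.TagKey;
import io.opencensus.tags.TagMetadata;
import io.opencensus.tags.TagMetadata.TagTtl;
import io.opencensus.tags.TagValue;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

final class DeprecatedRpcTagSketch {
  static List<TagValue> mapClientMethodToOldKey() {
    TagValueWithMetadata method =
        TagValueWithMetadata.create(
            TagValue.create("some method"), TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION));
    Map<TagKey, TagValueWithMetadata> tags =
        ImmutableMap.of(RecordUtils.GRPC_CLIENT_METHOD, method);
    // Yields the value "some method" for the deprecated "method" column.
    return RecordUtils.getTagValues(tags, Arrays.asList(RecordUtils.RPC_METHOD));
  }

  private DeprecatedRpcTagSketch() {}
}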
diff --git a/impl_core/src/test/java/io/opencensus/implcore/stats/StatsRecorderImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/stats/StatsRecorderImplTest.java
index bd8b5b8..abd4a29 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/stats/StatsRecorderImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/stats/StatsRecorderImplTest.java
@@ -26,12 +26,14 @@
 import io.opencensus.common.Timestamp;
 import io.opencensus.implcore.internal.SimpleEventQueue;
 import io.opencensus.implcore.stats.StatsTestUtil.SimpleTagContext;
+import io.opencensus.metrics.data.AttachmentValue;
+import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
+import io.opencensus.metrics.data.Exemplar;
 import io.opencensus.stats.Aggregation.Count;
 import io.opencensus.stats.Aggregation.Distribution;
 import io.opencensus.stats.Aggregation.Sum;
 import io.opencensus.stats.AggregationData.CountData;
 import io.opencensus.stats.AggregationData.DistributionData;
-import io.opencensus.stats.AggregationData.DistributionData.Exemplar;
 import io.opencensus.stats.BucketBoundaries;
 import io.opencensus.stats.Measure.MeasureDouble;
 import io.opencensus.stats.MeasureMap;
@@ -76,6 +78,9 @@
       Distribution.create(BucketBoundaries.create(Collections.<Double>emptyList()));
   private static final Timestamp START_TIME = Timestamp.fromMillis(0);
   private static final Duration ONE_SECOND = Duration.fromMillis(1000);
+  private static final AttachmentValue ATTACHMENT_VALUE_1 = AttachmentValueString.create("v1");
+  private static final AttachmentValue ATTACHMENT_VALUE_2 = AttachmentValueString.create("v2");
+  private static final AttachmentValue ATTACHMENT_VALUE_3 = AttachmentValueString.create("v3");
 
   private final TestClock testClock = TestClock.create();
   private final StatsComponent statsComponent =
@@ -114,10 +119,8 @@
             Arrays.asList(KEY),
             Cumulative.create());
     viewManager.registerView(view);
-    Context orig =
-        Context.current()
-            .withValue(ContextUtils.TAG_CONTEXT_KEY, new SimpleTagContext(Tag.create(KEY, VALUE)))
-            .attach();
+    TagContext tags = new SimpleTagContext(Tag.create(KEY, VALUE));
+    Context orig = ContextUtils.withValue(Context.current(), tags).attach();
     try {
       statsRecorder.newMeasureMap().put(MEASURE_DOUBLE, 1.0).record();
     } finally {
@@ -170,10 +173,10 @@
         (DistributionData) viewData.getAggregationMap().get(Collections.singletonList(VALUE));
     List<Exemplar> expected =
         Arrays.asList(
-            Exemplar.create(-20.0, Timestamp.create(4, 0), Collections.singletonMap("k3", "v1")),
-            Exemplar.create(-5.0, Timestamp.create(5, 0), Collections.singletonMap("k3", "v3")),
-            Exemplar.create(1.0, Timestamp.create(2, 0), Collections.singletonMap("k2", "v2")),
-            Exemplar.create(12.0, Timestamp.create(3, 0), Collections.singletonMap("k1", "v3")));
+            Exemplar.create(
+                1.0, Timestamp.create(2, 0), Collections.singletonMap("k2", ATTACHMENT_VALUE_2)),
+            Exemplar.create(
+                12.0, Timestamp.create(3, 0), Collections.singletonMap("k1", ATTACHMENT_VALUE_3)));
     assertThat(distributionData.getExemplars()).containsExactlyElementsIn(expected).inOrder();
   }
 
@@ -209,7 +212,7 @@
     CountData countData =
         (CountData) viewData.getAggregationMap().get(Collections.singletonList(VALUE));
     // Recording exemplars does not affect views with an aggregation other than distribution.
-    assertThat(countData.getCount()).isEqualTo(6L);
+    assertThat(countData.getCount()).isEqualTo(2L);
   }
 
   private void recordWithAttachments() {
@@ -222,7 +225,7 @@
     statsRecorder
         .newMeasureMap()
         .put(MEASURE_DOUBLE, -1.0)
-        .putAttachment("k1", "v1")
+        .putAttachment("k1", ATTACHMENT_VALUE_1)
         .record(context);
 
     testClock.advanceTime(ONE_SECOND); // 2nd second.
@@ -230,7 +233,7 @@
     statsRecorder
         .newMeasureMap()
         .put(MEASURE_DOUBLE, 1.0)
-        .putAttachment("k2", "v2")
+        .putAttachment("k2", ATTACHMENT_VALUE_2)
         .record(context);
 
     testClock.advanceTime(ONE_SECOND); // 3rd second.
@@ -238,7 +241,7 @@
     statsRecorder
         .newMeasureMap()
         .put(MEASURE_DOUBLE, 12.0)
-        .putAttachment("k1", "v3")
+        .putAttachment("k1", ATTACHMENT_VALUE_3)
         .record(context);
 
     testClock.advanceTime(ONE_SECOND); // 4th second.
@@ -246,7 +249,7 @@
     statsRecorder
         .newMeasureMap()
         .put(MEASURE_DOUBLE, -20.0)
-        .putAttachment("k3", "v1")
+        .putAttachment("k3", ATTACHMENT_VALUE_1)
         .record(context);
 
     testClock.advanceTime(ONE_SECOND); // 5th second.
@@ -254,7 +257,7 @@
     statsRecorder
         .newMeasureMap()
         .put(MEASURE_DOUBLE, -5.0)
-        .putAttachment("k3", "v3")
+        .putAttachment("k3", ATTACHMENT_VALUE_3)
         .record(context);
 
     testClock.advanceTime(ONE_SECOND); // 6th second.
@@ -291,6 +294,30 @@
   }
 
   @Test
+  public void record_MapDeprecatedRpcConstants() {
+    View view =
+        View.create(
+            VIEW_NAME,
+            "description",
+            MEASURE_DOUBLE,
+            Sum.create(),
+            Arrays.asList(RecordUtils.RPC_METHOD));
+
+    viewManager.registerView(view);
+    MeasureMap statsRecord = statsRecorder.newMeasureMap().put(MEASURE_DOUBLE, 1.0);
+    statsRecord.record(new SimpleTagContext(Tag.create(RecordUtils.GRPC_CLIENT_METHOD, VALUE)));
+    ViewData viewData = viewManager.getView(VIEW_NAME);
+
+    // The value recorded under the new gRPC client method tag is aggregated under the deprecated RPC method tag.
+    StatsTestUtil.assertAggregationMapEquals(
+        viewData.getAggregationMap(),
+        ImmutableMap.of(
+            Arrays.asList(VALUE),
+            StatsTestUtil.createAggregationData(Sum.create(), MEASURE_DOUBLE, 1.0)),
+        1e-6);
+  }
+
+  @Test
   @SuppressWarnings("deprecation")
   public void record_StatsDisabled() {
     View view =
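The same AttachmentValue migration shows up on the recorder surface: putAttachment now takes an AttachmentValue, as the recordWithAttachments changes above illustrate. A minimal caller-side sketch, assuming putAttachment(String, AttachmentValue) is exposed on the base MeasureMap as the test usage suggests, with the recorder, measure and tag context supplied by the caller:

import io.opencensus.metrics.data.AttachmentValue.AttachmentValueString;
import io.opencensus.stats.Measure.MeasureDouble;
import io.opencensus.stats.StatsRecorder;
import io.opencensus.tags.TagContext;

final class RecordWithAttachmentSketch {
  static void record(StatsRecorder statsRecorder, MeasureDouble measure, TagContext context) {
    statsRecorder
        .newMeasureMap()
        .put(measure, 12.0)
        // Was: .putAttachment("k1", "v3")
        .putAttachment("k1", AttachmentValueString.create("v3"))
        .record(context);
  }

  private RecordWithAttachmentSketch() {}
}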
diff --git a/impl_core/src/test/java/io/opencensus/implcore/stats/StatsTestUtil.java b/impl_core/src/test/java/io/opencensus/implcore/stats/StatsTestUtil.java
index ea1bf34..5b8f704 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/stats/StatsTestUtil.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/stats/StatsTestUtil.java
@@ -24,6 +24,7 @@
 import io.opencensus.common.Function;
 import io.opencensus.common.Functions;
 import io.opencensus.common.Timestamp;
+import io.opencensus.metrics.data.AttachmentValue;
 import io.opencensus.stats.Aggregation;
 import io.opencensus.stats.AggregationData;
 import io.opencensus.stats.AggregationData.CountData;
@@ -70,7 +71,7 @@
     MutableAggregation mutableAggregation =
         RecordUtils.createMutableAggregation(aggregation, measure);
     for (double value : values) {
-      mutableAggregation.add(value, Collections.<String, String>emptyMap(), EMPTY);
+      mutableAggregation.add(value, Collections.<String, AttachmentValue>emptyMap(), EMPTY);
     }
     return mutableAggregation.toAggregationData();
   }
@@ -192,16 +193,7 @@
         .isWithin(tolerance)
         .of(expected.getSumOfSquaredDeviations());
 
-    if (expected.getMax() == Double.NEGATIVE_INFINITY
-        && expected.getMin() == Double.POSITIVE_INFINITY) {
-      assertThat(actual.getMax()).isNegativeInfinity();
-      assertThat(actual.getMin()).isPositiveInfinity();
-    } else {
-      assertThat(actual.getMax()).isWithin(tolerance).of(expected.getMax());
-      assertThat(actual.getMin()).isWithin(tolerance).of(expected.getMin());
-    }
-
-    assertThat(removeTrailingZeros((actual).getBucketCounts()))
+    assertThat(removeTrailingZeros(actual.getBucketCounts()))
         .isEqualTo(removeTrailingZeros(expected.getBucketCounts()));
   }
 
diff --git a/impl_core/src/test/java/io/opencensus/implcore/stats/ViewManagerImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/stats/ViewManagerImplTest.java
index a4018b7..c5531c3 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/stats/ViewManagerImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/stats/ViewManagerImplTest.java
@@ -324,10 +324,10 @@
     testRecordInterval(
         MEASURE_DOUBLE,
         MEAN,
-        new double[] {20.0, -1.0, 1.0, -5.0, 5.0},
+        new double[] {20.0, 1.0, 1.0, 5.0, 5.0},
         9.0,
         30.0,
-        MeanData.create((19 * 0.6 + 1) / 4, 4),
+        MeanData.create((21 * 0.6 + 11) / 4, 4),
         MeanData.create(0.2 * 5 + 9, 1),
         MeanData.create(30.0, 1));
   }
@@ -341,7 +341,7 @@
         -5000,
         30,
         MeanData.create((3000 * 0.6 + 12000) / 4, 4),
-        MeanData.create(-4000, 1),
+        MeanData.create(0, 1),
         MeanData.create(30, 1));
   }
 
@@ -350,10 +350,10 @@
     testRecordInterval(
         MEASURE_DOUBLE,
         SUM,
-        new double[] {20.0, -1.0, 1.0, -5.0, 5.0},
+        new double[] {20.0, 1.0, 1.0, 5.0, 5.0},
         9.0,
         30.0,
-        SumDataDouble.create(19 * 0.6 + 1),
+        SumDataDouble.create(21 * 0.6 + 11),
         SumDataDouble.create(0.2 * 5 + 9),
         SumDataDouble.create(30.0));
   }
@@ -367,7 +367,7 @@
         -50,
         30,
         SumDataLong.create(Math.round(34 * 0.6 + 120)),
-        SumDataLong.create(-40),
+        SumDataLong.create(10),
         SumDataLong.create(30));
   }
 
@@ -393,7 +393,7 @@
         -5000,
         30,
         LastValueDataLong.create(5000),
-        LastValueDataLong.create(-5000),
+        LastValueDataLong.create(0),
         LastValueDataLong.create(30));
   }
 
diff --git a/impl_core/src/test/java/io/opencensus/implcore/tags/CurrentTagContextUtilsTest.java b/impl_core/src/test/java/io/opencensus/implcore/tags/CurrentTagMapUtilsTest.java
similarity index 64%
rename from impl_core/src/test/java/io/opencensus/implcore/tags/CurrentTagContextUtilsTest.java
rename to impl_core/src/test/java/io/opencensus/implcore/tags/CurrentTagMapUtilsTest.java
index 1a14ac6..047a138 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/tags/CurrentTagContextUtilsTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/tags/CurrentTagMapUtilsTest.java
@@ -32,9 +32,9 @@
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 
-/** Unit tests for {@link CurrentTagContextUtils}. */
+/** Unit tests for {@link CurrentTagMapUtils}. */
 @RunWith(JUnit4.class)
-public class CurrentTagContextUtilsTest {
+public class CurrentTagMapUtilsTest {
   private static final Tag TAG = Tag.create(TagKey.create("key"), TagValue.create("value"));
 
   private final TagContext tagContext =
@@ -47,17 +47,17 @@
       };
 
   @Test
-  public void testGetCurrentTagContext_DefaultContext() {
-    TagContext tags = CurrentTagContextUtils.getCurrentTagContext();
+  public void testGetCurrentTagMap_DefaultContext() {
+    TagContext tags = CurrentTagMapUtils.getCurrentTagMap();
     assertThat(tags).isNotNull();
     assertThat(tagContextToList(tags)).isEmpty();
   }
 
   @Test
-  public void testGetCurrentTagContext_ContextSetToNull() {
-    Context orig = Context.current().withValue(ContextUtils.TAG_CONTEXT_KEY, null).attach();
+  public void testGetCurrentTagMap_ContextSetToNull() {
+    Context orig = ContextUtils.withValue(Context.current(), null).attach();
     try {
-      TagContext tags = CurrentTagContextUtils.getCurrentTagContext();
+      TagContext tags = CurrentTagMapUtils.getCurrentTagMap();
       assertThat(tags).isNotNull();
       assertThat(tagContextToList(tags)).isEmpty();
     } finally {
@@ -66,37 +66,37 @@
   }
 
   @Test
-  public void testWithTagContext() {
-    assertThat(tagContextToList(CurrentTagContextUtils.getCurrentTagContext())).isEmpty();
-    Scope scopedTags = CurrentTagContextUtils.withTagContext(tagContext);
+  public void testWithTagMap() {
+    assertThat(tagContextToList(CurrentTagMapUtils.getCurrentTagMap())).isEmpty();
+    Scope scopedTags = CurrentTagMapUtils.withTagMap(tagContext);
     try {
-      assertThat(CurrentTagContextUtils.getCurrentTagContext()).isSameAs(tagContext);
+      assertThat(CurrentTagMapUtils.getCurrentTagMap()).isSameInstanceAs(tagContext);
     } finally {
       scopedTags.close();
     }
-    assertThat(tagContextToList(CurrentTagContextUtils.getCurrentTagContext())).isEmpty();
+    assertThat(tagContextToList(CurrentTagMapUtils.getCurrentTagMap())).isEmpty();
   }
 
   @Test
-  public void testWithTagContextUsingWrap() {
+  public void testWithTagMapUsingWrap() {
     Runnable runnable;
-    Scope scopedTags = CurrentTagContextUtils.withTagContext(tagContext);
+    Scope scopedTags = CurrentTagMapUtils.withTagMap(tagContext);
     try {
-      assertThat(CurrentTagContextUtils.getCurrentTagContext()).isSameAs(tagContext);
+      assertThat(CurrentTagMapUtils.getCurrentTagMap()).isSameInstanceAs(tagContext);
       runnable =
           Context.current()
               .wrap(
                   new Runnable() {
                     @Override
                     public void run() {
-                      assertThat(CurrentTagContextUtils.getCurrentTagContext())
-                          .isSameAs(tagContext);
+                      assertThat(CurrentTagMapUtils.getCurrentTagMap())
+                          .isSameInstanceAs(tagContext);
                     }
                   });
     } finally {
       scopedTags.close();
     }
-    assertThat(tagContextToList(CurrentTagContextUtils.getCurrentTagContext())).isEmpty();
+    assertThat(tagContextToList(CurrentTagMapUtils.getCurrentTagMap())).isEmpty();
     // When we run the runnable we will have the TagContext in the current Context.
     runnable.run();
   }
diff --git a/impl_core/src/test/java/io/opencensus/implcore/tags/ScopedTagContextsTest.java b/impl_core/src/test/java/io/opencensus/implcore/tags/ScopedTagMapTest.java
similarity index 64%
rename from impl_core/src/test/java/io/opencensus/implcore/tags/ScopedTagContextsTest.java
rename to impl_core/src/test/java/io/opencensus/implcore/tags/ScopedTagMapTest.java
index 6a8fe4c..ddbef6f 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/tags/ScopedTagContextsTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/tags/ScopedTagMapTest.java
@@ -25,6 +25,8 @@
 import io.opencensus.tags.Tag;
 import io.opencensus.tags.TagContext;
 import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagMetadata;
+import io.opencensus.tags.TagMetadata.TagTtl;
 import io.opencensus.tags.TagValue;
 import io.opencensus.tags.Tagger;
 import org.junit.Test;
@@ -32,16 +34,24 @@
 import org.junit.runners.JUnit4;
 
 /**
- * Unit tests for the methods in {@link TaggerImpl} and {@link TagContextBuilderImpl} that interact
- * with the current {@link TagContext}.
+ * Unit tests for the methods in {@link TaggerImpl} and {@link TagMapBuilderImpl} that interact with
+ * the current {@link TagContext}.
  */
 @RunWith(JUnit4.class)
-public class ScopedTagContextsTest {
+public class ScopedTagMapTest {
   private static final TagKey KEY_1 = TagKey.create("key 1");
   private static final TagKey KEY_2 = TagKey.create("key 2");
+  private static final TagKey KEY_3 = TagKey.create("key 3");
 
   private static final TagValue VALUE_1 = TagValue.create("value 1");
   private static final TagValue VALUE_2 = TagValue.create("value 2");
+  private static final TagValue VALUE_3 = TagValue.create("value 3");
+  private static final TagValue VALUE_4 = TagValue.create("value 4");
+
+  private static final TagMetadata METADATA_UNLIMITED_PROPAGATION =
+      TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION);
+  private static final TagMetadata METADATA_NO_PROPAGATION =
+      TagMetadata.create(TagTtl.NO_PROPAGATION);
 
   private final Tagger tagger = new TaggerImpl(new CurrentState(State.ENABLED));
 
@@ -49,7 +59,7 @@
   public void defaultTagContext() {
     TagContext defaultTagContext = tagger.getCurrentTagContext();
     assertThat(tagContextToList(defaultTagContext)).isEmpty();
-    assertThat(defaultTagContext).isInstanceOf(TagContextImpl.class);
+    assertThat(defaultTagContext).isInstanceOf(TagMapImpl.class);
   }
 
   @Test
@@ -58,7 +68,7 @@
     TagContext scopedTags = tagger.emptyBuilder().put(KEY_1, VALUE_1).build();
     Scope scope = tagger.withTagContext(scopedTags);
     try {
-      assertThat(tagger.getCurrentTagContext()).isSameAs(scopedTags);
+      assertThat(tagger.getCurrentTagContext()).isSameInstanceAs(scopedTags);
     } finally {
       scope.close();
     }
@@ -73,7 +83,7 @@
       TagContext newTags = tagger.currentBuilder().put(KEY_2, VALUE_2).build();
       assertThat(tagContextToList(newTags))
           .containsExactly(Tag.create(KEY_1, VALUE_1), Tag.create(KEY_2, VALUE_2));
-      assertThat(tagger.getCurrentTagContext()).isSameAs(scopedTags);
+      assertThat(tagger.getCurrentTagContext()).isSameInstanceAs(scopedTags);
     } finally {
       scope.close();
     }
@@ -104,7 +114,38 @@
       } finally {
         scope2.close();
       }
-      assertThat(tagger.getCurrentTagContext()).isSameAs(scopedTags);
+      assertThat(tagger.getCurrentTagContext()).isSameInstanceAs(scopedTags);
+    } finally {
+      scope1.close();
+    }
+  }
+
+  @Test
+  public void multiScopeTagMapWithMetadata() {
+    TagContext scopedTags =
+        tagger
+            .emptyBuilder()
+            .put(KEY_1, VALUE_1, METADATA_UNLIMITED_PROPAGATION)
+            .put(KEY_2, VALUE_2, METADATA_UNLIMITED_PROPAGATION)
+            .build();
+    Scope scope1 = tagger.withTagContext(scopedTags);
+    try { // Scope 1
+      Scope scope2 =
+          tagger
+              .currentBuilder()
+              .put(KEY_3, VALUE_3, METADATA_NO_PROPAGATION)
+              .put(KEY_2, VALUE_4, METADATA_NO_PROPAGATION)
+              .buildScoped();
+      try { // Scope 2
+        assertThat(tagContextToList(tagger.getCurrentTagContext()))
+            .containsExactly(
+                Tag.create(KEY_1, VALUE_1, METADATA_UNLIMITED_PROPAGATION),
+                Tag.create(KEY_2, VALUE_4, METADATA_NO_PROPAGATION),
+                Tag.create(KEY_3, VALUE_3, METADATA_NO_PROPAGATION));
+      } finally {
+        scope2.close(); // Close Scope 2
+      }
+      assertThat(tagger.getCurrentTagContext()).isSameInstanceAs(scopedTags);
     } finally {
       scope1.close();
     }
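For reference, a sketch of the three-argument put exercised by multiScopeTagMapWithMetadata: a tag value can be inserted together with TagMetadata that controls how far it propagates. Only the factories shown in the test above are used; how the Tagger is obtained is left to the caller.

import io.opencensus.tags.TagContext;
import io.opencensus.tags.TagKey;
import io.opencensus.tags.TagMetadata;
import io.opencensus.tags.TagMetadata.TagTtl;
import io.opencensus.tags.TagValue;
import io.opencensus.tags.Tagger;

final class TagMetadataSketch {
  static TagContext buildWithMetadata(Tagger tagger) {
    return tagger
        .emptyBuilder()
        // Propagates beyond this process.
        .put(TagKey.create("key 1"), TagValue.create("value 1"),
            TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION))
        // Stays local to this process.
        .put(TagKey.create("key 2"), TagValue.create("value 2"),
            TagMetadata.create(TagTtl.NO_PROPAGATION))
        .build();
  }

  private TagMetadataSketch() {}
}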
diff --git a/impl_core/src/test/java/io/opencensus/implcore/tags/TagContextImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/tags/TagMapImplTest.java
similarity index 62%
rename from impl_core/src/test/java/io/opencensus/implcore/tags/TagContextImplTest.java
rename to impl_core/src/test/java/io/opencensus/implcore/tags/TagMapImplTest.java
index 1859e08..ef565e8 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/tags/TagContextImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/tags/TagMapImplTest.java
@@ -29,6 +29,8 @@
 import io.opencensus.tags.TagContext;
 import io.opencensus.tags.TagContextBuilder;
 import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagMetadata;
+import io.opencensus.tags.TagMetadata.TagTtl;
 import io.opencensus.tags.TagValue;
 import io.opencensus.tags.Tagger;
 import java.util.Arrays;
@@ -41,51 +43,61 @@
 import org.junit.runners.JUnit4;
 
 /**
- * Tests for {@link TagContextImpl} and {@link TagContextBuilderImpl}.
+ * Tests for {@link TagMapImpl} and {@link TagMapBuilderImpl}.
  *
- * <p>Tests for {@link TagContextBuilderImpl#buildScoped()} are in {@link ScopedTagContextsTest}.
+ * <p>Tests for {@link TagMapBuilderImpl#buildScoped()} are in {@link ScopedTagMapTest}.
  */
 @RunWith(JUnit4.class)
-public class TagContextImplTest {
+public class TagMapImplTest {
   private final Tagger tagger = new TaggerImpl(new CurrentState(State.ENABLED));
 
+  private static final TagMetadata METADATA_UNLIMITED_PROPAGATION =
+      TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION);
+  private static final TagMetadata METADATA_NO_PROPAGATION =
+      TagMetadata.create(TagTtl.NO_PROPAGATION);
+
   private static final TagKey K1 = TagKey.create("k1");
   private static final TagKey K2 = TagKey.create("k2");
 
   private static final TagValue V1 = TagValue.create("v1");
   private static final TagValue V2 = TagValue.create("v2");
 
+  private static final TagValueWithMetadata VM1 =
+      TagValueWithMetadata.create(V1, METADATA_UNLIMITED_PROPAGATION);
+  private static final TagValueWithMetadata VM2 =
+      TagValueWithMetadata.create(V2, METADATA_UNLIMITED_PROPAGATION);
+
   @Rule public final ExpectedException thrown = ExpectedException.none();
 
   @Test
   public void getTags_empty() {
-    TagContextImpl tags = new TagContextImpl(ImmutableMap.<TagKey, TagValue>of());
+    TagMapImpl tags = new TagMapImpl(ImmutableMap.<TagKey, TagValueWithMetadata>of());
     assertThat(tags.getTags()).isEmpty();
   }
 
   @Test
   public void getTags_nonEmpty() {
-    TagContextImpl tags = new TagContextImpl(ImmutableMap.of(K1, V1, K2, V2));
-    assertThat(tags.getTags()).containsExactly(K1, V1, K2, V2);
+    TagMapImpl tags = new TagMapImpl(ImmutableMap.of(K1, VM1, K2, VM2));
+    assertThat(tags.getTags()).containsExactly(K1, VM1, K2, VM2);
   }
 
   @Test
   public void put_newKey() {
-    TagContext tags = new TagContextImpl(ImmutableMap.of(K1, V1));
-    assertThat(((TagContextImpl) tagger.toBuilder(tags).put(K2, V2).build()).getTags())
-        .containsExactly(K1, V1, K2, V2);
+    TagContext tags = new TagMapImpl(ImmutableMap.of(K1, VM1));
+    assertThat(((TagMapImpl) tagger.toBuilder(tags).put(K2, V2).build()).getTags())
+        .containsExactly(K1, VM1, K2, VM2);
   }
 
   @Test
   public void put_existingKey() {
-    TagContext tags = new TagContextImpl(ImmutableMap.of(K1, V1));
-    assertThat(((TagContextImpl) tagger.toBuilder(tags).put(K1, V2).build()).getTags())
-        .containsExactly(K1, V2);
+    TagContext tags = new TagMapImpl(ImmutableMap.of(K1, VM1));
+    assertThat(((TagMapImpl) tagger.toBuilder(tags).put(K1, V2).build()).getTags())
+        .containsExactly(K1, VM2);
   }
 
   @Test
   public void put_nullKey() {
-    TagContext tags = new TagContextImpl(ImmutableMap.of(K1, V1));
+    TagContext tags = new TagMapImpl(ImmutableMap.of(K1, VM1));
     TagContextBuilder builder = tagger.toBuilder(tags);
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("key");
@@ -94,7 +106,7 @@
 
   @Test
   public void put_nullValue() {
-    TagContext tags = new TagContextImpl(ImmutableMap.of(K1, V1));
+    TagContext tags = new TagMapImpl(ImmutableMap.of(K1, VM1));
     TagContextBuilder builder = tagger.toBuilder(tags);
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("value");
@@ -102,22 +114,36 @@
   }
 
   @Test
+  public void putLocal() {
+    TagContext tags1 = tagger.emptyBuilder().put(K1, V1, METADATA_NO_PROPAGATION).build();
+    TagContext tags2 = tagger.emptyBuilder().putLocal(K1, V1).build();
+    assertThat(tags1).isEqualTo(tags2);
+  }
+
+  @Test
+  public void putPropagating() {
+    TagContext tags1 = tagger.emptyBuilder().put(K1, V1, METADATA_UNLIMITED_PROPAGATION).build();
+    TagContext tags2 = tagger.emptyBuilder().putPropagating(K1, V1).build();
+    assertThat(tags1).isEqualTo(tags2);
+  }
+
+  @Test
   public void remove_existingKey() {
-    TagContext tags = new TagContextImpl(ImmutableMap.of(K1, V1, K2, V2));
-    assertThat(((TagContextImpl) tagger.toBuilder(tags).remove(K1).build()).getTags())
-        .containsExactly(K2, V2);
+    TagContext tags = new TagMapImpl(ImmutableMap.of(K1, VM1, K2, VM2));
+    assertThat(((TagMapImpl) tagger.toBuilder(tags).remove(K1).build()).getTags())
+        .containsExactly(K2, VM2);
   }
 
   @Test
   public void remove_differentKey() {
-    TagContext tags = new TagContextImpl(ImmutableMap.of(K1, V1));
-    assertThat(((TagContextImpl) tagger.toBuilder(tags).remove(K2).build()).getTags())
-        .containsExactly(K1, V1);
+    TagContext tags = new TagMapImpl(ImmutableMap.of(K1, VM1));
+    assertThat(((TagMapImpl) tagger.toBuilder(tags).remove(K2).build()).getTags())
+        .containsExactly(K1, VM1);
   }
 
   @Test
   public void remove_nullKey() {
-    TagContext tags = new TagContextImpl(ImmutableMap.of(K1, V1));
+    TagContext tags = new TagMapImpl(ImmutableMap.of(K1, VM1));
     TagContextBuilder builder = tagger.toBuilder(tags);
     thrown.expect(NullPointerException.class);
     thrown.expectMessage("key");
@@ -126,7 +152,7 @@
 
   @Test
   public void testIterator() {
-    TagContextImpl tags = new TagContextImpl(ImmutableMap.of(K1, V1, K2, V2));
+    TagMapImpl tags = new TagMapImpl(ImmutableMap.of(K1, VM1, K2, VM2));
     Iterator<Tag> i = tags.getIterator();
     assertTrue(i.hasNext());
     Tag tag1 = i.next();
@@ -140,7 +166,7 @@
 
   @Test
   public void disallowCallingRemoveOnIterator() {
-    TagContextImpl tags = new TagContextImpl(ImmutableMap.of(K1, V1, K2, V2));
+    TagMapImpl tags = new TagMapImpl(ImmutableMap.of(K1, VM1, K2, VM2));
     Iterator<Tag> i = tags.getIterator();
     i.next();
     thrown.expect(UnsupportedOperationException.class);
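A short sketch of the putLocal/putPropagating shorthands covered by the new tests above: per those assertions they behave like put(key, value, metadata) with NO_PROPAGATION and UNLIMITED_PROPAGATION metadata respectively. The helper class is illustrative; the Tagger is assumed to be supplied by the caller.

import io.opencensus.tags.TagContext;
import io.opencensus.tags.TagKey;
import io.opencensus.tags.TagValue;
import io.opencensus.tags.Tagger;

final class PutShorthandSketch {
  // Equivalent to put(key, value, TagMetadata.create(TagTtl.NO_PROPAGATION)).
  static TagContext localOnly(Tagger tagger) {
    return tagger.emptyBuilder().putLocal(TagKey.create("k1"), TagValue.create("v1")).build();
  }

  // Equivalent to put(key, value, TagMetadata.create(TagTtl.UNLIMITED_PROPAGATION)).
  static TagContext propagating(Tagger tagger) {
    return tagger.emptyBuilder().putPropagating(TagKey.create("k1"), TagValue.create("v1")).build();
  }

  private PutShorthandSketch() {}
}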
diff --git a/impl_core/src/test/java/io/opencensus/implcore/tags/TaggerImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/tags/TaggerImplTest.java
index 4ca2ae7..34753b8 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/tags/TaggerImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/tags/TaggerImplTest.java
@@ -60,36 +60,36 @@
   @Test
   public void empty() {
     assertThat(tagContextToList(tagger.empty())).isEmpty();
-    assertThat(tagger.empty()).isInstanceOf(TagContextImpl.class);
+    assertThat(tagger.empty()).isInstanceOf(TagMapImpl.class);
   }
 
   @Test
   public void empty_TaggingDisabled() {
     tagsComponent.setState(TaggingState.DISABLED);
     assertThat(tagContextToList(tagger.empty())).isEmpty();
-    assertThat(tagger.empty()).isInstanceOf(TagContextImpl.class);
+    assertThat(tagger.empty()).isInstanceOf(TagMapImpl.class);
   }
 
   @Test
   public void emptyBuilder() {
     TagContextBuilder builder = tagger.emptyBuilder();
-    assertThat(builder).isInstanceOf(TagContextBuilderImpl.class);
+    assertThat(builder).isInstanceOf(TagMapBuilderImpl.class);
     assertThat(tagContextToList(builder.build())).isEmpty();
   }
 
   @Test
   public void emptyBuilder_TaggingDisabled() {
     tagsComponent.setState(TaggingState.DISABLED);
-    assertThat(tagger.emptyBuilder()).isSameAs(NoopTagContextBuilder.INSTANCE);
+    assertThat(tagger.emptyBuilder()).isSameInstanceAs(NoopTagMapBuilder.INSTANCE);
   }
 
   @Test
   public void emptyBuilder_TaggingReenabled() {
     tagsComponent.setState(TaggingState.DISABLED);
-    assertThat(tagger.emptyBuilder()).isSameAs(NoopTagContextBuilder.INSTANCE);
+    assertThat(tagger.emptyBuilder()).isSameInstanceAs(NoopTagMapBuilder.INSTANCE);
     tagsComponent.setState(TaggingState.ENABLED);
     TagContextBuilder builder = tagger.emptyBuilder();
-    assertThat(builder).isInstanceOf(TagContextBuilderImpl.class);
+    assertThat(builder).isInstanceOf(TagMapBuilderImpl.class);
     assertThat(tagContextToList(builder.put(K1, V1).build())).containsExactly(Tag.create(K1, V1));
   }
 
@@ -97,14 +97,14 @@
   public void currentBuilder() {
     TagContext tags = new SimpleTagContext(TAG1, TAG2, TAG3);
     TagContextBuilder result = getResultOfCurrentBuilder(tags);
-    assertThat(result).isInstanceOf(TagContextBuilderImpl.class);
+    assertThat(result).isInstanceOf(TagMapBuilderImpl.class);
     assertThat(tagContextToList(result.build())).containsExactly(TAG1, TAG2, TAG3);
   }
 
   @Test
   public void currentBuilder_DefaultIsEmpty() {
     TagContextBuilder currentBuilder = tagger.currentBuilder();
-    assertThat(currentBuilder).isInstanceOf(TagContextBuilderImpl.class);
+    assertThat(currentBuilder).isInstanceOf(TagMapBuilderImpl.class);
     assertThat(tagContextToList(currentBuilder.build())).isEmpty();
   }
 
@@ -128,22 +128,22 @@
   public void currentBuilder_TaggingDisabled() {
     tagsComponent.setState(TaggingState.DISABLED);
     assertThat(getResultOfCurrentBuilder(new SimpleTagContext(TAG1)))
-        .isSameAs(NoopTagContextBuilder.INSTANCE);
+        .isSameInstanceAs(NoopTagMapBuilder.INSTANCE);
   }
 
   @Test
   public void currentBuilder_TaggingReenabled() {
     TagContext tags = new SimpleTagContext(TAG1);
     tagsComponent.setState(TaggingState.DISABLED);
-    assertThat(getResultOfCurrentBuilder(tags)).isSameAs(NoopTagContextBuilder.INSTANCE);
+    assertThat(getResultOfCurrentBuilder(tags)).isSameInstanceAs(NoopTagMapBuilder.INSTANCE);
     tagsComponent.setState(TaggingState.ENABLED);
     TagContextBuilder builder = getResultOfCurrentBuilder(tags);
-    assertThat(builder).isInstanceOf(TagContextBuilderImpl.class);
+    assertThat(builder).isInstanceOf(TagMapBuilderImpl.class);
     assertThat(tagContextToList(builder.build())).containsExactly(TAG1);
   }
 
   private TagContextBuilder getResultOfCurrentBuilder(TagContext tagsToSet) {
-    Context orig = Context.current().withValue(ContextUtils.TAG_CONTEXT_KEY, tagsToSet).attach();
+    Context orig = ContextUtils.withValue(Context.current(), tagsToSet).attach();
     try {
       return tagger.currentBuilder();
     } finally {
@@ -152,11 +152,11 @@
   }
 
   @Test
-  public void toBuilder_ConvertUnknownTagContextToTagContextImpl() {
+  public void toBuilder_ConvertUnknownTagContextToTagMapImpl() {
     TagContext unknownTagContext = new SimpleTagContext(TAG1, TAG2, TAG3);
     TagContext newTagContext = tagger.toBuilder(unknownTagContext).build();
     assertThat(tagContextToList(newTagContext)).containsExactly(TAG1, TAG2, TAG3);
-    assertThat(newTagContext).isInstanceOf(TagContextImpl.class);
+    assertThat(newTagContext).isInstanceOf(TagMapImpl.class);
   }
 
   @Test
@@ -179,32 +179,32 @@
   public void toBuilder_TaggingDisabled() {
     tagsComponent.setState(TaggingState.DISABLED);
     assertThat(tagger.toBuilder(new SimpleTagContext(TAG1)))
-        .isSameAs(NoopTagContextBuilder.INSTANCE);
+        .isSameInstanceAs(NoopTagMapBuilder.INSTANCE);
   }
 
   @Test
   public void toBuilder_TaggingReenabled() {
     TagContext tags = new SimpleTagContext(TAG1);
     tagsComponent.setState(TaggingState.DISABLED);
-    assertThat(tagger.toBuilder(tags)).isSameAs(NoopTagContextBuilder.INSTANCE);
+    assertThat(tagger.toBuilder(tags)).isSameInstanceAs(NoopTagMapBuilder.INSTANCE);
     tagsComponent.setState(TaggingState.ENABLED);
     TagContextBuilder builder = tagger.toBuilder(tags);
-    assertThat(builder).isInstanceOf(TagContextBuilderImpl.class);
+    assertThat(builder).isInstanceOf(TagMapBuilderImpl.class);
     assertThat(tagContextToList(builder.build())).containsExactly(TAG1);
   }
 
   @Test
-  public void getCurrentTagContext_DefaultIsEmptyTagContextImpl() {
+  public void getCurrentTagContext_DefaultIsEmptyTagMapImpl() {
     TagContext currentTagContext = tagger.getCurrentTagContext();
     assertThat(tagContextToList(currentTagContext)).isEmpty();
-    assertThat(currentTagContext).isInstanceOf(TagContextImpl.class);
+    assertThat(currentTagContext).isInstanceOf(TagMapImpl.class);
   }
 
   @Test
-  public void getCurrentTagContext_ConvertUnknownTagContextToTagContextImpl() {
+  public void getCurrentTagContext_ConvertUnknownTagContextToTagMapImpl() {
     TagContext unknownTagContext = new SimpleTagContext(TAG1, TAG2, TAG3);
     TagContext result = getResultOfGetCurrentTagContext(unknownTagContext);
-    assertThat(result).isInstanceOf(TagContextImpl.class);
+    assertThat(result).isInstanceOf(TagMapImpl.class);
     assertThat(tagContextToList(result)).containsExactly(TAG1, TAG2, TAG3);
   }
 
@@ -241,7 +241,7 @@
   }
 
   private TagContext getResultOfGetCurrentTagContext(TagContext tagsToSet) {
-    Context orig = Context.current().withValue(ContextUtils.TAG_CONTEXT_KEY, tagsToSet).attach();
+    Context orig = ContextUtils.withValue(Context.current(), tagsToSet).attach();
     try {
       return tagger.getCurrentTagContext();
     } finally {
@@ -250,10 +250,10 @@
   }
 
   @Test
-  public void withTagContext_ConvertUnknownTagContextToTagContextImpl() {
+  public void withTagContext_ConvertUnknownTagContextToTagMapImpl() {
     TagContext unknownTagContext = new SimpleTagContext(TAG1, TAG2, TAG3);
     TagContext result = getResultOfWithTagContext(unknownTagContext);
-    assertThat(result).isInstanceOf(TagContextImpl.class);
+    assertThat(result).isInstanceOf(TagMapImpl.class);
     assertThat(tagContextToList(result)).containsExactly(TAG1, TAG2, TAG3);
   }
 
@@ -276,7 +276,8 @@
   @Test
   public void withTagContext_ReturnsNoopScopeWhenTaggingIsDisabled() {
     tagsComponent.setState(TaggingState.DISABLED);
-    assertThat(tagger.withTagContext(new SimpleTagContext(TAG1))).isSameAs(NoopScope.getInstance());
+    assertThat(tagger.withTagContext(new SimpleTagContext(TAG1)))
+        .isSameInstanceAs(NoopScope.getInstance());
   }
 
   @Test
@@ -297,7 +298,7 @@
   private TagContext getResultOfWithTagContext(TagContext tagsToSet) {
     Scope scope = tagger.withTagContext(tagsToSet);
     try {
-      return ContextUtils.TAG_CONTEXT_KEY.get();
+      return ContextUtils.getValue(Context.current());
     } finally {
       scope.close();
     }
diff --git a/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/CorrelationContextFormatTest.java b/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/CorrelationContextFormatTest.java
new file mode 100644
index 0000000..eb1cfe8
--- /dev/null
+++ b/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/CorrelationContextFormatTest.java
@@ -0,0 +1,301 @@
+/*
+ * Copyright 2019, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.tags.propagation;
+
+import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.implcore.tags.propagation.CorrelationContextFormat.CORRELATION_CONTEXT;
+import static io.opencensus.implcore.tags.propagation.CorrelationContextFormat.METADATA_UNLIMITED_PROPAGATION;
+
+import io.opencensus.implcore.tags.TagsComponentImplBase;
+import io.opencensus.implcore.tags.TagsTestUtil;
+import io.opencensus.tags.Tag;
+import io.opencensus.tags.TagContext;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagMetadata;
+import io.opencensus.tags.TagMetadata.TagTtl;
+import io.opencensus.tags.TagValue;
+import io.opencensus.tags.TaggingState;
+import io.opencensus.tags.TagsComponent;
+import io.opencensus.tags.propagation.TagContextDeserializationException;
+import io.opencensus.tags.propagation.TagContextSerializationException;
+import io.opencensus.tags.propagation.TagContextTextFormat;
+import io.opencensus.tags.propagation.TagContextTextFormat.Getter;
+import io.opencensus.tags.propagation.TagContextTextFormat.Setter;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Random;
+import javax.annotation.Nullable;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Tests for {@link CorrelationContextFormat}. */
+@RunWith(JUnit4.class)
+public class CorrelationContextFormatTest {
+
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  private final TagsComponent tagsComponent = new TagsComponentImplBase();
+  private final TagContextTextFormat textFormat =
+      tagsComponent.getTagPropagationComponent().getCorrelationContextFormat();
+
+  private static final TagMetadata METADATA_NO_PROPAGATION =
+      TagMetadata.create(TagTtl.NO_PROPAGATION);
+
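+  // Setter and Getter adapters that use a plain Map as the header carrier in these tests.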
+  private static final Setter<Map<String, String>> setter =
+      new Setter<Map<String, String>>() {
+        @Override
+        public void put(Map<String, String> carrier, String key, String value) {
+          carrier.put(key, value);
+        }
+      };
+
+  private static final Getter<Map<String, String>> getter =
+      new Getter<Map<String, String>>() {
+        @Nullable
+        @Override
+        public String get(Map<String, String> carrier, String key) {
+          return carrier.get(key);
+        }
+      };
+
+  private static final TagKey K1 = TagKey.create("k1");
+  private static final TagKey K2 = TagKey.create("k2");
+  private static final TagValue V1 = TagValue.create("v1");
+  private static final TagValue V2 = TagValue.create("v2");
+  private static final Tag T1 = Tag.create(K1, V1, METADATA_UNLIMITED_PROPAGATION);
+  private static final Tag T2 = Tag.create(K2, V2, METADATA_UNLIMITED_PROPAGATION);
+
+  private static final Random random = new Random();
+
+  @Test
+  public void fieldsList() {
+    assertThat(textFormat.fields()).containsExactly(CORRELATION_CONTEXT);
+  }
+
+  @Test
+  public void headerNames() {
+    assertThat(CORRELATION_CONTEXT).isEqualTo("Correlation-Context");
+  }
+
+  @Test
+  public void inject() throws TagContextSerializationException {
+    Map<String, String> carrier = new HashMap<String, String>();
+    textFormat.inject(makeTagContext(T1, T2), carrier, setter);
+    assertThat(carrier).containsExactly(CORRELATION_CONTEXT, "k1=v1,k2=v2");
+  }
+
+  @Test
+  public void inject_Empty() throws TagContextSerializationException {
+    Map<String, String> carrier = new HashMap<String, String>();
+    textFormat.inject(makeTagContext(), carrier, setter);
+    assertThat(carrier).containsExactly(CORRELATION_CONTEXT, "");
+  }
+
+  @Test
+  public void inject_SkipNonPropagatingTag() throws TagContextSerializationException {
+    Map<String, String> carrier = new HashMap<String, String>();
+    Tag tag = Tag.create(K1, V1, METADATA_NO_PROPAGATION);
+    textFormat.inject(makeTagContext(tag), carrier, setter);
+    assertThat(carrier).containsExactly(CORRELATION_CONTEXT, "");
+  }
+
+  @Test
+  public void inject_MixedPropagatingAndNonPropagatingTags()
+      throws TagContextSerializationException {
+    Map<String, String> carrier = new HashMap<String, String>();
+    Tag tag = Tag.create(K1, V1, METADATA_NO_PROPAGATION);
+    textFormat.inject(makeTagContext(T1, tag, T2), carrier, setter);
+    assertThat(carrier).containsExactly(CORRELATION_CONTEXT, "k1=v1,k2=v2");
+  }
+
+  @Test
+  @SuppressWarnings("deprecation")
+  public void inject_TaggingDisabled() throws TagContextSerializationException {
+    Map<String, String> carrier = new HashMap<String, String>();
+    tagsComponent.setState(TaggingState.DISABLED);
+    textFormat.inject(makeTagContext(T1, T2), carrier, setter);
+    assertThat(carrier).isEmpty();
+    tagsComponent.setState(TaggingState.ENABLED);
+  }
+
+  @Test
+  public void inject_TooManyTags() throws TagContextSerializationException {
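+    // Create one more tag than MAX_NUMBER_OF_TAGS so that inject() throws.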
+    Tag[] tags = new Tag[CorrelationContextFormat.MAX_NUMBER_OF_TAGS + 1];
+    for (int i = 0; i < tags.length; i++) {
+      tags[i] =
+          Tag.create(
+              TagKey.create("k" + i), TagValue.create("v" + i), METADATA_UNLIMITED_PROPAGATION);
+    }
+    TagContext tagContext = makeTagContext(tags);
+    Map<String, String> carrier = new HashMap<String, String>();
+    thrown.expect(TagContextSerializationException.class);
+    textFormat.inject(tagContext, carrier, setter);
+  }
+
+  @Test
+  public void inject_SizeTooLarge() throws TagContextSerializationException {
+    Tag[] tags = new Tag[40];
+    for (int i = 0; i < tags.length; i++) {
+      tags[i] =
+          Tag.create(
+              TagKey.create(generateRandom(240)),
+              TagValue.create(generateRandom(240)),
+              METADATA_UNLIMITED_PROPAGATION);
+    }
+    TagContext tagContext = makeTagContext(tags);
+    Map<String, String> carrier = new HashMap<String, String>();
+    thrown.expect(TagContextSerializationException.class);
+    textFormat.inject(tagContext, carrier, setter);
+  }
+
+  @Test
+  public void extract() throws TagContextDeserializationException {
+    Map<String, String> carrier = Collections.singletonMap(CORRELATION_CONTEXT, "k1=v1,k2=v2");
+    TagContext tagContext = textFormat.extract(carrier, getter);
+    assertThat(TagsTestUtil.tagContextToList(tagContext)).containsExactly(T1, T2);
+  }
+
+  @Test
+  public void extract_Empty() throws TagContextDeserializationException {
+    Map<String, String> carrier = Collections.singletonMap(CORRELATION_CONTEXT, "");
+    TagContext tagContext = textFormat.extract(carrier, getter);
+    assertThat(TagsTestUtil.tagContextToList(tagContext)).isEmpty();
+  }
+
+  @Test
+  public void extract_WithUnknownProperties() throws TagContextDeserializationException {
+    Map<String, String> carrier =
+        Collections.singletonMap(CORRELATION_CONTEXT, "k1=v1;property1=p1;property2=p2,k2=v2");
+    Tag expected = Tag.create(K1, TagValue.create("v1"), METADATA_UNLIMITED_PROPAGATION);
+    TagContext tagContext = textFormat.extract(carrier, getter);
+    assertThat(TagsTestUtil.tagContextToList(tagContext)).containsExactly(expected, T2);
+  }
+
+  @Test
+  public void extract_TrimSpaces() throws TagContextDeserializationException {
+    Map<String, String> carrier = Collections.singletonMap(CORRELATION_CONTEXT, "k1= v1, k2=v2 ");
+    Tag expected1 = Tag.create(K1, V1, METADATA_UNLIMITED_PROPAGATION);
+    Tag expected2 = Tag.create(K2, V2, METADATA_UNLIMITED_PROPAGATION);
+    TagContext tagContext = textFormat.extract(carrier, getter);
+    assertThat(TagsTestUtil.tagContextToList(tagContext)).containsExactly(expected1, expected2);
+  }
+
+  @Test
+  public void extract_OverrideTagWithSpaces() throws TagContextDeserializationException {
+    Map<String, String> carrier = Collections.singletonMap(CORRELATION_CONTEXT, "k1= v1, k1=v2 ");
+    Tag expected = Tag.create(K1, V2, METADATA_UNLIMITED_PROPAGATION);
+    TagContext tagContext = textFormat.extract(carrier, getter);
+    assertThat(TagsTestUtil.tagContextToList(tagContext)).containsExactly(expected);
+  }
+
+  @Test
+  public void extract_NoCorrelationContextHeader() throws TagContextDeserializationException {
+    Map<String, String> carrier = Collections.singletonMap("unknown-header", "value");
+    thrown.expect(TagContextDeserializationException.class);
+    textFormat.extract(carrier, getter);
+  }
+
+  @Test
+  public void extract_MalformedTag() throws TagContextDeserializationException {
+    Map<String, String> carrier = Collections.singletonMap(CORRELATION_CONTEXT, "k1,v1,k2=v2");
+    thrown.expect(TagContextDeserializationException.class);
+    textFormat.extract(carrier, getter);
+  }
+
+  @Test
+  public void extract_MalformedTagKey() throws TagContextDeserializationException {
+    Map<String, String> carrier = Collections.singletonMap(CORRELATION_CONTEXT, "k1=v1,ké=v2");
+    thrown.expect(TagContextDeserializationException.class);
+    textFormat.extract(carrier, getter);
+  }
+
+  @Test
+  public void extract_MalformedTagValue() throws TagContextDeserializationException {
+    Map<String, String> carrier = Collections.singletonMap(CORRELATION_CONTEXT, "k1=v1,k2=vé");
+    thrown.expect(TagContextDeserializationException.class);
+    textFormat.extract(carrier, getter);
+  }
+
+  @Test
+  public void extract_TagKeyTooLong() throws TagContextDeserializationException {
+    String longKey = generateRandom(300);
+    Map<String, String> carrier = Collections.singletonMap(CORRELATION_CONTEXT, longKey + "=v1");
+    thrown.expect(TagContextDeserializationException.class);
+    textFormat.extract(carrier, getter);
+  }
+
+  @Test
+  public void extract_TagValueTooLong() throws TagContextDeserializationException {
+    String longValue = generateRandom(300);
+    Map<String, String> carrier = Collections.singletonMap(CORRELATION_CONTEXT, "k1=" + longValue);
+    thrown.expect(TagContextDeserializationException.class);
+    textFormat.extract(carrier, getter);
+  }
+
+  @Test
+  @SuppressWarnings("deprecation")
+  public void extract_TaggingDisabled()
+      throws TagContextDeserializationException, TagContextSerializationException {
+    Map<String, String> carrier = new HashMap<String, String>();
+    textFormat.inject(makeTagContext(T1), carrier, setter);
+    tagsComponent.setState(TaggingState.DISABLED);
+    assertThat(TagsTestUtil.tagContextToList(textFormat.extract(carrier, getter))).isEmpty();
+    tagsComponent.setState(TaggingState.ENABLED);
+  }
+
+  @Test
+  public void roundTrip()
+      throws TagContextSerializationException, TagContextDeserializationException {
+    Tag[] tags = new Tag[40];
+    for (int i = 0; i < tags.length; i++) {
+      tags[i] =
+          Tag.create(
+              TagKey.create(generateRandom(10)),
+              TagValue.create(generateRandom(10)),
+              METADATA_UNLIMITED_PROPAGATION);
+    }
+    TagContext tagContext = makeTagContext(tags);
+    Map<String, String> carrier = new HashMap<String, String>();
+    textFormat.inject(tagContext, carrier, setter);
+    TagContext actual = textFormat.extract(carrier, getter);
+    assertThat(TagsTestUtil.tagContextToList(actual))
+        .containsExactlyElementsIn(TagsTestUtil.tagContextToList(tagContext));
+  }
+
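+  /** Creates a TagContext that simply iterates over the given tags. */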
+  private static TagContext makeTagContext(final Tag... tags) {
+    return new TagContext() {
+      @Override
+      public Iterator<Tag> getIterator() {
+        return Arrays.<Tag>asList(tags).iterator();
+      }
+    };
+  }
+
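+  /** Returns a pseudo-random string of decimal digits with the given length. */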
+  private static String generateRandom(int length) {
+    StringBuilder builder = new StringBuilder();
+    for (int i = 0; i < length; i++) {
+      builder.append(random.nextInt(10));
+    }
+    return builder.toString();
+  }
+}
diff --git a/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextBinarySerializerImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextBinarySerializerImplTest.java
index 26a072f..2448d45 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextBinarySerializerImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextBinarySerializerImplTest.java
@@ -24,6 +24,8 @@
 import io.opencensus.tags.Tag;
 import io.opencensus.tags.TagContext;
 import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagMetadata;
+import io.opencensus.tags.TagMetadata.TagTtl;
 import io.opencensus.tags.TagValue;
 import io.opencensus.tags.TaggingState;
 import io.opencensus.tags.TagsComponent;
@@ -47,6 +49,9 @@
   private final TagContextBinarySerializer serializer =
       tagsComponent.getTagPropagationComponent().getBinarySerializer();
 
+  private static final TagMetadata METADATA_NO_PROPAGATION =
+      TagMetadata.create(TagTtl.NO_PROPAGATION);
+
   private final TagContext tagContext =
       new TagContext() {
         @Override
@@ -56,6 +61,17 @@
         }
       };
 
+  private final TagContext tagContextWithNonPropagatingTag =
+      new TagContext() {
+        @Override
+        public Iterator<Tag> getIterator() {
+          return ImmutableSet.<Tag>of(
+                  Tag.create(
+                      TagKey.create("key"), TagValue.create("value"), METADATA_NO_PROPAGATION))
+              .iterator();
+        }
+      };
+
   @Test
   @SuppressWarnings("deprecation")
   public void toByteArray_TaggingDisabled() throws TagContextSerializationException {
@@ -74,6 +90,12 @@
   }
 
   @Test
+  public void toByteArray_SkipNonPropagatingTag() throws TagContextSerializationException {
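+    // A context whose only tag is non-propagating should serialize to just the version byte.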
+    byte[] versionIdBytes = new byte[] {BinarySerializationUtils.VERSION_ID};
+    assertThat(serializer.toByteArray(tagContextWithNonPropagatingTag)).isEqualTo(versionIdBytes);
+  }
+
+  @Test
   @SuppressWarnings("deprecation")
   public void fromByteArray_TaggingDisabled()
       throws TagContextDeserializationException, TagContextSerializationException {
diff --git a/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextDeserializationTest.java b/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextDeserializationTest.java
index 8db0e38..1460580 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextDeserializationTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextDeserializationTest.java
@@ -37,7 +37,7 @@
 import org.junit.runners.JUnit4;
 
 /**
- * Tests for deserializing tags with {@link SerializationUtils} and {@link
+ * Tests for deserializing tags with {@link BinarySerializationUtils} and {@link
  * TagContextBinarySerializerImpl}.
  */
 @RunWith(JUnit4.class)
@@ -52,10 +52,10 @@
 
   @Test
   public void testConstants() {
-    // Refer to the JavaDoc on SerializationUtils for the definitions on these constants.
-    assertThat(SerializationUtils.VERSION_ID).isEqualTo(0);
-    assertThat(SerializationUtils.TAG_FIELD_ID).isEqualTo(0);
-    assertThat(SerializationUtils.TAGCONTEXT_SERIALIZED_SIZE_LIMIT).isEqualTo(8192);
+    // Refer to the JavaDoc on BinarySerializationUtils for the definitions of these constants.
+    assertThat(BinarySerializationUtils.VERSION_ID).isEqualTo(0);
+    assertThat(BinarySerializationUtils.TAG_FIELD_ID).isEqualTo(0);
+    assertThat(BinarySerializationUtils.TAGCONTEXT_SERIALIZED_SIZE_LIMIT).isEqualTo(8192);
   }
 
   @Test
@@ -63,7 +63,9 @@
     TagContext expected = tagger.empty();
     TagContext actual =
         serializer.fromByteArray(
-            new byte[] {SerializationUtils.VERSION_ID}); // One byte that represents Version ID.
+            new byte[] {
+              BinarySerializationUtils.VERSION_ID
+            }); // One byte that represents Version ID.
     assertThat(actual).isEqualTo(expected);
   }
 
@@ -79,8 +81,8 @@
   public void testDeserializeTooLargeByteArrayThrowException()
       throws TagContextDeserializationException {
     ByteArrayDataOutput output = ByteStreams.newDataOutput();
-    output.write(SerializationUtils.VERSION_ID);
-    for (int i = 0; i < SerializationUtils.TAGCONTEXT_SERIALIZED_SIZE_LIMIT / 8 - 1; i++) {
+    output.write(BinarySerializationUtils.VERSION_ID);
+    for (int i = 0; i < BinarySerializationUtils.TAGCONTEXT_SERIALIZED_SIZE_LIMIT / 8 - 1; i++) {
       // Each tag will have the format {key : "0123", value : "0123"}, so its length is 8.
       String str;
       if (i < 10) {
@@ -110,8 +112,8 @@
   public void testDeserializeTooLargeByteArrayThrowException_WithDuplicateTagKeys()
       throws TagContextDeserializationException {
     ByteArrayDataOutput output = ByteStreams.newDataOutput();
-    output.write(SerializationUtils.VERSION_ID);
-    for (int i = 0; i < SerializationUtils.TAGCONTEXT_SERIALIZED_SIZE_LIMIT / 8 - 1; i++) {
+    output.write(BinarySerializationUtils.VERSION_ID);
+    for (int i = 0; i < BinarySerializationUtils.TAGCONTEXT_SERIALIZED_SIZE_LIMIT / 8 - 1; i++) {
       // Each tag will have the format {key : "key_", value : "0123"}, so its length is 8.
       String str;
       if (i < 10) {
@@ -138,7 +140,7 @@
   @Test
   public void testDeserializeInvalidTagKey() throws TagContextDeserializationException {
     ByteArrayDataOutput output = ByteStreams.newDataOutput();
-    output.write(SerializationUtils.VERSION_ID);
+    output.write(BinarySerializationUtils.VERSION_ID);
 
     // Encode an invalid tag key and a valid tag value:
     encodeTagToOutput("\2key", "value", output);
@@ -152,7 +154,7 @@
   @Test
   public void testDeserializeInvalidTagValue() throws TagContextDeserializationException {
     ByteArrayDataOutput output = ByteStreams.newDataOutput();
-    output.write(SerializationUtils.VERSION_ID);
+    output.write(BinarySerializationUtils.VERSION_ID);
 
     // Encode a valid tag key and an invalid tag value:
     encodeTagToOutput("my key", "val\3", output);
@@ -166,7 +168,7 @@
   @Test
   public void testDeserializeOneTag() throws TagContextDeserializationException {
     ByteArrayDataOutput output = ByteStreams.newDataOutput();
-    output.write(SerializationUtils.VERSION_ID);
+    output.write(BinarySerializationUtils.VERSION_ID);
     encodeTagToOutput("Key", "Value", output);
     TagContext expected =
         tagger.emptyBuilder().put(TagKey.create("Key"), TagValue.create("Value")).build();
@@ -176,7 +178,7 @@
   @Test
   public void testDeserializeMultipleTags() throws TagContextDeserializationException {
     ByteArrayDataOutput output = ByteStreams.newDataOutput();
-    output.write(SerializationUtils.VERSION_ID);
+    output.write(BinarySerializationUtils.VERSION_ID);
     encodeTagToOutput("Key1", "Value1", output);
     encodeTagToOutput("Key2", "Value2", output);
     TagContext expected =
@@ -191,7 +193,7 @@
   @Test
   public void testDeserializeDuplicateKeys() throws TagContextDeserializationException {
     ByteArrayDataOutput output = ByteStreams.newDataOutput();
-    output.write(SerializationUtils.VERSION_ID);
+    output.write(BinarySerializationUtils.VERSION_ID);
     encodeTagToOutput("Key1", "Value1", output);
     encodeTagToOutput("Key1", "Value2", output);
     TagContext expected =
@@ -203,7 +205,7 @@
   public void testDeserializeNonConsecutiveDuplicateKeys()
       throws TagContextDeserializationException {
     ByteArrayDataOutput output = ByteStreams.newDataOutput();
-    output.write(SerializationUtils.VERSION_ID);
+    output.write(BinarySerializationUtils.VERSION_ID);
     encodeTagToOutput("Key1", "Value1", output);
     encodeTagToOutput("Key2", "Value2", output);
     encodeTagToOutput("Key3", "Value3", output);
@@ -222,7 +224,7 @@
   @Test
   public void testDeserializeDuplicateTags() throws TagContextDeserializationException {
     ByteArrayDataOutput output = ByteStreams.newDataOutput();
-    output.write(SerializationUtils.VERSION_ID);
+    output.write(BinarySerializationUtils.VERSION_ID);
     encodeTagToOutput("Key1", "Value1", output);
     encodeTagToOutput("Key1", "Value1", output);
     TagContext expected =
@@ -234,7 +236,7 @@
   public void testDeserializeNonConsecutiveDuplicateTags()
       throws TagContextDeserializationException {
     ByteArrayDataOutput output = ByteStreams.newDataOutput();
-    output.write(SerializationUtils.VERSION_ID);
+    output.write(BinarySerializationUtils.VERSION_ID);
     encodeTagToOutput("Key1", "Value1", output);
     encodeTagToOutput("Key2", "Value2", output);
     encodeTagToOutput("Key3", "Value3", output);
@@ -253,7 +255,7 @@
   @Test
   public void stopParsingAtUnknownField() throws TagContextDeserializationException {
     ByteArrayDataOutput output = ByteStreams.newDataOutput();
-    output.write(SerializationUtils.VERSION_ID);
+    output.write(BinarySerializationUtils.VERSION_ID);
     encodeTagToOutput("Key1", "Value1", output);
     encodeTagToOutput("Key2", "Value2", output);
 
@@ -276,7 +278,7 @@
   @Test
   public void stopParsingAtUnknownTagAtStart() throws TagContextDeserializationException {
     ByteArrayDataOutput output = ByteStreams.newDataOutput();
-    output.write(SerializationUtils.VERSION_ID);
+    output.write(BinarySerializationUtils.VERSION_ID);
 
     // Write unknown field ID 1.
     output.write(1);
@@ -297,7 +299,7 @@
   public void testDeserializeWrongVersionId() throws TagContextDeserializationException {
     thrown.expect(TagContextDeserializationException.class);
     thrown.expectMessage("Wrong Version ID: 1. Currently supports version up to: 0");
-    serializer.fromByteArray(new byte[] {(byte) (SerializationUtils.VERSION_ID + 1)});
+    serializer.fromByteArray(new byte[] {(byte) (BinarySerializationUtils.VERSION_ID + 1)});
   }
 
   @Test
@@ -314,7 +316,7 @@
   //         <tag_val_len> == varint encoded integer
   //         <tag_val> == tag_val_len bytes comprising UTF-8 string
   private static void encodeTagToOutput(String key, String value, ByteArrayDataOutput output) {
-    output.write(SerializationUtils.TAG_FIELD_ID);
+    output.write(BinarySerializationUtils.TAG_FIELD_ID);
     encodeString(key, output);
     encodeString(value, output);
   }
diff --git a/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextRoundtripTest.java b/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextRoundtripTest.java
index 1b1aa04..71d8f4b 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextRoundtripTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextRoundtripTest.java
@@ -59,7 +59,7 @@
   @Test
   public void testRoundtrip_TagContextWithMaximumSize() throws Exception {
     TagContextBuilder builder = tagger.emptyBuilder();
-    for (int i = 0; i < SerializationUtils.TAGCONTEXT_SERIALIZED_SIZE_LIMIT / 8; i++) {
+    for (int i = 0; i < BinarySerializationUtils.TAGCONTEXT_SERIALIZED_SIZE_LIMIT / 8; i++) {
       // Each tag will have the format {key : "0123", value : "0123"}, so its length is 8.
       // Add 1024 tags; the total size should be exactly 8192.
       String str;
diff --git a/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextSerializationTest.java b/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextSerializationTest.java
index ed68fe3..a4302e7 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextSerializationTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/tags/propagation/TagContextSerializationTest.java
@@ -45,7 +45,7 @@
 import org.junit.runners.JUnit4;
 
 /**
- * Tests for serializing tags with {@link SerializationUtils} and {@link
+ * Tests for serializing tags with {@link BinarySerializationUtils} and {@link
  * TagContextBinarySerializerImpl}.
  */
 @RunWith(JUnit4.class)
@@ -91,7 +91,7 @@
   @Test
   public void testSerializeTooLargeTagContext() throws TagContextSerializationException {
     TagContextBuilder builder = tagger.emptyBuilder();
-    for (int i = 0; i < SerializationUtils.TAGCONTEXT_SERIALIZED_SIZE_LIMIT / 8 - 1; i++) {
+    for (int i = 0; i < BinarySerializationUtils.TAGCONTEXT_SERIALIZED_SIZE_LIMIT / 8 - 1; i++) {
       // Each tag will have the format {key : "0123", value : "0123"}, so its length is 8.
       String str;
       if (i < 10) {
@@ -127,9 +127,9 @@
     Set<String> possibleOutputs = new HashSet<String>();
     for (List<Tag> list : tagPermutation) {
       ByteArrayOutputStream expected = new ByteArrayOutputStream();
-      expected.write(SerializationUtils.VERSION_ID);
+      expected.write(BinarySerializationUtils.VERSION_ID);
       for (Tag tag : list) {
-        expected.write(SerializationUtils.TAG_FIELD_ID);
+        expected.write(BinarySerializationUtils.TAG_FIELD_ID);
         encodeString(tag.getKey().getName(), expected);
         encodeString(tag.getValue().asString(), expected);
       }
diff --git a/impl_core/src/test/java/io/opencensus/implcore/trace/SpanBuilderImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/trace/SpanBuilderImplTest.java
index 3267eac..4ff2302 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/trace/SpanBuilderImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/trace/SpanBuilderImplTest.java
@@ -139,6 +139,33 @@
   }
 
   @Test
+  public void startSpanIncreaseNumberOfChildren() {
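+    // Each child span started under a parent should increment that parent's child span count.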
+    RecordEventsSpanImpl parent =
+        (RecordEventsSpanImpl)
+            SpanBuilderImpl.createWithParent(SPAN_NAME, null, spanBuilderOptions)
+                .setSampler(Samplers.alwaysSample())
+                .startSpan();
+    assertThat(parent.getContext().getTraceOptions().isSampled()).isTrue();
+    assertThat(parent.toSpanData().getChildSpanCount()).isEqualTo(0);
+    RecordEventsSpanImpl span =
+        (RecordEventsSpanImpl)
+            SpanBuilderImpl.createWithParent(SPAN_NAME, parent, spanBuilderOptions)
+                .setSampler(Samplers.alwaysSample())
+                .startSpan();
+    assertThat(span.getContext().getTraceOptions().isSampled()).isTrue();
+    assertThat(span.toSpanData().getChildSpanCount()).isEqualTo(0);
+    assertThat(parent.toSpanData().getChildSpanCount()).isEqualTo(1);
+    span =
+        (RecordEventsSpanImpl)
+            SpanBuilderImpl.createWithParent(SPAN_NAME, parent, spanBuilderOptions)
+                .setSampler(Samplers.alwaysSample())
+                .startSpan();
+    assertThat(span.getContext().getTraceOptions().isSampled()).isTrue();
+    assertThat(span.toSpanData().getChildSpanCount()).isEqualTo(0);
+    assertThat(parent.toSpanData().getChildSpanCount()).isEqualTo(2);
+  }
+
+  @Test
   public void startSpanNullParentNoRecordOptions() {
     Span span =
         SpanBuilderImpl.createWithParent(SPAN_NAME, null, spanBuilderOptions)
diff --git a/impl_core/src/test/java/io/opencensus/implcore/trace/export/ExportComponentImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/trace/export/ExportComponentImplTest.java
index 4b8993f..041082a 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/trace/export/ExportComponentImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/trace/export/ExportComponentImplTest.java
@@ -40,9 +40,9 @@
   @Test
   public void implementationOfActiveSpans() {
     assertThat(exportComponentWithInProcess.getRunningSpanStore())
-        .isInstanceOf(InProcessRunningSpanStoreImpl.class);
+        .isInstanceOf(InProcessRunningSpanStore.class);
     assertThat(exportComponentWithoutInProcess.getRunningSpanStore())
-        .isInstanceOf(RunningSpanStoreImpl.getNoopRunningSpanStoreImpl().getClass());
+        .isInstanceOf(InProcessRunningSpanStore.class);
   }
 
   @Test
diff --git a/impl_core/src/test/java/io/opencensus/implcore/trace/export/InProcessRunningSpanStoreImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/trace/export/InProcessRunningSpanStoreImplTest.java
index 68ce1c1..dfbbae9 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/trace/export/InProcessRunningSpanStoreImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/trace/export/InProcessRunningSpanStoreImplTest.java
@@ -28,14 +28,16 @@
 import io.opencensus.trace.SpanId;
 import io.opencensus.trace.TraceId;
 import io.opencensus.trace.TraceOptions;
+import io.opencensus.trace.Tracestate;
 import io.opencensus.trace.config.TraceParams;
 import io.opencensus.trace.export.RunningSpanStore.Filter;
 import java.util.Random;
+import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 
-/** Unit tests for {@link InProcessRunningSpanStoreImpl}. */
+/** Unit tests for {@link InProcessRunningSpanStore}. */
 @RunWith(JUnit4.class)
 public class InProcessRunningSpanStoreImplTest {
 
@@ -44,18 +46,23 @@
   private final Random random = new Random(1234);
   private final SpanExporterImpl sampledSpansServiceExporter =
       SpanExporterImpl.create(4, Duration.create(1, 0));
-  private final InProcessRunningSpanStoreImpl activeSpansExporter =
-      new InProcessRunningSpanStoreImpl();
+  private final InProcessRunningSpanStore activeSpansExporter = new InProcessRunningSpanStore();
   private final StartEndHandler startEndHandler =
       new StartEndHandlerImpl(
           sampledSpansServiceExporter, activeSpansExporter, null, new SimpleEventQueue());
 
+  @Before
+  public void setUp() {
+    activeSpansExporter.setMaxNumberOfSpans(10);
+  }
+
   private RecordEventsSpanImpl createSpan(String spanName) {
     final SpanContext spanContext =
         SpanContext.create(
             TraceId.generateRandomId(random),
             SpanId.generateRandomId(random),
-            TraceOptions.DEFAULT);
+            TraceOptions.DEFAULT,
+            Tracestate.builder().build());
     return RecordEventsSpanImpl.startSpan(
         spanContext,
         spanName,
@@ -165,4 +172,23 @@
     span2.end();
     span3.end();
   }
+
+  @Test
+  public void setMaxNumberOfSpans() {
+    RecordEventsSpanImpl span1 = createSpan(SPAN_NAME_1);
+    RecordEventsSpanImpl span2 = createSpan(SPAN_NAME_2);
+    assertThat(activeSpansExporter.getSummary().getPerSpanNameSummary().size()).isEqualTo(2);
+    // This will reset all the spans.
+    activeSpansExporter.setMaxNumberOfSpans(10);
+    assertThat(activeSpansExporter.getSummary().getPerSpanNameSummary().size()).isEqualTo(0);
+    span1.end();
+    span2.end();
+    // Add spans again.
+    RecordEventsSpanImpl span3 = createSpan(SPAN_NAME_1);
+    RecordEventsSpanImpl span4 = createSpan(SPAN_NAME_2);
+    assertThat(activeSpansExporter.getSummary().getPerSpanNameSummary().size()).isEqualTo(2);
+    span3.end();
+    span4.end();
+    assertThat(activeSpansExporter.getSummary().getPerSpanNameSummary().size()).isEqualTo(0);
+  }
 }
diff --git a/impl_core/src/test/java/io/opencensus/implcore/trace/export/NoopRunningSpanStoreImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/trace/export/NoopInProcessRunningSpanStoreTest.java
similarity index 94%
rename from impl_core/src/test/java/io/opencensus/implcore/trace/export/NoopRunningSpanStoreImplTest.java
rename to impl_core/src/test/java/io/opencensus/implcore/trace/export/NoopInProcessRunningSpanStoreTest.java
index 96669df..c6acf9d 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/trace/export/NoopRunningSpanStoreImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/trace/export/NoopInProcessRunningSpanStoreTest.java
@@ -39,9 +39,9 @@
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
 
-/** Unit tests for {@link RunningSpanStoreImpl.NoopRunningSpanStoreImpl}. */
+/** Unit tests for the no-op {@link InProcessRunningSpanStore}. */
 @RunWith(JUnit4.class)
-public class NoopRunningSpanStoreImplTest {
+public class NoopInProcessRunningSpanStoreTest {
 
   private static final String SPAN_NAME = "MySpanName";
 
@@ -57,7 +57,7 @@
   // maxSpansToReturn=0 means all
   private final Filter filter = Filter.create(SPAN_NAME, 0 /* maxSpansToReturn */);
   private final EventQueue eventQueue = new SimpleEventQueue();
-  private final RunningSpanStoreImpl runningSpanStoreImpl =
+  private final InProcessRunningSpanStore runningSpanStoreImpl =
       ExportComponentImpl.createWithoutInProcessStores(eventQueue).getRunningSpanStore();
 
   @Before
diff --git a/impl_core/src/test/java/io/opencensus/implcore/trace/export/SpanExporterImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/trace/export/SpanExporterImplTest.java
index f8f1d91..4bc8739 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/trace/export/SpanExporterImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/trace/export/SpanExporterImplTest.java
@@ -17,7 +17,6 @@
 package io.opencensus.implcore.trace.export;
 
 import static com.google.common.truth.Truth.assertThat;
-import static org.mockito.Matchers.anyListOf;
 import static org.mockito.Mockito.doThrow;
 
 import io.opencensus.common.Duration;
@@ -31,15 +30,20 @@
 import io.opencensus.trace.SpanId;
 import io.opencensus.trace.TraceId;
 import io.opencensus.trace.TraceOptions;
+import io.opencensus.trace.Tracestate;
 import io.opencensus.trace.config.TraceParams;
 import io.opencensus.trace.export.SpanData;
 import io.opencensus.trace.export.SpanExporter.Handler;
+import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
 import java.util.Random;
+import javax.annotation.concurrent.GuardedBy;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
+import org.mockito.ArgumentMatchers;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
 
@@ -53,11 +57,17 @@
       SpanContext.create(
           TraceId.generateRandomId(random),
           SpanId.generateRandomId(random),
-          TraceOptions.builder().setIsSampled(true).build());
+          TraceOptions.builder().setIsSampled(true).build(),
+          Tracestate.builder().build());
   private final SpanContext notSampledSpanContext =
       SpanContext.create(
-          TraceId.generateRandomId(random), SpanId.generateRandomId(random), TraceOptions.DEFAULT);
-  private final RunningSpanStoreImpl runningSpanStore = new InProcessRunningSpanStoreImpl();
+          TraceId.generateRandomId(random),
+          SpanId.generateRandomId(random),
+          TraceOptions.DEFAULT,
+          Tracestate.builder().build());
+  private final InProcessRunningSpanStore runningSpanStore = new InProcessRunningSpanStore();
+  private final SampledSpanStoreImpl sampledSpanStore =
+      SampledSpanStoreImpl.getNoopSampledSpanStoreImpl();
   private final TestHandler serviceHandler = new TestHandler();
   @Mock private Handler mockServiceHandler;
 
@@ -104,7 +114,8 @@
   public void exportDifferentSampledSpans() {
     SpanExporterImpl spanExporter = SpanExporterImpl.create(4, Duration.create(1, 0));
     StartEndHandler startEndHandler =
-        new StartEndHandlerImpl(spanExporter, runningSpanStore, null, new SimpleEventQueue());
+        new StartEndHandlerImpl(
+            spanExporter, runningSpanStore, sampledSpanStore, new SimpleEventQueue());
 
     spanExporter.registerHandler("test.service", serviceHandler);
 
@@ -118,7 +129,8 @@
   public void exportMoreSpansThanTheBufferSize() {
     SpanExporterImpl spanExporter = SpanExporterImpl.create(4, Duration.create(1, 0));
     StartEndHandler startEndHandler =
-        new StartEndHandlerImpl(spanExporter, runningSpanStore, null, new SimpleEventQueue());
+        new StartEndHandlerImpl(
+            spanExporter, runningSpanStore, sampledSpanStore, new SimpleEventQueue());
 
     spanExporter.registerHandler("test.service", serviceHandler);
 
@@ -139,6 +151,86 @@
             span6.toSpanData());
   }
 
+  private static class BlockingExporter extends Handler {
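+    // Blocks every export() call until unblock() is invoked, so spans queue up behind it.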
+    final Object monitor = new Object();
+
+    @GuardedBy("monitor")
+    Boolean condition = Boolean.FALSE;
+
+    @Override
+    public void export(Collection<SpanData> spanDataList) {
+      synchronized (monitor) {
+        while (!condition) {
+          try {
+            monitor.wait();
+          } catch (InterruptedException e) {
+            // Do nothing
+          }
+        }
+      }
+    }
+
+    private void unblock() {
+      synchronized (monitor) {
+        condition = Boolean.TRUE;
+        monitor.notifyAll();
+      }
+    }
+  }
+
+  @Test
+  public void exportMoreSpansThanTheMaximumLimit() {
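+    // Fill the exporter up to its reference limit while one handler blocks, verify that further
+    // spans are dropped, then unblock and confirm the buffered spans are still exported.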
+    final int bufferSize = 4;
+    final int maxReferencedSpans = bufferSize * 4;
+    SpanExporterImpl spanExporter = SpanExporterImpl.create(bufferSize, Duration.create(1, 0));
+    StartEndHandler startEndHandler =
+        new StartEndHandlerImpl(
+            spanExporter, runningSpanStore, sampledSpanStore, new SimpleEventQueue());
+    BlockingExporter blockingExporter = new BlockingExporter();
+
+    spanExporter.registerHandler("test.service", serviceHandler);
+    spanExporter.registerHandler("test.blocking", blockingExporter);
+
+    List<SpanData> spansToExport = new ArrayList<>(maxReferencedSpans);
+    for (int i = 0; i < maxReferencedSpans; i++) {
+      spansToExport.add(createSampledEndedSpan(startEndHandler, "span_1_" + i).toSpanData());
+    }
+
+    assertThat(spanExporter.getReferencedSpans()).isEqualTo(maxReferencedSpans);
+
+    // Now we should start dropping.
+    for (int i = 0; i < 7; i++) {
+      createSampledEndedSpan(startEndHandler, "span_2_" + i);
+      assertThat(spanExporter.getDroppedSpans()).isEqualTo(i + 1);
+    }
+
+    assertThat(spanExporter.getReferencedSpans()).isEqualTo(maxReferencedSpans);
+
+    // Release the blocking exporter
+    blockingExporter.unblock();
+
+    List<SpanData> exported = serviceHandler.waitForExport(maxReferencedSpans);
+    assertThat(exported).isNotNull();
+    assertThat(exported).containsExactlyElementsIn(spansToExport);
+    exported.clear();
+    spansToExport.clear();
+
+    // We cannot compare with maxReferencedSpans here because the worker thread may get
+    // unscheduled immediately after exporting but before updating the pushed-spans counter; in
+    // that case at most bufferSize spans will be missing from the count.
+    assertThat(spanExporter.getPushedSpans()).isAtLeast((long) maxReferencedSpans - bufferSize);
+
+    for (int i = 0; i < 7; i++) {
+      spansToExport.add(createSampledEndedSpan(startEndHandler, "span_3_" + i).toSpanData());
+      // No more dropped spans.
+      assertThat(spanExporter.getDroppedSpans()).isEqualTo(7);
+    }
+
+    exported = serviceHandler.waitForExport(7);
+    assertThat(exported).isNotNull();
+    assertThat(exported).containsExactlyElementsIn(spansToExport);
+  }
+
   @Test
   public void interruptWorkerThreadStops() throws InterruptedException {
     SpanExporterImpl spanExporter = SpanExporterImpl.create(4, Duration.create(1, 0));
@@ -155,11 +247,12 @@
   public void serviceHandlerThrowsException() {
     doThrow(new IllegalArgumentException("No export for you."))
         .when(mockServiceHandler)
-        .export(anyListOf(SpanData.class));
+        .export(ArgumentMatchers.<SpanData>anyList());
 
     SpanExporterImpl spanExporter = SpanExporterImpl.create(4, Duration.create(1, 0));
     StartEndHandler startEndHandler =
-        new StartEndHandlerImpl(spanExporter, runningSpanStore, null, new SimpleEventQueue());
+        new StartEndHandlerImpl(
+            spanExporter, runningSpanStore, sampledSpanStore, new SimpleEventQueue());
 
     spanExporter.registerHandler("test.service", serviceHandler);
 
@@ -177,7 +270,8 @@
   public void exportSpansToMultipleServices() {
     SpanExporterImpl spanExporter = SpanExporterImpl.create(4, Duration.create(1, 0));
     StartEndHandler startEndHandler =
-        new StartEndHandlerImpl(spanExporter, runningSpanStore, null, new SimpleEventQueue());
+        new StartEndHandlerImpl(
+            spanExporter, runningSpanStore, sampledSpanStore, new SimpleEventQueue());
 
     spanExporter.registerHandler("test.service", serviceHandler);
 
@@ -195,7 +289,8 @@
   public void exportNotSampledSpans() {
     SpanExporterImpl spanExporter = SpanExporterImpl.create(4, Duration.create(1, 0));
     StartEndHandler startEndHandler =
-        new StartEndHandlerImpl(spanExporter, runningSpanStore, null, new SimpleEventQueue());
+        new StartEndHandlerImpl(
+            spanExporter, runningSpanStore, sampledSpanStore, new SimpleEventQueue());
 
     spanExporter.registerHandler("test.service", serviceHandler);
 
@@ -217,7 +312,8 @@
     // Set the export delay to zero, for no timeout, in order to confirm the #flush() below works
     SpanExporterImpl spanExporter = SpanExporterImpl.create(4, Duration.create(0, 0));
     StartEndHandler startEndHandler =
-        new StartEndHandlerImpl(spanExporter, runningSpanStore, null, new SimpleEventQueue());
+        new StartEndHandlerImpl(
+            spanExporter, runningSpanStore, sampledSpanStore, new SimpleEventQueue());
 
     spanExporter.registerHandler("test.service", serviceHandler);
 
diff --git a/impl_core/src/test/java/io/opencensus/implcore/trace/internal/ConcurrentIntrusiveListTest.java b/impl_core/src/test/java/io/opencensus/implcore/trace/internal/ConcurrentIntrusiveListTest.java
index d7ac2ae..ba73ebb 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/trace/internal/ConcurrentIntrusiveListTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/trace/internal/ConcurrentIntrusiveListTest.java
@@ -19,19 +19,18 @@
 import static com.google.common.truth.Truth.assertThat;
 
 import io.opencensus.implcore.trace.internal.ConcurrentIntrusiveList.Element;
+import java.util.ArrayList;
 import javax.annotation.Nullable;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 
 /** Unit tests for {@link ConcurrentIntrusiveList}. */
 @RunWith(JUnit4.class)
 public class ConcurrentIntrusiveListTest {
+  private static final int CAPACITY = 5;
   private final ConcurrentIntrusiveList<FakeElement> intrusiveList =
-      new ConcurrentIntrusiveList<FakeElement>();
-  @Rule public final ExpectedException exception = ExpectedException.none();
+      new ConcurrentIntrusiveList<>(CAPACITY);
 
   @Test
   public void emptyList() {
@@ -42,11 +41,11 @@
   @Test
   public void addRemoveAdd_SameElement() {
     FakeElement element = new FakeElement();
-    intrusiveList.addElement(element);
+    assertThat(intrusiveList.addElement(element)).isTrue();
     assertThat(intrusiveList.size()).isEqualTo(1);
-    intrusiveList.removeElement(element);
+    assertThat(intrusiveList.removeElement(element)).isTrue();
     assertThat(intrusiveList.size()).isEqualTo(0);
-    intrusiveList.addElement(element);
+    assertThat(intrusiveList.addElement(element)).isTrue();
     assertThat(intrusiveList.size()).isEqualTo(1);
   }
 
@@ -82,18 +81,69 @@
   }
 
   @Test
+  public void clear() {
+    FakeElement element1 = new FakeElement();
+    FakeElement element2 = new FakeElement();
+    FakeElement element3 = new FakeElement();
+    intrusiveList.addElement(element1);
+    intrusiveList.addElement(element2);
+    intrusiveList.addElement(element3);
+    assertThat(intrusiveList.size()).isEqualTo(3);
+    intrusiveList.clear();
+    // Check that elements are no longer in the list.
+    assertThat(intrusiveList.removeElement(element1)).isFalse();
+    assertThat(intrusiveList.removeElement(element2)).isFalse();
+    assertThat(intrusiveList.removeElement(element3)).isFalse();
+  }
+
+  @Test
+  public void addMoreThanCapacity() {
+    ArrayList<FakeElement> elements = new ArrayList<>(2 * CAPACITY);
+    for (int i = 0; i < CAPACITY; i++) {
+      FakeElement element = new FakeElement();
+      elements.add(element);
+      assertThat(intrusiveList.addElement(element)).isTrue();
+    }
+
+    assertThat(intrusiveList.size()).isEqualTo(CAPACITY);
+
+    // Try to add more elements. All will fail.
+    for (int i = 0; i < CAPACITY; i++) {
+      assertThat(intrusiveList.addElement(new FakeElement())).isFalse();
+      assertThat(intrusiveList.size()).isEqualTo(CAPACITY);
+    }
+
+    // Check that the first CAPACITY elements are still in the list by removing each of them.
+    for (int i = 0; i < CAPACITY; i++) {
+      assertThat(intrusiveList.removeElement(elements.get(i))).isTrue();
+    }
+  }
+
+  @Test
+  public void addMoreThanCapacity_ThenRemoveAndAdd() {
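+    // Fill the list to capacity so that the next add would normally fail.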
+    ArrayList<FakeElement> elements = new ArrayList<>(2 * CAPACITY);
+    for (int i = 0; i < CAPACITY; i++) {
+      FakeElement element = new FakeElement();
+      elements.add(element);
+      assertThat(intrusiveList.addElement(element)).isTrue();
+    }
+
+    assertThat(intrusiveList.removeElement(elements.get(CAPACITY / 2))).isTrue();
+
+    // Now we can add another element
+    assertThat(intrusiveList.addElement(new FakeElement())).isTrue();
+  }
+
+  @Test
   public void addAlreadyAddedElement() {
     FakeElement element = new FakeElement();
-    intrusiveList.addElement(element);
-    exception.expect(IllegalArgumentException.class);
-    intrusiveList.addElement(element);
+    assertThat(intrusiveList.addElement(element)).isTrue();
+    assertThat(intrusiveList.addElement(element)).isFalse();
   }
 
   @Test
   public void removeNotAddedElement() {
-    FakeElement element = new FakeElement();
-    exception.expect(IllegalArgumentException.class);
-    intrusiveList.removeElement(element);
+    assertThat(intrusiveList.removeElement(new FakeElement())).isFalse();
   }
 
   private static final class FakeElement implements Element<FakeElement> {
diff --git a/impl_core/src/test/java/io/opencensus/implcore/trace/propagation/PropagationComponentImplTest.java b/impl_core/src/test/java/io/opencensus/implcore/trace/propagation/PropagationComponentImplTest.java
index 00ed90f..f9e4b3c 100644
--- a/impl_core/src/test/java/io/opencensus/implcore/trace/propagation/PropagationComponentImplTest.java
+++ b/impl_core/src/test/java/io/opencensus/implcore/trace/propagation/PropagationComponentImplTest.java
@@ -37,4 +37,9 @@
   public void implementationOfB3Format() {
     assertThat(propagationComponent.getB3Format()).isInstanceOf(B3Format.class);
   }
+
+  @Test
+  public void implementationOfTraceContextFormat() {
+    assertThat(propagationComponent.getTraceContextFormat()).isInstanceOf(TraceContextFormat.class);
+  }
 }
diff --git a/impl_core/src/test/java/io/opencensus/implcore/trace/propagation/TraceContextFormatTest.java b/impl_core/src/test/java/io/opencensus/implcore/trace/propagation/TraceContextFormatTest.java
new file mode 100644
index 0000000..ea7d3f4
--- /dev/null
+++ b/impl_core/src/test/java/io/opencensus/implcore/trace/propagation/TraceContextFormatTest.java
@@ -0,0 +1,296 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.implcore.trace.propagation;
+
+import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.implcore.trace.propagation.TraceContextFormat.TRACEPARENT;
+import static io.opencensus.implcore.trace.propagation.TraceContextFormat.TRACESTATE;
+
+import io.opencensus.trace.SpanContext;
+import io.opencensus.trace.SpanId;
+import io.opencensus.trace.TraceId;
+import io.opencensus.trace.TraceOptions;
+import io.opencensus.trace.Tracestate;
+import io.opencensus.trace.propagation.SpanContextParseException;
+import io.opencensus.trace.propagation.TextFormat.Getter;
+import io.opencensus.trace.propagation.TextFormat.Setter;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import javax.annotation.Nullable;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link TraceContextFormat}. */
+@RunWith(JUnit4.class)
+public class TraceContextFormatTest {
+
+  private static final Tracestate TRACESTATE_DEFAULT = Tracestate.builder().build();
+  private static final Tracestate TRACESTATE_NOT_DEFAULT =
+      Tracestate.builder().set("foo", "bar").set("bar", "baz").build();
+  private static final String TRACE_ID_BASE16 = "ff000000000000000000000000000041";
+  private static final TraceId TRACE_ID = TraceId.fromLowerBase16(TRACE_ID_BASE16);
+  private static final String SPAN_ID_BASE16 = "ff00000000000041";
+  private static final SpanId SPAN_ID = SpanId.fromLowerBase16(SPAN_ID_BASE16);
+  private static final byte SAMPLED_TRACE_OPTIONS_BYTES = 1;
+  private static final TraceOptions SAMPLED_TRACE_OPTIONS =
+      TraceOptions.fromByte(SAMPLED_TRACE_OPTIONS_BYTES);
+  private static final String TRACEPARENT_HEADER_SAMPLED =
+      "00-" + TRACE_ID_BASE16 + "-" + SPAN_ID_BASE16 + "-01";
+  private static final String TRACEPARENT_HEADER_NOT_SAMPLED =
+      "00-" + TRACE_ID_BASE16 + "-" + SPAN_ID_BASE16 + "-00";
+  private static final Setter<Map<String, String>> setter =
+      new Setter<Map<String, String>>() {
+        @Override
+        public void put(Map<String, String> carrier, String key, String value) {
+          carrier.put(key, value);
+        }
+      };
+  private static final Getter<Map<String, String>> getter =
+      new Getter<Map<String, String>>() {
+        @Nullable
+        @Override
+        public String get(Map<String, String> carrier, String key) {
+          return carrier.get(key);
+        }
+      };
+  // Encoding preserves the entry order, which is the reverse of the order in which entries were added.
+  private static final String TRACESTATE_NOT_DEFAULT_ENCODING = "bar=baz,foo=bar";
+  private final TraceContextFormat traceContextFormat = new TraceContextFormat();
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  @Test
+  public void inject_SampledContext() {
+    Map<String, String> carrier = new LinkedHashMap<String, String>();
+    traceContextFormat.inject(
+        SpanContext.create(TRACE_ID, SPAN_ID, SAMPLED_TRACE_OPTIONS, TRACESTATE_DEFAULT),
+        carrier,
+        setter);
+    assertThat(carrier).containsExactly(TRACEPARENT, TRACEPARENT_HEADER_SAMPLED);
+  }
+
+  @Test
+  public void inject_NotSampledContext() {
+    Map<String, String> carrier = new LinkedHashMap<String, String>();
+    traceContextFormat.inject(
+        SpanContext.create(TRACE_ID, SPAN_ID, TraceOptions.DEFAULT, TRACESTATE_DEFAULT),
+        carrier,
+        setter);
+    assertThat(carrier).containsExactly(TRACEPARENT, TRACEPARENT_HEADER_NOT_SAMPLED);
+  }
+
+  @Test
+  public void inject_SampledContext_WithTraceState() {
+    Map<String, String> carrier = new LinkedHashMap<String, String>();
+    traceContextFormat.inject(
+        SpanContext.create(TRACE_ID, SPAN_ID, SAMPLED_TRACE_OPTIONS, TRACESTATE_NOT_DEFAULT),
+        carrier,
+        setter);
+    assertThat(carrier)
+        .containsExactly(
+            TRACEPARENT, TRACEPARENT_HEADER_SAMPLED, TRACESTATE, TRACESTATE_NOT_DEFAULT_ENCODING);
+  }
+
+  @Test
+  public void inject_NotSampledContext_WithTraceState() {
+    Map<String, String> carrier = new LinkedHashMap<String, String>();
+    traceContextFormat.inject(
+        SpanContext.create(TRACE_ID, SPAN_ID, TraceOptions.DEFAULT, TRACESTATE_NOT_DEFAULT),
+        carrier,
+        setter);
+    assertThat(carrier)
+        .containsExactly(
+            TRACEPARENT,
+            TRACEPARENT_HEADER_NOT_SAMPLED,
+            TRACESTATE,
+            TRACESTATE_NOT_DEFAULT_ENCODING);
+  }
+
+  @Test
+  public void extract_SampledContext() throws SpanContextParseException {
+    Map<String, String> carrier = new LinkedHashMap<String, String>();
+    carrier.put(TRACEPARENT, TRACEPARENT_HEADER_SAMPLED);
+    assertThat(traceContextFormat.extract(carrier, getter))
+        .isEqualTo(
+            SpanContext.create(TRACE_ID, SPAN_ID, SAMPLED_TRACE_OPTIONS, TRACESTATE_DEFAULT));
+  }
+
+  @Test
+  public void extract_NotSampledContext() throws SpanContextParseException {
+    Map<String, String> carrier = new LinkedHashMap<String, String>();
+    carrier.put(TRACEPARENT, TRACEPARENT_HEADER_NOT_SAMPLED);
+    assertThat(traceContextFormat.extract(carrier, getter))
+        .isEqualTo(SpanContext.create(TRACE_ID, SPAN_ID, TraceOptions.DEFAULT, TRACESTATE_DEFAULT));
+  }
+
+  @Test
+  public void extract_SampledContext_WithTraceState() throws SpanContextParseException {
+    Map<String, String> carrier = new LinkedHashMap<String, String>();
+    carrier.put(TRACEPARENT, TRACEPARENT_HEADER_SAMPLED);
+    carrier.put(TRACESTATE, TRACESTATE_NOT_DEFAULT_ENCODING);
+    assertThat(traceContextFormat.extract(carrier, getter))
+        .isEqualTo(
+            SpanContext.create(TRACE_ID, SPAN_ID, SAMPLED_TRACE_OPTIONS, TRACESTATE_NOT_DEFAULT));
+  }
+
+  @Test
+  public void extract_NotSampledContext_WithTraceState() throws SpanContextParseException {
+    Map<String, String> carrier = new LinkedHashMap<String, String>();
+    carrier.put(TRACEPARENT, TRACEPARENT_HEADER_NOT_SAMPLED);
+    carrier.put(TRACESTATE, TRACESTATE_NOT_DEFAULT_ENCODING);
+    assertThat(traceContextFormat.extract(carrier, getter))
+        .isEqualTo(
+            SpanContext.create(TRACE_ID, SPAN_ID, TraceOptions.DEFAULT, TRACESTATE_NOT_DEFAULT));
+  }
+
+  @Test
+  public void extract_NotSampledContext_NextVersion() throws SpanContextParseException {
+    Map<String, String> carrier = new LinkedHashMap<String, String>();
+    carrier.put(TRACEPARENT, "01-" + TRACE_ID_BASE16 + "-" + SPAN_ID_BASE16 + "-00-02");
+    assertThat(traceContextFormat.extract(carrier, getter))
+        .isEqualTo(SpanContext.create(TRACE_ID, SPAN_ID, TraceOptions.DEFAULT, TRACESTATE_DEFAULT));
+  }
+
+  @Test
+  public void extract_NotSampledContext_EmptyTraceState() throws SpanContextParseException {
+    Map<String, String> carrier = new LinkedHashMap<String, String>();
+    carrier.put(TRACEPARENT, TRACEPARENT_HEADER_NOT_SAMPLED);
+    carrier.put(TRACESTATE, "");
+    assertThat(traceContextFormat.extract(carrier, getter))
+        .isEqualTo(SpanContext.create(TRACE_ID, SPAN_ID, TraceOptions.DEFAULT, TRACESTATE_DEFAULT));
+  }
+
+  @Test
+  public void extract_NotSampledContext_TraceStateWithSpaces() throws SpanContextParseException {
+    Map<String, String> carrier = new LinkedHashMap<String, String>();
+    carrier.put(TRACEPARENT, TRACEPARENT_HEADER_NOT_SAMPLED);
+    carrier.put(TRACESTATE, "foo=bar   ,    bar=baz");
+    assertThat(traceContextFormat.extract(carrier, getter))
+        .isEqualTo(
+            SpanContext.create(TRACE_ID, SPAN_ID, TraceOptions.DEFAULT, TRACESTATE_NOT_DEFAULT));
+  }
+
+  @Test
+  public void extract_InvalidTraceId() throws SpanContextParseException {
+    Map<String, String> invalidHeaders = new LinkedHashMap<String, String>();
+    invalidHeaders.put(
+        TRACEPARENT, "00-" + "abcdefghijklmnopabcdefghijklmnop" + "-" + SPAN_ID_BASE16 + "-01");
+    thrown.expect(SpanContextParseException.class);
+    thrown.expectMessage(
+        "Invalid traceparent: "
+            + "00-"
+            + "abcdefghijklmnopabcdefghijklmnop"
+            + "-"
+            + SPAN_ID_BASE16
+            + "-01");
+    traceContextFormat.extract(invalidHeaders, getter);
+  }
+
+  @Test
+  public void extract_InvalidTraceId_Size() throws SpanContextParseException {
+    Map<String, String> invalidHeaders = new LinkedHashMap<String, String>();
+    invalidHeaders.put(TRACEPARENT, "00-" + TRACE_ID_BASE16 + "00-" + SPAN_ID_BASE16 + "-01");
+    thrown.expect(SpanContextParseException.class);
+    thrown.expectMessage(
+        "Invalid traceparent: " + "00-" + TRACE_ID_BASE16 + "00-" + SPAN_ID_BASE16 + "-01");
+    traceContextFormat.extract(invalidHeaders, getter);
+  }
+
+  @Test
+  public void extract_InvalidSpanId() throws SpanContextParseException {
+    Map<String, String> invalidHeaders = new HashMap<String, String>();
+    invalidHeaders.put(TRACEPARENT, "00-" + TRACE_ID_BASE16 + "-" + "abcdefghijklmnop" + "-01");
+    thrown.expect(SpanContextParseException.class);
+    thrown.expectMessage(
+        "Invalid traceparent: " + "00-" + TRACE_ID_BASE16 + "-" + "abcdefghijklmnop" + "-01");
+    traceContextFormat.extract(invalidHeaders, getter);
+  }
+
+  @Test
+  public void extract_InvalidSpanId_Size() throws SpanContextParseException {
+    Map<String, String> invalidHeaders = new HashMap<String, String>();
+    invalidHeaders.put(TRACEPARENT, "00-" + TRACE_ID_BASE16 + "-" + SPAN_ID_BASE16 + "00-01");
+    thrown.expect(SpanContextParseException.class);
+    thrown.expectMessage(
+        "Invalid traceparent: " + "00-" + TRACE_ID_BASE16 + "-" + SPAN_ID_BASE16 + "00-01");
+    traceContextFormat.extract(invalidHeaders, getter);
+  }
+
+  @Test
+  public void extract_InvalidTraceOptions() throws SpanContextParseException {
+    Map<String, String> invalidHeaders = new HashMap<String, String>();
+    invalidHeaders.put(TRACEPARENT, "00-" + TRACE_ID_BASE16 + "-" + SPAN_ID_BASE16 + "-gh");
+    thrown.expect(SpanContextParseException.class);
+    thrown.expectMessage(
+        "Invalid traceparent: " + "00-" + TRACE_ID_BASE16 + "-" + SPAN_ID_BASE16 + "-gh");
+    traceContextFormat.extract(invalidHeaders, getter);
+  }
+
+  @Test
+  public void extract_InvalidTraceOptions_Size() throws SpanContextParseException {
+    Map<String, String> invalidHeaders = new HashMap<String, String>();
+    invalidHeaders.put(TRACEPARENT, "00-" + TRACE_ID_BASE16 + "-" + SPAN_ID_BASE16 + "-0100");
+    thrown.expect(SpanContextParseException.class);
+    thrown.expectMessage(
+        "Invalid traceparent: " + "00-" + TRACE_ID_BASE16 + "-" + SPAN_ID_BASE16 + "-0100");
+    traceContextFormat.extract(invalidHeaders, getter);
+  }
+
+  @Test
+  public void extract_InvalidTracestate_EntriesDelimiter() throws SpanContextParseException {
+    Map<String, String> invalidHeaders = new HashMap<String, String>();
+    invalidHeaders.put(TRACEPARENT, "00-" + TRACE_ID_BASE16 + "-" + SPAN_ID_BASE16 + "-01");
+    invalidHeaders.put(TRACESTATE, "foo=bar;test=test");
+    thrown.expect(SpanContextParseException.class);
+    thrown.expectMessage("Invalid tracestate: " + "foo=bar;test=test");
+    traceContextFormat.extract(invalidHeaders, getter);
+  }
+
+  @Test
+  public void extract_InvalidTracestate_KeyValueDelimiter() throws SpanContextParseException {
+    Map<String, String> invalidHeaders = new HashMap<String, String>();
+    invalidHeaders.put(TRACEPARENT, "00-" + TRACE_ID_BASE16 + "-" + SPAN_ID_BASE16 + "-01");
+    invalidHeaders.put(TRACESTATE, "foo=bar,test-test");
+    thrown.expect(SpanContextParseException.class);
+    thrown.expectMessage("Invalid tracestate: " + "foo=bar,test-test");
+    traceContextFormat.extract(invalidHeaders, getter);
+  }
+
+  @Test
+  public void extract_InvalidTracestate_OneString() throws SpanContextParseException {
+    Map<String, String> invalidHeaders = new HashMap<String, String>();
+    invalidHeaders.put(TRACEPARENT, "00-" + TRACE_ID_BASE16 + "-" + SPAN_ID_BASE16 + "-01");
+    invalidHeaders.put(TRACESTATE, "test-test");
+    thrown.expect(SpanContextParseException.class);
+    thrown.expectMessage("Invalid tracestate: " + "test-test");
+    traceContextFormat.extract(invalidHeaders, getter);
+  }
+
+  @Test
+  public void fieldsList() {
+    assertThat(traceContextFormat.fields()).containsExactly(TRACEPARENT, TRACESTATE);
+  }
+
+  @Test
+  public void headerNames() {
+    assertThat(TRACEPARENT).isEqualTo("traceparent");
+    assertThat(TRACESTATE).isEqualTo("tracestate");
+  }
+}
diff --git a/impl_lite/build.gradle b/impl_lite/build.gradle
index b8692fd..1a4b5ab 100644
--- a/impl_lite/build.gradle
+++ b/impl_lite/build.gradle
@@ -4,9 +4,6 @@
     compile project(':opencensus-api'),
             project(':opencensus-impl-core')
 
-    testCompile project(':opencensus-api'),
-            project(':opencensus-impl-core')
-
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
     signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
 }
diff --git a/scripts/travis_script b/scripts/travis_script
deleted file mode 100755
index 7b7bec5..0000000
--- a/scripts/travis_script
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/bin/bash
-#
-# Travis build script, cf.
-# https://docs.travis-ci.com/user/customizing-the-build/#Implementing-Complex-Build-Steps.
-
-set -o errexit
-set -o xtrace
-
-case "$TASK" in
-  "CHECK_GIT_HISTORY")
-    python "$(dirname "$0")"/check-git-history.py
-    ;;
-  "BUILD")
-    case "$TRAVIS_OS_NAME" in
-      "linux")
-        source /opt/jdk_switcher/jdk_switcher.sh
-        export JAVA8_HOME="$(jdk_switcher home oraclejdk8)"
-        case "$TRAVIS_JDK_VERSION" in
-          "oraclejdk9")
-            ./gradlew clean assemble check --stacktrace
-            ;;
-          "oraclejdk8")
-            export JAVA_HOMES="$(jdk_switcher home openjdk6)/jre:$(jdk_switcher home openjdk7)/jre:$(jdk_switcher home oraclejdk8)/jre:$(jdk_switcher home oraclejdk9)"
-            ./gradlew clean assemble --stacktrace
-            ./gradlew check :opencensus-all:jacocoTestReport
-            ./gradlew verGJF
-            ;;
-          "openjdk7")
-            # "./gradlew classes testClasses" is a workaround for
-            # https://github.com/gradle/gradle/issues/2421.
-            # See https://github.com/gradle/gradle/issues/2421#issuecomment-319916874.
-            JAVA_HOME="$(jdk_switcher home openjdk8)" ./gradlew classes testClasses
-            ./gradlew clean assemble --stacktrace
-            ./gradlew check
-            ;;
-          *)
-            echo "Unknown JDK version $TRAVIS_JDK_VERSION"
-            exit 1
-            ;;
-        esac
-        ;;
-      "osx")
-        # OS X is a separate case, because the JDK version is determined by the OS X image:
-        # https://docs.travis-ci.com/user/reference/osx/#JDK-and-OS-X
-        ./gradlew clean assemble --stacktrace
-        ./gradlew check
-        ;;
-      *)
-        echo "Unknown OS name $TRAVIS_OS_NAME"
-        exit 1
-        ;;
-    esac
-    ;;
-  "CHECKER_FRAMEWORK")
-    ./gradlew clean assemble -PcheckerFramework=true
-    ;;
-  "CHECK_EXAMPLES_LICENSE")
-    curl -L -o checkstyle-8.12-all.jar https://github.com/checkstyle/checkstyle/releases/download/checkstyle-8.12/checkstyle-8.12-all.jar
-    java -DrootDir=. -jar checkstyle-8.12-all.jar -c buildscripts/checkstyle.xml examples/src/
-    ;;
-  "CHECK_EXAMPLES_FORMAT")
-    curl -L -o google-java-format-1.5-all-deps.jar https://github.com/google/google-java-format/releases/download/google-java-format-1.5/google-java-format-1.5-all-deps.jar
-    java -jar google-java-format-1.5-all-deps.jar --set-exit-if-changed --dry-run `find examples/src/ -name '*.java'`
-    ;;
-  "BUILD_EXAMPLES_GRADLE")
-    pushd examples && ./gradlew clean assemble --stacktrace && popd
-    ;;
-  "BUILD_EXAMPLES_MAVEN")
-    pushd examples && mvn clean package appassembler:assemble -e && popd
-    ;;
-  "BUILD_EXAMPLES_BAZEL")
-    pushd examples && bazel clean && bazel build :all && popd
-    ;;
-  *)
-    echo "Unknown task $TASK"
-    exit 1
-    ;;
-esac
diff --git a/settings.gradle b/settings.gradle
index 75060d2..19ffa35 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -1,32 +1,48 @@
 rootProject.name = "opencensus-java"
 
+include ":opencensus-all"
 include ":opencensus-api"
+include ":opencensus-benchmarks"
 include ":opencensus-impl-core"
 include ":opencensus-impl-lite"
 include ":opencensus-impl"
 include ":opencensus-testing"
+include ":opencensus-exporter-metrics-ocagent"
+include ":opencensus-exporter-metrics-util"
+include ":opencensus-exporter-trace-datadog"
+include ":opencensus-exporter-trace-elasticsearch"
 include ":opencensus-exporter-trace-instana"
 include ":opencensus-exporter-trace-logging"
 include ":opencensus-exporter-trace-ocagent"
 include ":opencensus-exporter-trace-stackdriver"
 include ":opencensus-exporter-trace-zipkin"
 include ":opencensus-exporter-trace-jaeger"
+include ":opencensus-exporter-trace-util"
 include ":opencensus-exporter-stats-signalfx"
 include ":opencensus-exporter-stats-stackdriver"
 include ":opencensus-exporter-stats-prometheus"
 include ":opencensus-contrib-agent"
 include ":opencensus-contrib-appengine-standard-util"
 include ":opencensus-contrib-dropwizard"
+include ":opencensus-contrib-dropwizard5"
 include ":opencensus-contrib-exemplar-util"
 include ":opencensus-contrib-grpc-metrics"
 include ":opencensus-contrib-grpc-util"
+include ":opencensus-contrib-http-jaxrs"
+include ":opencensus-contrib-http-jetty-client"
+include ":opencensus-contrib-http-servlet"
 include ":opencensus-contrib-http-util"
 include ":opencensus-contrib-log-correlation-stackdriver"
-include ":opencensus-contrib-monitored-resource-util"
+include ":opencensus-contrib-observability-ready-util"
+include ":opencensus-contrib-resource-util"
 include ":opencensus-contrib-spring"
 include ":opencensus-contrib-spring-sleuth-v1x"
+include ":opencensus-contrib-spring-starter"
+include ":opencensus-contrib-zpages"
 
+project(':opencensus-all').projectDir = "$rootDir/all" as File
 project(':opencensus-api').projectDir = "$rootDir/api" as File
+project(':opencensus-benchmarks').projectDir = "$rootDir/benchmarks" as File
 project(':opencensus-impl-core').projectDir = "$rootDir/impl_core" as File
 project(':opencensus-impl-lite').projectDir = "$rootDir/impl_lite" as File
 project(':opencensus-impl').projectDir = "$rootDir/impl" as File
@@ -35,23 +51,39 @@
 project(':opencensus-contrib-appengine-standard-util').projectDir =
         "$rootDir/contrib/appengine_standard_util" as File
 project(':opencensus-contrib-dropwizard').projectDir = "$rootDir/contrib/dropwizard" as File
+project(':opencensus-contrib-dropwizard5').projectDir = "$rootDir/contrib/dropwizard5" as File
 project(':opencensus-contrib-exemplar-util').projectDir = "$rootDir/contrib/exemplar_util" as File
 project(':opencensus-contrib-grpc-metrics').projectDir = "$rootDir/contrib/grpc_metrics" as File
 project(':opencensus-contrib-grpc-util').projectDir = "$rootDir/contrib/grpc_util" as File
+project(':opencensus-contrib-http-jaxrs').projectDir = "$rootDir/contrib/http_jaxrs" as File
+project(':opencensus-contrib-http-jetty-client').projectDir =
+        "$rootDir/contrib/http_jetty_client" as File
+project(':opencensus-contrib-http-servlet').projectDir = "$rootDir/contrib/http_servlet" as File
 project(':opencensus-contrib-http-util').projectDir = "$rootDir/contrib/http_util" as File
 project(':opencensus-contrib-log-correlation-stackdriver').projectDir =
         "$rootDir/contrib/log_correlation/stackdriver" as File
-project(':opencensus-contrib-monitored-resource-util').projectDir =
-        "$rootDir/contrib/monitored_resource_util" as File
+project(':opencensus-contrib-observability-ready-util').projectDir =
+        "$rootDir/contrib/observability_ready_util" as File
+project(':opencensus-contrib-resource-util').projectDir = "$rootDir/contrib/resource_util" as File
 project(':opencensus-contrib-spring').projectDir = "$rootDir/contrib/spring" as File
 project(':opencensus-contrib-spring-sleuth-v1x').projectDir =
         "$rootDir/contrib/spring_sleuth_v1x" as File
+project(':opencensus-contrib-spring-starter').projectDir = "$rootDir/contrib/spring_starter" as File
+project(':opencensus-contrib-zpages').projectDir = "$rootDir/contrib/zpages" as File
+project(':opencensus-exporter-metrics-ocagent').projectDir =
+        "$rootDir/exporters/metrics/ocagent" as File
+project(':opencensus-exporter-metrics-util').projectDir =
+        "$rootDir/exporters/metrics/util" as File
 project(':opencensus-exporter-stats-signalfx').projectDir =
         "$rootDir/exporters/stats/signalfx" as File
 project(':opencensus-exporter-stats-stackdriver').projectDir =
         "$rootDir/exporters/stats/stackdriver" as File
 project(':opencensus-exporter-stats-prometheus').projectDir =
         "$rootDir/exporters/stats/prometheus" as File
+project(':opencensus-exporter-trace-elasticsearch').projectDir =
+        "$rootDir/exporters/trace/elasticsearch" as File
+project(':opencensus-exporter-trace-datadog').projectDir =
+        "$rootDir/exporters/trace/datadog" as File
 project(':opencensus-exporter-trace-instana').projectDir =
         "$rootDir/exporters/trace/instana" as File
 project(':opencensus-exporter-trace-logging').projectDir =
@@ -62,15 +94,4 @@
         "$rootDir/exporters/trace/stackdriver" as File
 project(':opencensus-exporter-trace-zipkin').projectDir = "$rootDir/exporters/trace/zipkin" as File
 project(':opencensus-exporter-trace-jaeger').projectDir = "$rootDir/exporters/trace/jaeger" as File
-
-
-// Java8 projects only
-if (JavaVersion.current().isJava8Compatible()) {
-    include ":opencensus-all"
-    include ":opencensus-benchmarks"
-    include ":opencensus-contrib-zpages"
-
-    project(':opencensus-all').projectDir = "$rootDir/all" as File
-    project(':opencensus-benchmarks').projectDir = "$rootDir/benchmarks" as File
-    project(':opencensus-contrib-zpages').projectDir = "$rootDir/contrib/zpages" as File
-}
+project(':opencensus-exporter-trace-util').projectDir = "$rootDir/exporters/trace/util" as File
diff --git a/testing/build.gradle b/testing/build.gradle
index 811b059..aabafe5 100644
--- a/testing/build.gradle
+++ b/testing/build.gradle
@@ -4,8 +4,6 @@
     compile project(':opencensus-api'),
             libraries.guava
 
-    testCompile project(':opencensus-api')
-
     signature "org.codehaus.mojo.signature:java17:1.0@signature"
     signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
 }
diff --git a/testing/src/main/java/io/opencensus/testing/export/TestHandler.java b/testing/src/main/java/io/opencensus/testing/export/TestHandler.java
index 6d73aff..3269947 100644
--- a/testing/src/main/java/io/opencensus/testing/export/TestHandler.java
+++ b/testing/src/main/java/io/opencensus/testing/export/TestHandler.java
@@ -20,7 +20,6 @@
 import io.opencensus.trace.export.SpanExporter;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.LinkedList;
 import java.util.List;
 import javax.annotation.Nullable;
 import javax.annotation.concurrent.GuardedBy;
@@ -34,10 +33,8 @@
 
   private final Object monitor = new Object();
 
-  // TODO: Decide whether to use a different class instead of LinkedList.
   @GuardedBy("monitor")
-  @SuppressWarnings("JdkObsolete")
-  private final List<SpanData> spanDataList = new LinkedList<SpanData>();
+  private final List<SpanData> spanDataList = new ArrayList<>();
 
   @Override
   public void export(Collection<SpanData> spanDataList) {
@@ -69,7 +66,7 @@
           return null;
         }
       }
-      ret = new ArrayList<SpanData>(spanDataList);
+      ret = new ArrayList<>(spanDataList);
       spanDataList.clear();
     }
     return ret;