Merge Android 12

Bug: 202323961
Merged-In: Ia96b39cc2cea72949f10593952abfb9f9901d682
Change-Id: I2feac8f91c7b6cbf032fd3c4ca519fd2ae8e6657
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..d97975c
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,3 @@
+third_party {
+  license_type: NOTICE
+}
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..8fc6106
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1,6 @@
+zhijunhe@google.com
+cychen@google.com
+lidawang@google.com
+yromanenko@google.com
+ianchien@google.com
+etalvala@google.com
diff --git a/common/apex_update_listener/Android.bp b/common/apex_update_listener/Android.bp
new file mode 100644
index 0000000..377b99a
--- /dev/null
+++ b/common/apex_update_listener/Android.bp
@@ -0,0 +1,54 @@
+// This library is meant to be fairly independent, and dependencies should be
+// kept to a minimum so that it can be statically linked into both rlsservice
+// and GCH
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_defaults {
+    name: "apex_update_listener_cc_defaults_internal",
+    host_supported: true,
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+        "-Wthread-safety",
+    ],
+    shared_libs: [
+        "libbase",
+        "liblog",
+    ],
+    static_libs: [
+        "libtinyxml2",
+    ],
+    proprietary: true,
+    vendor: true,
+}
+
+cc_defaults {
+  name: "apex_update_listener_cc_defaults_static",
+  static_libs: [
+    "libtinyxml2",
+    "libapex_update_listener",
+  ],
+}
+
+cc_library_static {
+    name: "libapex_update_listener",
+    defaults: ["apex_update_listener_cc_defaults_internal"],
+    srcs: ["apex_update_listener.cc"],
+    export_include_dirs: ["."],
+}
+
+cc_test {
+    name: "libapex_update_listener_test",
+    defaults: ["apex_update_listener_cc_defaults_internal"],
+    gtest: true,
+    srcs: ["apex_update_listener_test.cc"],
+    static_libs: [
+        "libgmock",
+        "libapex_update_listener",
+    ],
+    require_root: true,
+}
diff --git a/common/apex_update_listener/apex_update_listener.cc b/common/apex_update_listener/apex_update_listener.cc
new file mode 100644
index 0000000..604f9d2
--- /dev/null
+++ b/common/apex_update_listener/apex_update_listener.cc
@@ -0,0 +1,190 @@
+#include "apex_update_listener.h"
+#include <log/log.h>
+#include <sys/inotify.h>
+#include <sys/types.h>
+#include <tinyxml2.h>
+#include <fstream>
+#include <sstream>
+#include <streambuf>
+#include <string>
+#include <vector>
+
+#undef LOG_TAG
+#define LOG_TAG "apex_update_listener"
+
+namespace {
+
+template <typename T>
+std::string ToString(const T& value) {
+  std::stringstream stream;
+  stream << value;
+  return stream.str();
+}
+
+}  // namespace
+
+ApexUpdateListener::ApexUpdateListener(
+    ApexUpdateListener::Sigil, const std::string& apex_name,
+    const std::string& apex_info_list_file_name, CallbackFunction callback,
+    int fd, int wd, std::set<Info> last_info)
+    : apex_name_(apex_name),
+      apex_info_list_file_name_(apex_info_list_file_name),
+      callback_function_(callback),
+      file_descriptor_(fd),
+      watch_descriptor_(wd),
+      last_info_(last_info),
+      thread_(&ApexUpdateListener::ThreadFunction, this) {
+}
+
+ApexUpdateListener::~ApexUpdateListener() {
+  running_ = false;
+  if (watch_descriptor_ >= 0) {
+    inotify_rm_watch(file_descriptor_, watch_descriptor_);
+  }
+  if (file_descriptor_ >= 0) {
+    close(file_descriptor_);
+  }
+  if (thread_.joinable()) {
+    thread_.join();
+  }
+}
+std::unique_ptr<ApexUpdateListener> ApexUpdateListener::Make(
+    const std::string& apex_name, CallbackFunction callback,
+    bool invoke_with_initial_version,
+    const std::string& apex_info_list_file_name) {
+  int file_descriptor = inotify_init();
+  if (file_descriptor < 0) {
+    ALOGE("Failed to inotify_init(): %s (%d)", strerror(errno), errno);
+    return nullptr;
+  }
+  int watch_descriptor = inotify_add_watch(
+      file_descriptor, std::string(apex_info_list_file_name).c_str(),
+      IN_MODIFY | IN_CREATE | IN_DELETE);
+  if (watch_descriptor < 0) {
+    ALOGE("Failed to inotify_add_watch(%s): %s (%d)",
+          std::string(apex_info_list_file_name).c_str(), strerror(errno), errno);
+    close(file_descriptor);
+    return nullptr;
+  }
+  if (!callback) {
+    ALOGE("Called with null callback");
+    return nullptr;
+  }
+  auto last_info = TrySlurpInfo(apex_name, apex_info_list_file_name);
+  if (!last_info.has_value()) {
+    ALOGE("Could not slurp info from %s for package %s",
+          std::string(apex_info_list_file_name).c_str(),
+          std::string(apex_name).c_str());
+    return nullptr;
+  }
+  if (invoke_with_initial_version) {
+    callback(std::set<Info>(), *last_info);
+  }
+
+  return std::make_unique<ApexUpdateListener>(
+      Sigil{}, apex_name, apex_info_list_file_name, callback, file_descriptor,
+      watch_descriptor, *last_info);
+}
+
+std::optional<std::set<ApexUpdateListener::Info>>
+ApexUpdateListener::TrySlurpInfo(const std::string& apex_name,
+                                 const std::string& apex_info_list_file_name) {
+  tinyxml2::XMLDocument xml_doc;
+  auto status = xml_doc.LoadFile(apex_info_list_file_name.c_str());
+  if (status != tinyxml2::XML_SUCCESS) {
+    ALOGE("XML parsing failed: %d", status);
+    return std::nullopt;
+  }
+  tinyxml2::XMLElement* apex_info_list =
+      xml_doc.FirstChildElement("apex-info-list");
+  if (!apex_info_list) {
+    ALOGE("XML did not contain apex-info-list");
+    return std::nullopt;
+  }
+  std::set<ApexUpdateListener::Info> ret;
+  for (tinyxml2::XMLElement* apex_info =
+           apex_info_list->FirstChildElement("apex-info");
+       apex_info != nullptr;
+       apex_info = apex_info->NextSiblingElement("apex-info")) {
+    if (apex_info->Attribute("moduleName", apex_name.c_str())) {
+      Info info{.module_name = apex_name.c_str()};
+      auto module_path = apex_info->Attribute("modulePath");
+      auto preinstalled_module_path =
+          apex_info->Attribute("preinstalledModulePath");
+      auto version_code = apex_info->Attribute("versionCode");
+      auto version_name = apex_info->Attribute("versionName");
+      auto is_active = apex_info->Attribute("isActive");
+      auto is_factory = apex_info->Attribute("isFactory");
+      if (module_path) {
+        info.module_path = module_path;
+      }
+      if (preinstalled_module_path) {
+        info.preinstalled_module_path = preinstalled_module_path;
+      }
+      if (version_code) {
+        info.version_code = std::strtol(version_code, nullptr, 10);
+      }
+      if (version_name) {
+        info.version_name = version_name;
+      }
+      if (is_active) {
+        info.is_active = !strcmp(is_active, "true");
+      }
+      if (is_factory) {
+        info.is_factory = !strcmp(is_factory, "true");
+      }
+      ret.insert(info);
+    }
+  }
+  if (ret.size()) {
+    return ret;
+  }
+  ALOGE("XML did not contain any apex-info about %s", apex_name.c_str());
+  return std::nullopt;
+}
+
+void ApexUpdateListener::ThreadFunction() {
+  // Maximum number of events to read at a time
+  constexpr int event_number = 16;
+  std::vector<struct inotify_event> events(event_number);
+  do {
+    auto length = read(file_descriptor_, events.data(),
+                       event_number * sizeof(inotify_event));
+    if (length == -EINTR) {
+      continue;  // Interrupted by signal, try again
+    }
+    if (length < 0 || !running_) {
+      if (running_) {
+        ALOGE("Error reading inotify event from '%s': %s (%d)",
+              apex_info_list_file_name_.c_str(), strerror(errno), errno);
+      }
+      return;
+    }
+
+    for (size_t i = 0; i < length / sizeof(inotify_event); i++) {
+      struct inotify_event& event = events[i];
+
+      if (event.mask & IN_CREATE) {
+        ALOGI("%s created as %s", apex_info_list_file_name_.c_str(),
+              (event.mask & IN_ISDIR) ? "directory" : "file");
+      } else if (event.mask & IN_DELETE) {
+        ALOGI("%s deleted as %s", apex_info_list_file_name_.c_str(),
+              (event.mask & IN_ISDIR) ? "directory" : "file");
+      } else if (event.mask & IN_MODIFY) {
+        ALOGI("%s modified as %s", apex_info_list_file_name_.c_str(),
+              (event.mask & IN_ISDIR) ? "directory" : "file");
+      }
+      // We don't really care how it was updated as long as it wasn't deleted
+      if (event.mask & IN_DELETE) {
+        continue;
+      }
+      auto info = TrySlurpInfo(apex_name_, apex_info_list_file_name_);
+      if (info.has_value()) {
+        if (*info != last_info_ && callback_function_) {
+          callback_function_(last_info_, *info);
+          last_info_ = *info;
+        }
+      }
+    }
+  } while (running_);
+}
diff --git a/common/apex_update_listener/apex_update_listener.h b/common/apex_update_listener/apex_update_listener.h
new file mode 100644
index 0000000..6263144
--- /dev/null
+++ b/common/apex_update_listener/apex_update_listener.h
@@ -0,0 +1,92 @@
+#pragma once
+
+#include <set>
+#include <sstream>
+#include <string>
+#include <string_view>
+#include <thread>
+
+class ApexUpdateListener {
+ private:
+  struct Sigil {};
+
+ public:
+  // Information extracted from updated /apex/apex-info-list.xml
+  struct Info {
+    std::string module_name;
+    std::string module_path;
+    std::string preinstalled_module_path;
+    int64_t version_code;
+    std::string version_name;
+    bool is_factory;
+    bool is_active;
+
+    auto AsTuple() const {
+      return std::make_tuple(is_active, is_factory, version_code, version_name,
+                             module_path, preinstalled_module_path, module_name);
+    }
+
+    bool operator<(const Info& other) const {
+      return AsTuple() < other.AsTuple();
+    }
+
+    bool operator==(const ApexUpdateListener::Info& other) const {
+      return !(other < *this) && !(*this < other);
+    }
+    bool operator!=(const ApexUpdateListener::Info& other) const {
+      return !(*this == other);
+    }
+    template <typename T>
+    friend auto& operator<<(T& stream, const ApexUpdateListener::Info& i) {
+      return stream << "{ moduleName: " << i.module_name
+                    << ", modulePath: " << i.module_path
+                    << ", preinstalledModulePath: " << i.preinstalled_module_path
+                    << ", versionCode: " << i.version_code
+                    << ", versionName: " << i.version_name
+                    << ", i.isFactory: " << i.is_factory
+                    << ", i.isActive: " << i.is_active << " }";
+    }
+    template <typename T>
+    friend auto& operator<<(T& stream,
+                            const std::set<ApexUpdateListener::Info>& s) {
+      stream << "{";
+      std::string sep = "";
+      for (auto& i : s) {
+        stream << sep << i;
+        sep = ", ";
+      }
+      return stream << "}";
+    }
+  };
+
+  using CallbackFunction =
+      std::function<void(const std::set<Info>& last_versions,
+                         const std::set<Info>& current_versions)>;
+
+  ApexUpdateListener(Sigil, const std::string& apex_name,
+                     const std::string& apex_info_list_file_name,
+                     CallbackFunction callback, int fd, int wd,
+                     std::set<Info> last_info);
+
+  static std::unique_ptr<ApexUpdateListener> Make(
+      const std::string& apex_name, CallbackFunction callback,
+      bool invoke_with_initial_version = false,
+      const std::string& apex_info_list_file_name = "/apex/apex-info-list.xml");
+
+  // We need some cleanup handling
+  ~ApexUpdateListener();
+
+ private:
+  const std::string apex_name_;
+  const std::string apex_info_list_file_name_;
+  const CallbackFunction callback_function_;
+  std::atomic<bool> running_ = true;
+  int file_descriptor_ = -1;
+  int watch_descriptor_ = -1;
+  std::set<Info> last_info_;
+  std::thread thread_;
+
+  void ThreadFunction();
+  static std::optional<std::set<Info>> TrySlurpInfo(
+      const std::string& apex_name, const std::string& apex_info_list_file_name);
+};
diff --git a/common/apex_update_listener/apex_update_listener_test.cc b/common/apex_update_listener/apex_update_listener_test.cc
new file mode 100644
index 0000000..3747ce0
--- /dev/null
+++ b/common/apex_update_listener/apex_update_listener_test.cc
@@ -0,0 +1,114 @@
+#include "apex_update_listener.h"
+
+#include <android-base/file.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <log/log.h>
+#include <fstream>
+#include <iostream>
+#include <set>
+#include <thread>
+
+#undef LOG_TAG
+#define LOG_TAG "apex_update_listener_test"
+
+namespace {
+
+using InfoSet = std::set<ApexUpdateListener::Info>;
+using testing::MockFunction;
+using testing::StrictMock;
+
+const std::string kApexInfoStateJustPreinstalled = R"xml(
+<?xml version="1.0" encoding="utf-8"?>
+<apex-info-list>
+    <apex-info moduleName="com.test.apex" modulePath="/vendor/apex/com.test.apex" preinstalledModulePath="/vendor/apex/com.test.apex" versionCode="100" versionName="" isFactory="true" isActive="true"></apex-info>
+</apex-info-list>
+)xml";
+
+const std::string kApexInfoStateOneOnData = R"xml(
+<?xml version="1.0" encoding="utf-8"?>
+<apex-info-list>
+    <apex-info moduleName="com.test.apex" modulePath="/data/apex/com.test.apex@200" preinstalledModulePath="/vendor/apex/com.test.apex" versionCode="200" versionName="" isFactory="false" isActive="true"></apex-info>
+    <apex-info moduleName="com.test.apex" modulePath="/vendor/apex/com.test.apex" preinstalledModulePath="/vendor/apex/com.test.apex" versionCode="100" versionName="" isFactory="true" isActive="false"></apex-info>
+</apex-info-list>
+)xml";
+
+void WriteStringToFile(const char* filename, const std::string& data) {
+  std::ofstream xml_stream(filename);
+  xml_stream.write(data.data(), data.size());
+}
+
+}  // namespace
+
+TEST(ApexUpdateListener, InstallUpdate) {
+  TemporaryFile tmp_xml_file;
+
+  StrictMock<MockFunction<ApexUpdateListener::CallbackFunction>> callback;
+  std::mutex mutex;
+  std::condition_variable wait_cv;
+  EXPECT_CALL(
+      callback,
+      Call(InfoSet{{.module_name = "com.test.apex",
+                    .module_path = "/vendor/apex/com.test.apex",
+                    .preinstalled_module_path = "/vendor/apex/com.test.apex",
+                    .version_code = 100,
+                    .version_name = "",
+                    .is_factory = true,
+                    .is_active = true}},
+           InfoSet{{.module_name = "com.test.apex",
+                    .module_path = "/vendor/apex/com.test.apex",
+                    .preinstalled_module_path = "/vendor/apex/com.test.apex",
+                    .version_code = 100,
+                    .version_name = "",
+                    .is_factory = true,
+                    .is_active = false},
+                   {.module_name = "com.test.apex",
+                    .module_path = "/data/apex/com.test.apex@200",
+                    .preinstalled_module_path = "/vendor/apex/com.test.apex",
+                    .version_code = 200,
+                    .version_name = "",
+                    .is_factory = false,
+                    .is_active = true}}))
+      .WillOnce([&mutex, &wait_cv]() {
+        std::lock_guard<std::mutex> lock(mutex);
+        wait_cv.notify_one();
+      });
+
+  WriteStringToFile(tmp_xml_file.path, kApexInfoStateJustPreinstalled);
+  std::unique_ptr<ApexUpdateListener> ptr = ApexUpdateListener::Make(
+      "com.test.apex", callback.AsStdFunction(), false, tmp_xml_file.path);
+  EXPECT_NE(ptr, nullptr);
+  WriteStringToFile(tmp_xml_file.path, kApexInfoStateOneOnData);
+  std::unique_lock<std::mutex> lock(mutex);
+  wait_cv.wait_for(lock, std::chrono::milliseconds(30000));
+}
+
+TEST(ApexUpdateListener, InvokeWithInitialVersion) {
+  TemporaryFile tmp_xml_file;
+
+  StrictMock<MockFunction<ApexUpdateListener::CallbackFunction>> callback;
+  std::mutex mutex;
+  std::condition_variable wait_cv;
+
+  EXPECT_CALL(
+      callback,
+      Call(InfoSet{},
+           InfoSet{{.module_name = "com.test.apex",
+                    .module_path = "/vendor/apex/com.test.apex",
+                    .preinstalled_module_path = "/vendor/apex/com.test.apex",
+                    .version_code = 100,
+                    .version_name = "",
+                    .is_factory = true,
+                    .is_active = true}}))
+      .WillOnce([&mutex, &wait_cv]() {
+        std::lock_guard<std::mutex> lock(mutex);
+        wait_cv.notify_one();
+      });
+
+  WriteStringToFile(tmp_xml_file.path, kApexInfoStateJustPreinstalled);
+  std::unique_ptr<ApexUpdateListener> ptr = ApexUpdateListener::Make(
+      "com.test.apex", callback.AsStdFunction(), true, tmp_xml_file.path);
+  EXPECT_NE(ptr, nullptr);
+  std::unique_lock<std::mutex> lock(mutex);
+  wait_cv.wait_for(lock, std::chrono::milliseconds(30000));
+}
diff --git a/common/hal/Android.bp b/common/hal/Android.bp
index e24e488..406b435 100644
--- a/common/hal/Android.bp
+++ b/common/hal/Android.bp
@@ -27,7 +27,7 @@
 
 cc_library_headers {
     name: "libgooglecamerahal_headers",
-    vendor_available: true,
+    vendor: true,
     export_include_dirs: [
         "common",
         "hwl_interface",
@@ -49,3 +49,22 @@
         "libhardware_headers",
     ],
 }
+
+soong_config_module_type {
+    name: "gch_hal_cc_defaults",
+    module_type: "cc_defaults",
+    config_namespace: "gch",
+    variables: ["hwl_library"],
+    properties: [
+        "cflags",
+        "enabled",
+        "shared_libs",
+        "srcs",
+    ],
+}
+
+// No value set (conditions_default) = load HWL with dlopen
+soong_config_string_variable {
+    name: "hwl_library",
+    values: ["lyric"],
+}
diff --git a/common/hal/OWNERS b/common/hal/OWNERS
index a561680..7b8350f 100644
--- a/common/hal/OWNERS
+++ b/common/hal/OWNERS
@@ -10,3 +10,5 @@
 blossomchiang@google.com
 khakisung@google.com
 vincechiu@google.com
+jasl@google.com
+owenkmg@google.com
\ No newline at end of file
diff --git a/common/hal/common/hal_types.h b/common/hal/common/hal_types.h
index 4fe1da0..5fa2064 100644
--- a/common/hal/common/hal_types.h
+++ b/common/hal/common/hal_types.h
@@ -106,7 +106,7 @@
 };
 
 // See the definition of
-// ::android::hardware::camera::device::V3_4::Stream;
+// ::android::hardware::camera::device::V3_7::Stream;
 struct Stream {
   int32_t id = -1;
   StreamType stream_type = StreamType::kOutput;
@@ -119,6 +119,9 @@
   bool is_physical_camera_stream = false;
   uint32_t physical_camera_id = 0;
   uint32_t buffer_size = 0;
+  int32_t group_id = -1;
+  bool used_in_max_resolution_mode = false;
+  bool used_in_default_resolution_mode = true;
 };
 
 // See the definition of
@@ -129,12 +132,13 @@
 };
 
 // See the definition of
-// ::android::hardware::camera::device::V3_5::StreamConfiguration;
+// ::android::hardware::camera::device::V3_7::StreamConfiguration;
 struct StreamConfiguration {
   std::vector<Stream> streams;
   StreamConfigurationMode operation_mode;
   std::unique_ptr<HalCameraMetadata> session_params;
   uint32_t stream_config_counter = 0;
+  bool multi_resolution_input_image = false;
 };
 
 struct CameraIdAndStreamConfiguration {
@@ -207,6 +211,9 @@
   // Maps from physical camera ID to physical camera settings.
   std::unordered_map<uint32_t, std::unique_ptr<HalCameraMetadata>>
       physical_camera_settings;
+
+  uint32_t input_width;
+  uint32_t input_height;
 };
 
 // See the definition of
@@ -293,8 +300,8 @@
 };
 
 struct Dimension {
-  uint32_t width;
-  uint32_t height;
+  uint32_t width = 0;
+  uint32_t height = 0;
 };
 
 struct Point {
diff --git a/common/hal/common/session_data_defs.h b/common/hal/common/session_data_defs.h
index 0adcbc2..2049d7e 100644
--- a/common/hal/common/session_data_defs.h
+++ b/common/hal/common/session_data_defs.h
@@ -17,6 +17,8 @@
 #ifndef HARDWARE_GOOGLE_CAMERA_HAL_SESSION_DATA_DEFS_H
 #define HARDWARE_GOOGLE_CAMERA_HAL_SESSION_DATA_DEFS_H
 
+#include <cstdint>
+
 namespace android {
 namespace google_camera_hal {
 
diff --git a/common/hal/common/thermal_types.h b/common/hal/common/thermal_types.h
index 26a2405..6887b99 100644
--- a/common/hal/common/thermal_types.h
+++ b/common/hal/common/thermal_types.h
@@ -17,6 +17,12 @@
 #ifndef HARDWARE_GOOGLE_CAMERA_HAL_COMMON_THERMAL_TYPES_H_
 #define HARDWARE_GOOGLE_CAMERA_HAL_COMMON_THERMAL_TYPES_H_
 
+#include <utils/Errors.h>
+
+#include <cstdint>
+#include <functional>
+#include <string>
+
 namespace android {
 namespace google_camera_hal {
 
diff --git a/common/hal/common/vendor_tag_defs.h b/common/hal/common/vendor_tag_defs.h
index 7d93d8d..eef389d 100644
--- a/common/hal/common/vendor_tag_defs.h
+++ b/common/hal/common/vendor_tag_defs.h
@@ -46,6 +46,7 @@
   kSensorModeFullFov,
   kNonWarpedCropRegion,
   kHdrUsageMode,
+  kSwDenoiseEnabled,
   // This should not be used as a vendor tag ID on its own, but as a placeholder
   // to indicate the end of currently defined vendor tag IDs
   kEndMarker
@@ -204,6 +205,15 @@
     {.tag_id = VendorTagIds::kHdrUsageMode,
      .tag_name = "hdr.UsageMode",
      .tag_type = CameraMetadataType::kByte},
+    // Software denoise enabled
+    //
+    // Indicates whether the software denoise is enabled
+    //
+    // Present in: Characteristics
+    // Payload: SwDenoiseEnabled
+    {.tag_id = VendorTagIds::kSwDenoiseEnabled,
+     .tag_name = "SwDenoiseEnabled",
+     .tag_type = CameraMetadataType::kByte},
 };
 
 // Google Camera HAL vendor tag sections
diff --git a/common/hal/google_camera_hal/Android.bp b/common/hal/google_camera_hal/Android.bp
index fa9e03d..8155491 100644
--- a/common/hal/google_camera_hal/Android.bp
+++ b/common/hal/google_camera_hal/Android.bp
@@ -23,11 +23,34 @@
     default_applicable_licenses: ["hardware_google_camera_license"],
 }
 
+soong_config_module_type_import {
+    from: "hardware/google/camera/common/hal/Android.bp",
+    module_types: ["gch_hal_cc_defaults"],
+}
+
+gch_hal_cc_defaults {
+    name: "gch_hwl_linking_cc_defaults",
+    soong_config_variables: {
+        hwl_library: {
+            lyric: {
+                shared_libs: ["//vendor/google/services/LyricCameraHAL/src:liblyric_hwl"],
+            },
+            conditions_default: {
+                cflags: ["-DGCH_HWL_USE_DLOPEN=1"],
+            },
+        },
+    },
+}
+
 cc_library_shared {
     name: "libgooglecamerahal",
-    defaults: ["google_camera_hal_defaults"],
+    defaults: [
+        "google_camera_hal_defaults",
+        "gch_hwl_linking_cc_defaults",
+    ],
     owner: "google",
-    vendor_available: true,
+    vendor: true,
+    compile_multilib: "first",
     ldflags: [
         "-Wl,--rpath,/system/${LIB}/camera/capture_sessions",
         "-Wl,--rpath,/vendor/${LIB}/camera/capture_sessions",
@@ -39,19 +62,14 @@
         "camera_device.cc",
         "camera_device_session.cc",
         "camera_provider.cc",
+        "capture_session_utils.cc",
+        "capture_session_wrapper_process_block.cc",
         "depth_process_block.cc",
         "dual_ir_capture_session.cc",
         "dual_ir_depth_result_processor.cc",
         "dual_ir_request_processor.cc",
         "dual_ir_result_request_processor.cc",
-        "hal_utils.cc",
         "hdrplus_capture_session.cc",
-        "hdrplus_process_block.cc",
-        "hdrplus_request_processor.cc",
-        "hdrplus_result_processor.cc",
-        "hwl_buffer_allocator.cc",
-        "internal_stream_manager.cc",
-        "multicam_realtime_process_block.cc",
         "pending_requests_tracker.cc",
         "realtime_process_block.cc",
         "realtime_zsl_request_processor.cc",
@@ -60,17 +78,23 @@
         "rgbird_depth_result_processor.cc",
         "rgbird_result_request_processor.cc",
         "rgbird_rt_request_processor.cc",
+        "snapshot_request_processor.cc",
+        "snapshot_result_processor.cc",
         "vendor_tags.cc",
+        "zsl_snapshot_capture_session.cc",
     ],
     shared_libs: [
         "android.hardware.graphics.mapper@2.0",
         "android.hardware.graphics.mapper@3.0",
         "android.hardware.graphics.mapper@4.0",
+        "lib_profiler",
+        "libbase",
         "libcamera_metadata",
         "libcutils",
         "libgooglecamerahalutils",
         "libhidlbase",
         "liblog",
+        "libmeminfo",
         "libutils",
         "libsync",
     ],
@@ -85,4 +109,7 @@
     export_header_lib_headers: [
         "libgooglecamerahal_headers",
     ],
+    export_shared_lib_headers: [
+        "lib_profiler",
+    ],
 }
diff --git a/common/hal/google_camera_hal/basic_capture_session.cc b/common/hal/google_camera_hal/basic_capture_session.cc
index f4daeb5..667d8df 100644
--- a/common/hal/google_camera_hal/basic_capture_session.cc
+++ b/common/hal/google_camera_hal/basic_capture_session.cc
@@ -45,7 +45,7 @@
     CameraDeviceSessionHwl* device_session_hwl,
     const StreamConfiguration& stream_config,
     ProcessCaptureResultFunc process_capture_result, NotifyFunc notify,
-    HwlRequestBuffersFunc /*request_stream_buffers*/,
+    HwlSessionCallback /*session_callback*/,
     std::vector<HalStream>* hal_configured_streams,
     CameraBufferAllocatorHwl* /*camera_allocator_hwl*/) {
   ATRACE_CALL();
diff --git a/common/hal/google_camera_hal/basic_capture_session.h b/common/hal/google_camera_hal/basic_capture_session.h
index 74071ac..c777e91 100644
--- a/common/hal/google_camera_hal/basic_capture_session.h
+++ b/common/hal/google_camera_hal/basic_capture_session.h
@@ -54,7 +54,7 @@
       CameraDeviceSessionHwl* device_session_hwl,
       const StreamConfiguration& stream_config,
       ProcessCaptureResultFunc process_capture_result, NotifyFunc notify,
-      HwlRequestBuffersFunc request_stream_buffers,
+      HwlSessionCallback session_callback,
       std::vector<HalStream>* hal_configured_streams,
       CameraBufferAllocatorHwl* camera_allocator_hwl = nullptr);
 
diff --git a/common/hal/google_camera_hal/basic_request_processor.cc b/common/hal/google_camera_hal/basic_request_processor.cc
index 5b55d0c..c38d9e0 100644
--- a/common/hal/google_camera_hal/basic_request_processor.cc
+++ b/common/hal/google_camera_hal/basic_request_processor.cc
@@ -59,6 +59,8 @@
       HalCameraMetadata::Clone(stream_config.session_params.get());
   process_block_stream_config->stream_config_counter =
       stream_config.stream_config_counter;
+  process_block_stream_config->multi_resolution_input_image =
+      stream_config.multi_resolution_input_image;
 
   return OK;
 }
@@ -93,6 +95,8 @@
   block_request.frame_number = request.frame_number;
   block_request.settings = HalCameraMetadata::Clone(request.settings.get());
   block_request.input_buffers = request.input_buffers;
+  block_request.input_width = request.input_width;
+  block_request.input_height = request.input_height;
 
   for (auto& metadata : request.input_buffer_metadata) {
     block_request.input_buffer_metadata.push_back(
diff --git a/common/hal/google_camera_hal/camera_device.cc b/common/hal/google_camera_hal/camera_device.cc
index e0c4e32..fba579a 100644
--- a/common/hal/google_camera_hal/camera_device.cc
+++ b/common/hal/google_camera_hal/camera_device.cc
@@ -17,18 +17,101 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "GCH_CameraDevice"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
+#include "camera_device.h"
+
 #include <dlfcn.h>
+#include <errno.h>
 #include <log/log.h>
+#include <meminfo/procmeminfo.h>
+#include <stdio.h>
+#include <sys/mman.h>
 #include <sys/stat.h>
 #include <utils/Trace.h>
 
-#include "camera_device.h"
+#include <thread>
+
+#include "utils.h"
 #include "vendor_tags.h"
 
+using android::meminfo::ProcMemInfo;
+using namespace android::meminfo;
+
 namespace android {
+
+void MadviseFileForRange(size_t madvise_size_limit_bytes, size_t map_size_bytes,
+                         const uint8_t* map_begin, const uint8_t* map_end,
+                         const std::string& file_name) {
+  // Ideal blockTransferSize for madvising files (128KiB)
+  static const size_t kIdealIoTransferSizeBytes = 128 * 1024;
+  size_t target_size_bytes =
+      std::min<size_t>(map_size_bytes, madvise_size_limit_bytes);
+  if (target_size_bytes == 0) {
+    return;
+  }
+  std::string trace_tag =
+      "madvising " + file_name + " size=" + std::to_string(target_size_bytes);
+  ATRACE_NAME(trace_tag.c_str());
+  // Based on requested size (target_size_bytes)
+  const uint8_t* target_pos = map_begin + target_size_bytes;
+
+  // Clamp endOfFile if its past map_end
+  if (target_pos > map_end) {
+    target_pos = map_end;
+  }
+
+  // Madvise the whole file up to target_pos in chunks of
+  // kIdealIoTransferSizeBytes (to MADV_WILLNEED)
+  // Note:
+  // madvise(MADV_WILLNEED) will prefetch max(fd readahead size, optimal
+  // block size for device) per call, hence the need for chunks. (128KB is a
+  // good default.)
+  for (const uint8_t* madvise_start = map_begin; madvise_start < target_pos;
+       madvise_start += kIdealIoTransferSizeBytes) {
+    void* madvise_addr =
+        const_cast<void*>(reinterpret_cast<const void*>(madvise_start));
+    size_t madvise_length =
+        std::min(kIdealIoTransferSizeBytes,
+                 static_cast<size_t>(target_pos - madvise_start));
+    int status = madvise(madvise_addr, madvise_length, MADV_WILLNEED);
+    // In case of error we stop madvising rest of the file
+    if (status < 0) {
+      break;
+    }
+  }
+}
+
+static void ReadAheadVma(const Vma& vma, const size_t madvise_size_limit_bytes) {
+  const uint8_t* map_begin = reinterpret_cast<uint8_t*>(vma.start);
+  const uint8_t* map_end = reinterpret_cast<uint8_t*>(vma.end);
+  MadviseFileForRange(madvise_size_limit_bytes,
+                      static_cast<size_t>(map_end - map_begin), map_begin,
+                      map_end, vma.name);
+}
+
+static void LoadLibraries(const std::vector<std::string>* libs) {
+  auto vmaCollectorCb = [&libs](const Vma& vma) {
+    const static size_t kMadviseSizeLimitBytes =
+        std::numeric_limits<size_t>::max();
+    // Read ahead for anonymous VMAs and for specific files.
+    // vma.flags represents a VMAs rwx bits.
+    if (vma.inode == 0 && !vma.is_shared && vma.flags) {
+      ReadAheadVma(vma, kMadviseSizeLimitBytes);
+    } else if (vma.inode != 0 && libs != nullptr &&
+               std::any_of(libs->begin(), libs->end(),
+                           [&vma](std::string lib_name) {
+                             return lib_name.compare(vma.name) == 0;
+                           })) {
+      ReadAheadVma(vma, kMadviseSizeLimitBytes);
+    }
+  };
+  ProcMemInfo meminfo(getpid());
+  meminfo.ForEachVmaFromMaps(vmaCollectorCb);
+}
+
 namespace google_camera_hal {
 
 // HAL external capture session library path
+#if GCH_HWL_USE_DLOPEN
 #if defined(_LP64)
 constexpr char kExternalCaptureSessionDir[] =
     "/vendor/lib64/camera/capture_sessions/";
@@ -36,10 +119,12 @@
 constexpr char kExternalCaptureSessionDir[] =
     "/vendor/lib/camera/capture_sessions/";
 #endif
+#endif
 
 std::unique_ptr<CameraDevice> CameraDevice::Create(
     std::unique_ptr<CameraDeviceHwl> camera_device_hwl,
-    CameraBufferAllocatorHwl* camera_allocator_hwl) {
+    CameraBufferAllocatorHwl* camera_allocator_hwl,
+    const std::vector<std::string>* configure_streams_libs) {
   ATRACE_CALL();
   auto device = std::unique_ptr<CameraDevice>(new CameraDevice());
 
@@ -58,6 +143,7 @@
 
   ALOGI("%s: Created a camera device for public(%u)", __FUNCTION__,
         device->GetPublicCameraId());
+  device->configure_streams_libs_ = configure_streams_libs;
 
   return device;
 }
@@ -153,6 +239,9 @@
     return UNKNOWN_ERROR;
   }
 
+  std::thread t(LoadLibraries, configure_streams_libs_);
+  t.detach();
+
   return OK;
 }
 
@@ -167,33 +256,6 @@
   return supported;
 }
 
-// Returns an array of regular files under dir_path.
-static std::vector<std::string> FindLibraryPaths(const char* dir_path) {
-  std::vector<std::string> libs;
-
-  errno = 0;
-  DIR* dir = opendir(dir_path);
-  if (!dir) {
-    ALOGD("%s: Unable to open directory %s (%s)", __FUNCTION__, dir_path,
-          strerror(errno));
-    return libs;
-  }
-
-  struct dirent* entry = nullptr;
-  while ((entry = readdir(dir)) != nullptr) {
-    std::string lib_path(dir_path);
-    lib_path += entry->d_name;
-    struct stat st;
-    if (stat(lib_path.c_str(), &st) == 0) {
-      if (S_ISREG(st.st_mode)) {
-        libs.push_back(lib_path);
-      }
-    }
-  }
-
-  return libs;
-}
-
 status_t CameraDevice::LoadExternalCaptureSession() {
   ATRACE_CALL();
 
@@ -203,9 +265,14 @@
     return OK;
   }
 
-  for (const auto& lib_path : FindLibraryPaths(kExternalCaptureSessionDir)) {
+#if GCH_HWL_USE_DLOPEN
+  for (const auto& lib_path :
+       utils::FindLibraryPaths(kExternalCaptureSessionDir)) {
     ALOGI("%s: Loading %s", __FUNCTION__, lib_path.c_str());
     void* lib_handle = nullptr;
+    // load shared library and never unload
+    // TODO(b/...): Switch to using build-system based HWL
+    //   loading and remove dlopen here?
     lib_handle = dlopen(lib_path.c_str(), RTLD_NOW);
     if (lib_handle == nullptr) {
       ALOGW("Failed loading %s.", lib_path.c_str());
@@ -226,14 +293,24 @@
     external_session_factory_entries_.push_back(external_session_factory_t);
     external_capture_session_lib_handles_.push_back(lib_handle);
   }
+#else
+  if (GetCaptureSessionFactory) {
+    external_session_factory_entries_.push_back(GetCaptureSessionFactory);
+  }
+#endif
 
   return OK;
 }
 
 CameraDevice::~CameraDevice() {
-  for (auto lib_handle : external_capture_session_lib_handles_) {
-    dlclose(lib_handle);
+}
+
+std::unique_ptr<google::camera_common::Profiler> CameraDevice::GetProfiler(
+    uint32_t camera_id, int option) {
+  if (option & google::camera_common::Profiler::SetPropFlag::kCustomProfiler) {
+    return camera_device_hwl_->GetProfiler(camera_id, option);
   }
+  return nullptr;
 }
 
 }  // namespace google_camera_hal
diff --git a/common/hal/google_camera_hal/camera_device.h b/common/hal/google_camera_hal/camera_device.h
index 865bfa4..8edcbd7 100644
--- a/common/hal/google_camera_hal/camera_device.h
+++ b/common/hal/google_camera_hal/camera_device.h
@@ -21,6 +21,7 @@
 #include "camera_device_hwl.h"
 #include "camera_device_session.h"
 #include "hal_camera_metadata.h"
+#include "profiler.h"
 
 namespace android {
 namespace google_camera_hal {
@@ -36,7 +37,8 @@
   // lifetime of CameraDevice
   static std::unique_ptr<CameraDevice> Create(
       std::unique_ptr<CameraDeviceHwl> camera_device_hwl,
-      CameraBufferAllocatorHwl* camera_allocator_hwl = nullptr);
+      CameraBufferAllocatorHwl* camera_allocator_hwl = nullptr,
+      const std::vector<std::string>* configure_streams_libs = nullptr);
 
   virtual ~CameraDevice();
 
@@ -81,6 +83,9 @@
 
   status_t LoadExternalCaptureSession();
 
+  std::unique_ptr<google::camera_common::Profiler> GetProfiler(uint32_t camera_id,
+                                                               int option);
+
  protected:
   CameraDevice() = default;
 
@@ -98,6 +103,8 @@
   std::vector<GetCaptureSessionFactoryFunc> external_session_factory_entries_;
   // Opened library handles that should be closed on destruction
   std::vector<void*> external_capture_session_lib_handles_;
+
+  const std::vector<std::string>* configure_streams_libs_ = nullptr;
 };
 
 }  // namespace google_camera_hal
diff --git a/common/hal/google_camera_hal/camera_device_session.cc b/common/hal/google_camera_hal/camera_device_session.cc
index 7bdee63..29bc099 100644
--- a/common/hal/google_camera_hal/camera_device_session.cc
+++ b/common/hal/google_camera_hal/camera_device_session.cc
@@ -24,6 +24,7 @@
 #include <utils/Trace.h>
 
 #include "basic_capture_session.h"
+#include "capture_session_utils.h"
 #include "dual_ir_capture_session.h"
 #include "hal_utils.h"
 #include "hdrplus_capture_session.h"
@@ -31,6 +32,7 @@
 #include "vendor_tag_defs.h"
 #include "vendor_tag_types.h"
 #include "vendor_tags.h"
+#include "zsl_snapshot_capture_session.h"
 
 namespace android {
 namespace google_camera_hal {
@@ -51,6 +53,12 @@
              BasicCaptureSession::IsStreamConfigurationSupported,
          .CreateSession = BasicCaptureSession::Create}};
 
+std::vector<WrapperCaptureSessionEntryFuncs>
+    CameraDeviceSession::kWrapperCaptureSessionEntries = {
+        {.IsStreamConfigurationSupported =
+             ZslSnapshotCaptureSession::IsStreamConfigurationSupported,
+         .CreateSession = ZslSnapshotCaptureSession::Create}};
+
 std::unique_ptr<CameraDeviceSession> CameraDeviceSession::Create(
     std::unique_ptr<CameraDeviceSessionHwl> device_session_hwl,
     std::vector<GetCaptureSessionFactoryFunc> external_session_factory_entries,
@@ -65,8 +73,7 @@
   std::vector<uint32_t> physical_camera_ids =
       device_session_hwl->GetPhysicalCameraIds();
 
-  auto session =
-      std::unique_ptr<CameraDeviceSession>(new CameraDeviceSession());
+  auto session = std::unique_ptr<CameraDeviceSession>(new CameraDeviceSession());
   if (session == nullptr) {
     ALOGE("%s: Creating CameraDeviceSession failed.", __FUNCTION__);
     return nullptr;
@@ -125,10 +132,25 @@
   for (auto& stream_buffer : result->output_buffers) {
     int32_t stream_id = stream_buffer.stream_id;
     if (streams.find(stream_id) == streams.end()) {
-      ALOGE(
-          "%s: Can't find stream %d in frame %u result holder. It may"
-          " have been returned or have not been requested.",
-          __FUNCTION__, stream_id, frame_number);
+      // If stream_id belongs to a stream group, the HWL may choose to output
+      // buffers to a different stream in the same group.
+      if (grouped_stream_id_map_.count(stream_id) == 1) {
+        int32_t stream_id_for_group = grouped_stream_id_map_.at(stream_id);
+        if (streams.find(stream_id_for_group) != streams.end()) {
+          streams.erase(stream_id_for_group);
+        } else {
+          ALOGE(
+              "%s: Can't find stream_id_for_group %d for stream %d in frame %u "
+              "result holder. It may have been returned or have not been "
+              "requested.",
+              __FUNCTION__, stream_id_for_group, stream_id, frame_number);
+        }
+      } else {
+        ALOGE(
+            "%s: Can't find stream %d in frame %u result holder. It may"
+            " have been returned or have not been requested.",
+            __FUNCTION__, stream_id, frame_number);
+      }
       // Ignore this buffer and continue handling other buffers in the
       // result.
     } else {
@@ -175,7 +197,13 @@
   }
 
   for (auto& stream_buffer : result->output_buffers) {
-    ALOGV("%s: [sbc] <= Return result buf[%p], bid[%" PRIu64
+    ALOGV("%s: [sbc] <= Return result output buf[%p], bid[%" PRIu64
+          "], strm[%d], frm[%u]",
+          __FUNCTION__, stream_buffer.buffer, stream_buffer.buffer_id,
+          stream_buffer.stream_id, result->frame_number);
+  }
+  for (auto& stream_buffer : result->input_buffers) {
+    ALOGV("%s: [sbc] <= Return result input buf[%p], bid[%" PRIu64
           "], strm[%d], frm[%u]",
           __FUNCTION__, stream_buffer.buffer, stream_buffer.buffer_id,
           stream_buffer.stream_id, result->frame_number);
@@ -227,15 +255,28 @@
       frame_number = result.message.shutter.frame_number;
     }
     std::lock_guard<std::mutex> lock(request_record_lock_);
-    // Strip out results for frame number that has been notified as ERROR_REQUEST
-    if (error_notified_requests_.find(frame_number) !=
-        error_notified_requests_.end()) {
+    // Strip out results for frame number that has been notified
+    // ErrorCode::kErrorResult and ErrorCode::kErrorBuffer
+    if ((error_notified_requests_.find(frame_number) !=
+         error_notified_requests_.end()) &&
+        (result.type != MessageType::kShutter)) {
       return;
     }
 
     if (result.type == MessageType::kError &&
         result.message.error.error_code == ErrorCode::kErrorResult) {
       pending_results_.erase(frame_number);
+
+      if (ignore_shutters_.find(frame_number) == ignore_shutters_.end()) {
+        ignore_shutters_.insert(frame_number);
+      }
+    }
+
+    if (result.type == MessageType::kShutter) {
+      if (ignore_shutters_.find(frame_number) != ignore_shutters_.end()) {
+        ignore_shutters_.erase(frame_number);
+        return;
+      }
     }
   }
 
@@ -416,6 +457,7 @@
   }
 
   ZoomRatioMapper::InitParams params;
+  params.camera_id = camera_id_;
   params.active_array_dimension = {
       active_array_size.right - active_array_size.left + 1,
       active_array_size.bottom - active_array_size.top + 1};
@@ -459,6 +501,23 @@
   zoom_ratio_mapper_.Initialize(&params);
 }
 
+void CameraDeviceSession::DeriveGroupedStreamIdMap() {
+  // Group stream ids by stream group id
+  std::unordered_map<int32_t, std::vector<int32_t>> group_to_streams_map;
+  for (const auto& [stream_id, stream] : configured_streams_map_) {
+    if (stream.stream_type == StreamType::kOutput && stream.group_id != -1) {
+      group_to_streams_map[stream.group_id].push_back(stream_id);
+    }
+  }
+
+  // For each stream group, map all the streams' ids to one id
+  for (const auto& [group_id, stream_ids] : group_to_streams_map) {
+    for (size_t i = 1; i < stream_ids.size(); i++) {
+      grouped_stream_id_map_[stream_ids[i]] = stream_ids[0];
+    }
+  }
+}
+
 status_t CameraDeviceSession::LoadExternalCaptureSession(
     std::vector<GetCaptureSessionFactoryFunc> external_session_factory_entries) {
   ATRACE_CALL();
@@ -493,6 +552,7 @@
   for (auto external_session : external_capture_session_entries_) {
     delete external_session;
   }
+  external_capture_session_entries_.clear();
 
   if (buffer_mapper_v4_ != nullptr) {
     FreeImportedBufferHandles<android::hardware::graphics::mapper::V4_0::IMapper>(
@@ -560,8 +620,7 @@
 }
 
 status_t CameraDeviceSession::ConstructDefaultRequestSettings(
-    RequestTemplate type,
-    std::unique_ptr<HalCameraMetadata>* default_settings) {
+    RequestTemplate type, std::unique_ptr<HalCameraMetadata>* default_settings) {
   ATRACE_CALL();
   status_t res = device_session_hwl_->ConstructDefaultRequestSettings(
       type, default_settings);
@@ -605,6 +664,7 @@
 
   std::lock_guard lock_capture_session(capture_session_lock_);
   if (capture_session_ != nullptr) {
+    ATRACE_NAME("CameraDeviceSession::DestroyOldSession");
     capture_session_ = nullptr;
   }
 
@@ -617,36 +677,14 @@
   hal_utils::DumpStreamConfiguration(stream_config, "App stream configuration");
 
   operation_mode_ = stream_config.operation_mode;
+  multi_res_reprocess_ = stream_config.multi_resolution_input_image;
 
-  // first pass: check loaded external capture sessions
-  for (auto externalSession : external_capture_session_entries_) {
-    if (externalSession->IsStreamConfigurationSupported(
-            device_session_hwl_.get(), stream_config)) {
-      capture_session_ = externalSession->CreateSession(
-          device_session_hwl_.get(), stream_config,
-          camera_device_session_callback_.process_capture_result,
-          camera_device_session_callback_.notify,
-          hwl_session_callback_.request_stream_buffers, hal_config,
-          camera_allocator_hwl_);
-      break;
-    }
-  }
-
-  // second pass: check predefined capture sessions
-  if (capture_session_ == nullptr) {
-    for (auto sessionEntry : kCaptureSessionEntries) {
-      if (sessionEntry.IsStreamConfigurationSupported(device_session_hwl_.get(),
-                                                      stream_config)) {
-        capture_session_ = sessionEntry.CreateSession(
-            device_session_hwl_.get(), stream_config,
-            camera_device_session_callback_.process_capture_result,
-            camera_device_session_callback_.notify,
-            hwl_session_callback_.request_stream_buffers, hal_config,
-            camera_allocator_hwl_);
-        break;
-      }
-    }
-  }
+  capture_session_ = CreateCaptureSession(
+      stream_config, kWrapperCaptureSessionEntries,
+      external_capture_session_entries_, kCaptureSessionEntries,
+      hwl_session_callback_, camera_allocator_hwl_, device_session_hwl_.get(),
+      hal_config, camera_device_session_callback_.process_capture_result,
+      camera_device_session_callback_.notify);
 
   if (capture_session_ == nullptr) {
     ALOGE("%s: Cannot find a capture session compatible with stream config",
@@ -691,10 +729,14 @@
     configured_streams_map_[stream.id] = stream;
   }
 
+  // Derives all stream ids within a group to a representative stream id
+  DeriveGroupedStreamIdMap();
+
   // If buffer management is support, create a pending request tracker for
   // capture request throttling.
   if (buffer_management_supported_) {
-    pending_requests_tracker_ = PendingRequestsTracker::Create(*hal_config);
+    pending_requests_tracker_ =
+        PendingRequestsTracker::Create(*hal_config, grouped_stream_id_map_);
     if (pending_requests_tracker_ == nullptr) {
       ALOGE("%s: Cannot create a pending request tracker.", __FUNCTION__);
       if (set_realtime_thread) {
@@ -710,6 +752,7 @@
       error_notified_requests_.clear();
       dummy_buffer_observed_.clear();
       pending_results_.clear();
+      ignore_shutters_.clear();
     }
   }
 
@@ -766,6 +809,20 @@
   updated_request->input_buffers = request.input_buffers;
   updated_request->input_buffer_metadata.clear();
   updated_request->output_buffers = request.output_buffers;
+  std::vector<uint32_t> physical_camera_ids =
+      device_session_hwl_->GetPhysicalCameraIds();
+  for (auto& [camid, physical_setting] : request.physical_camera_settings) {
+    if (std::find(physical_camera_ids.begin(), physical_camera_ids.end(),
+                  camid) == physical_camera_ids.end()) {
+      ALOGE("%s: Physical camera id %u in request is not registered",
+            __FUNCTION__, camid);
+      return BAD_VALUE;
+    }
+    updated_request->physical_camera_settings[camid] =
+        HalCameraMetadata::Clone(physical_setting.get());
+  }
+  updated_request->input_width = request.input_width;
+  updated_request->input_height = request.input_height;
 
   // Returns -1 if kThermalThrottling is not defined, skip following process.
   if (get_camera_metadata_tag_type(VendorTagIds::kThermalThrottling) != -1) {
@@ -791,9 +848,7 @@
 
   AppendOutputIntentToSettingsLocked(request, updated_request);
 
-  // If buffer management API is supported, buffers will be requested via
-  // RequestStreamBuffersFunc.
-  if (!buffer_management_supported_) {
+  {
     std::lock_guard<std::mutex> lock(imported_buffer_handle_map_lock_);
 
     status_t res = UpdateBufferHandlesLocked(&updated_request->input_buffers);
@@ -802,12 +857,15 @@
             strerror(-res), res);
       return res;
     }
-
-    res = UpdateBufferHandlesLocked(&updated_request->output_buffers);
-    if (res != OK) {
-      ALOGE("%s: Updating output buffer handles failed: %s(%d)", __FUNCTION__,
-            strerror(-res), res);
-      return res;
+    // If buffer management API is supported, buffers will be requested via
+    // RequestStreamBuffersFunc.
+    if (!buffer_management_supported_) {
+      res = UpdateBufferHandlesLocked(&updated_request->output_buffers);
+      if (res != OK) {
+        ALOGE("%s: Updating output buffer handles failed: %s(%d)", __FUNCTION__,
+              strerror(-res), res);
+        return res;
+      }
     }
   }
 
@@ -881,6 +939,13 @@
     const CaptureRequest& request) {
   ATRACE_CALL();
 
+  status_t res = ImportBufferHandles(request.input_buffers);
+  if (res != OK) {
+    ALOGE("%s: Importing input buffer handles failed: %s(%d)", __FUNCTION__,
+          strerror(-res), res);
+    return res;
+  }
+
   if (buffer_management_supported_) {
     ALOGV(
         "%s: Buffer management is enabled. Skip importing buffers in "
@@ -889,13 +954,6 @@
     return OK;
   }
 
-  status_t res = ImportBufferHandles(request.input_buffers);
-  if (res != OK) {
-    ALOGE("%s: Importing input buffer handles failed: %s(%d)", __FUNCTION__,
-          strerror(-res), res);
-    return res;
-  }
-
   res = ImportBufferHandles(request.output_buffers);
   if (res != OK) {
     ALOGE("%s: Importing output buffer handles failed: %s(%d)", __FUNCTION__,
@@ -950,6 +1008,9 @@
           if (pending_results_.find(frame_number) != pending_results_.end()) {
             need_to_notify_error_result = true;
             pending_results_.erase(frame_number);
+            if (ignore_shutters_.find(frame_number) == ignore_shutters_.end()) {
+              ignore_shutters_.insert(frame_number);
+            }
           }
           need_to_handle_result = true;
           break;
@@ -1098,13 +1159,24 @@
     return;
   }
 
+  // Note: This function should only be called if buffer_management_supported_
+  // is true.
+  if (pending_request_streams_.empty()) {
+    pending_requests_tracker_->OnBufferCacheFlushed();
+  }
+
   // Add streams into pending_request_streams_
   uint32_t frame_number = request.frame_number;
   if (*need_to_process) {
     std::lock_guard<std::mutex> lock(request_record_lock_);
     pending_results_.insert(frame_number);
     for (auto& stream_buffer : request.output_buffers) {
-      pending_request_streams_[frame_number].insert(stream_buffer.stream_id);
+      if (grouped_stream_id_map_.count(stream_buffer.stream_id) == 1) {
+        pending_request_streams_[frame_number].insert(
+            grouped_stream_id_map_.at(stream_buffer.stream_id));
+      } else {
+        pending_request_streams_[frame_number].insert(stream_buffer.stream_id);
+      }
     }
   }
 }
@@ -1135,6 +1207,24 @@
             buffer.stream_id);
       return BAD_VALUE;
     }
+    const Stream& input_stream = configured_streams_map_.at(buffer.stream_id);
+    if (!multi_res_reprocess_ && (request.input_width != input_stream.width ||
+                                  request.input_height != input_stream.height)) {
+      ALOGE("%s: Invalid input size [%d, %d], expected [%d, %d]", __FUNCTION__,
+            request.input_width, request.input_height, input_stream.width,
+            input_stream.height);
+      return BAD_VALUE;
+    }
+  }
+  if (request.input_buffers.size() > 0) {
+    if (multi_res_reprocess_ &&
+        (request.input_width == 0 || request.input_height == 0)) {
+      ALOGE(
+          "%s: Session is a multi-res input session, but has invalid input "
+          "size [%d, %d]",
+          __FUNCTION__, request.input_width, request.input_height);
+      return BAD_VALUE;
+    }
   }
 
   for (auto& buffer : request.output_buffers) {
@@ -1459,6 +1549,15 @@
     output_intent = static_cast<uint8_t>(OutputIntent::kZsl);
   }
 
+  // Used to indicate the possible start and end of video recording in traces
+  if (has_video && !prev_output_intent_has_video_) {
+    ATRACE_NAME("Start Video Streaming");
+  } else if (prev_output_intent_has_video_ && !has_video) {
+    ATRACE_NAME("Stop Video Streaming");
+  }
+
+  prev_output_intent_has_video_ = has_video;
+
   status_t res = updated_request->settings->Set(VendorTagIds::kOutputIntent,
                                                 &output_intent,
                                                 /*data_count=*/1);
@@ -1539,6 +1638,11 @@
             __FUNCTION__);
       return UNKNOWN_ERROR;
     }
+    // Input stream buffers are always allocated by the camera service, not by
+    // request_stream_buffers() callback from the HAL.
+    if (stream.stream_type != StreamType::kOutput) {
+      continue;
+    }
 
     StreamBufferRequestFunc session_request_func = StreamBufferRequestFunc(
         [this, stream_id](uint32_t num_buffer,
@@ -1745,5 +1849,10 @@
   }
 }
 
+std::unique_ptr<google::camera_common::Profiler>
+CameraDeviceSession::GetProfiler(uint32_t camera_id, int option) {
+  return device_session_hwl_->GetProfiler(camera_id, option);
+}
+
 }  // namespace google_camera_hal
 }  // namespace android
diff --git a/common/hal/google_camera_hal/camera_device_session.h b/common/hal/google_camera_hal/camera_device_session.h
index d373c40..35291e6 100644
--- a/common/hal/google_camera_hal/camera_device_session.h
+++ b/common/hal/google_camera_hal/camera_device_session.h
@@ -20,15 +20,19 @@
 #include <android/hardware/graphics/mapper/2.0/IMapper.h>
 #include <android/hardware/graphics/mapper/3.0/IMapper.h>
 #include <android/hardware/graphics/mapper/4.0/IMapper.h>
+
 #include <memory>
 #include <set>
 #include <shared_mutex>
+#include <vector>
 
 #include "camera_buffer_allocator_hwl.h"
 #include "camera_device_session_hwl.h"
 #include "capture_session.h"
+#include "capture_session_utils.h"
 #include "hal_camera_metadata.h"
 #include "hal_types.h"
+#include "hwl_types.h"
 #include "pending_requests_tracker.h"
 #include "stream_buffer_cache_manager.h"
 #include "thermal_types.h"
@@ -61,26 +65,6 @@
   UnregisterThermalChangedCallbackFunc unregister_thermal_changed_callback;
 };
 
-// Session function invoked to query if particular stream config supported
-using StreamConfigSupportedFunc =
-    std::function<bool(CameraDeviceSessionHwl* device_session_hwl,
-                       const StreamConfiguration& stream_config)>;
-
-// Session function invoked to create session instance
-using CaptureSessionCreateFunc = std::function<std::unique_ptr<CaptureSession>(
-    CameraDeviceSessionHwl* device_session_hwl,
-    const StreamConfiguration& stream_config,
-    ProcessCaptureResultFunc process_capture_result, NotifyFunc notify,
-    HwlRequestBuffersFunc request_stream_buffers,
-    std::vector<HalStream>* hal_configured_streams,
-    CameraBufferAllocatorHwl* camera_allocator_hwl)>;
-
-// define entry points to capture session
-struct CaptureSessionEntryFuncs {
-  StreamConfigSupportedFunc IsStreamConfigurationSupported;
-  CaptureSessionCreateFunc CreateSession;
-};
-
 // Entry point for getting an external capture session.
 using GetCaptureSessionFactoryFunc = ExternalCaptureSessionFactory* (*)();
 
@@ -141,6 +125,9 @@
                                      const HalCameraMetadata* new_session,
                                      bool* reconfiguration_required);
 
+  std::unique_ptr<google::camera_common::Profiler> GetProfiler(uint32_t camera_id,
+                                                               int option);
+
  protected:
   CameraDeviceSession() = default;
 
@@ -296,6 +283,10 @@
 
   void InitializeZoomRatioMapper(HalCameraMetadata* characteristics);
 
+  // For all the stream ID groups, derive the mapping from all stream IDs
+  // within that group to one single stream ID for easier tracking.
+  void DeriveGroupedStreamIdMap();
+
   uint32_t camera_id_ = 0;
   std::unique_ptr<CameraDeviceSessionHwl> device_session_hwl_;
 
@@ -341,6 +332,12 @@
   // Protected by session_lock_.
   std::unordered_map<int32_t, Stream> configured_streams_map_;
 
+  // Map from all stream IDs within a stream group to one single stream ID for
+  // easier request/buffer tracking. For example, if a stream group contains 3
+  // streams: {1, 2, 3}, The mapping could be {2->1, 3->1}. All requests and
+  // buffers for stream 2 and stream 3 will be mapped to stream 1 for tracking.
+  std::unordered_map<int32_t, int32_t> grouped_stream_id_map_;
+
   // Last valid settings in capture request. Must be protected by session_lock_.
   std::unique_ptr<HalCameraMetadata> last_request_settings_;
 
@@ -353,6 +350,9 @@
   // Must be protected by session_lock_.
   bool thermal_throttling_notified_ = false;
 
+  // Predefined wrapper capture session entry points
+  static std::vector<WrapperCaptureSessionEntryFuncs> kWrapperCaptureSessionEntries;
+
   // Predefined capture session entry points
   static std::vector<CaptureSessionEntryFuncs> kCaptureSessionEntries;
 
@@ -380,6 +380,9 @@
   // Protected by session_lock_.
   bool has_valid_settings_ = false;
 
+  // If the previous output intent had a stream with video encoder usage.
+  bool prev_output_intent_has_video_ = false;
+
   // request_record_lock_ protects the following variables as noted
   std::mutex request_record_lock_;
 
@@ -403,6 +406,9 @@
   // Operation mode of stream configuration
   StreamConfigurationMode operation_mode_ = StreamConfigurationMode::kNormal;
 
+  // Whether this stream configuration is a multi-res reprocessing configuration
+  bool multi_res_reprocess_ = false;
+
   // Flush is running or not
   std::atomic<bool> is_flushing_ = false;
 
@@ -413,6 +419,10 @@
   // Protected by request_record_lock_;
   std::set<uint32_t> pending_results_;
 
+  // Record the shutters need to ignore for error result case
+  // Protected by request_record_lock_;
+  std::set<uint32_t> ignore_shutters_;
+
   static constexpr int32_t kInvalidStreamId = -1;
 };
 
diff --git a/common/hal/google_camera_hal/camera_provider.cc b/common/hal/google_camera_hal/camera_provider.cc
index b481128..ff1c807 100644
--- a/common/hal/google_camera_hal/camera_provider.cc
+++ b/common/hal/google_camera_hal/camera_provider.cc
@@ -17,19 +17,19 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "GCH_CameraProvider"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
+#include "camera_provider.h"
+
+#include <dlfcn.h>
 #include <log/log.h>
 #include <utils/Trace.h>
 
-#include <dlfcn.h>
-
-#include "camera_provider.h"
 #include "vendor_tag_defs.h"
 #include "vendor_tag_utils.h"
 
 // HWL layer implementation path
 #if defined(_LP64)
 std::string kCameraHwlLib = "/vendor/lib64/libgooglecamerahwl_impl.so";
-#else // defined(_LP64)
+#else  // defined(_LP64)
 std::string kCameraHwlLib = "/vendor/lib/libgooglecamerahwl_impl.so";
 #endif
 
@@ -68,8 +68,7 @@
   ATRACE_CALL();
   // Advertise the HAL vendor tags to the camera metadata framework before
   // creating a HWL provider.
-  status_t res =
-      VendorTagManager::GetInstance().AddTags(kHalVendorTagSections);
+  status_t res = VendorTagManager::GetInstance().AddTags(kHalVendorTagSections);
   if (res != OK) {
     ALOGE("%s: Initializing VendorTagManager failed: %s(%d)", __FUNCTION__,
           strerror(-res), res);
@@ -77,7 +76,7 @@
   }
 
   if (camera_provider_hwl == nullptr) {
-    status_t res = CreateCameraProviderHwl(&camera_provider_hwl);
+    status_t res = CreateHwl(&camera_provider_hwl);
     if (res != OK) {
       ALOGE("%s: Creating CameraProviderHwlImpl failed.", __FUNCTION__);
       return NO_INIT;
@@ -265,7 +264,8 @@
     return res;
   }
 
-  if (std::find(camera_ids.begin(), camera_ids.end(), camera_id) == camera_ids.end()) {
+  if (std::find(camera_ids.begin(), camera_ids.end(), camera_id) ==
+      camera_ids.end()) {
     ALOGE("%s: camera_id: %u  invalid.", __FUNCTION__, camera_id);
     return BAD_VALUE;
   }
@@ -279,8 +279,15 @@
     return res;
   }
 
-  *device = CameraDevice::Create(std::move(camera_device_hwl),
-                                 camera_allocator_hwl_.get());
+  const std::vector<std::string>* configure_streams_libs = nullptr;
+
+#if GCH_HWL_USE_DLOPEN
+  configure_streams_libs = reinterpret_cast<decltype(configure_streams_libs)>(
+      dlsym(hwl_lib_handle_, "configure_streams_libraries"));
+#endif
+  *device =
+      CameraDevice::Create(std::move(camera_device_hwl),
+                           camera_allocator_hwl_.get(), configure_streams_libs);
   if (*device == nullptr) {
     return NO_INIT;
   }
@@ -288,16 +295,18 @@
   return OK;
 }
 
-status_t CameraProvider::CreateCameraProviderHwl(
+status_t CameraProvider::CreateHwl(
     std::unique_ptr<CameraProviderHwl>* camera_provider_hwl) {
   ATRACE_CALL();
+#if GCH_HWL_USE_DLOPEN
   CreateCameraProviderHwl_t create_hwl;
 
   ALOGI("%s:Loading %s library", __FUNCTION__, kCameraHwlLib.c_str());
   hwl_lib_handle_ = dlopen(kCameraHwlLib.c_str(), RTLD_NOW);
 
   if (hwl_lib_handle_ == nullptr) {
-    ALOGE("HWL loading %s failed.", kCameraHwlLib.c_str());
+    ALOGE("HWL loading %s failed due to error: %s", kCameraHwlLib.c_str(),
+          dlerror());
     return NO_INIT;
   }
 
@@ -316,6 +325,10 @@
     ALOGE("Error! Creating CameraProviderHwl failed");
     return UNKNOWN_ERROR;
   }
+#else
+  *camera_provider_hwl =
+      std::unique_ptr<CameraProviderHwl>(CreateCameraProviderHwl());
+#endif
 
   return OK;
 }
diff --git a/common/hal/google_camera_hal/camera_provider.h b/common/hal/google_camera_hal/camera_provider.h
index 9cc32f2..dd3451d 100644
--- a/common/hal/google_camera_hal/camera_provider.h
+++ b/common/hal/google_camera_hal/camera_provider.h
@@ -76,8 +76,7 @@
   // Initialize the vendor tag manager
   status_t InitializeVendorTags();
 
-  status_t CreateCameraProviderHwl(
-      std::unique_ptr<CameraProviderHwl>* camera_provider_hwl);
+  status_t CreateHwl(std::unique_ptr<CameraProviderHwl>* camera_provider_hwl);
 
   // Provider library handle.
   void* hwl_lib_handle_ = nullptr;
@@ -92,6 +91,8 @@
   std::vector<VendorTagSection> vendor_tag_sections_;
 };
 
+extern "C" CameraProviderHwl* CreateCameraProviderHwl();
+
 }  // namespace google_camera_hal
 }  // namespace android
 
diff --git a/common/hal/google_camera_hal/capture_session_utils.cc b/common/hal/google_camera_hal/capture_session_utils.cc
new file mode 100644
index 0000000..3d5f34c
--- /dev/null
+++ b/common/hal/google_camera_hal/capture_session_utils.cc
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "capture_session_utils.h"
+
+#include "zsl_snapshot_capture_session.h"
+
+namespace android {
+namespace google_camera_hal {
+
+std::unique_ptr<CaptureSession> CreateCaptureSession(
+    const StreamConfiguration& stream_config,
+    const std::vector<WrapperCaptureSessionEntryFuncs>&
+        wrapper_capture_session_entries,
+    const std::vector<ExternalCaptureSessionFactory*>&
+        external_capture_session_entries,
+    const std::vector<CaptureSessionEntryFuncs>& capture_session_entries,
+    HwlSessionCallback hwl_session_callback,
+    CameraBufferAllocatorHwl* camera_buffer_allocator_hwl,
+    CameraDeviceSessionHwl* camera_device_session_hwl,
+    std::vector<HalStream>* hal_config,
+    ProcessCaptureResultFunc process_capture_result, NotifyFunc notify) {
+  // first pass: check predefined wrapper capture session
+  for (auto sessionEntry : wrapper_capture_session_entries) {
+    if (sessionEntry.IsStreamConfigurationSupported(camera_device_session_hwl,
+                                                    stream_config)) {
+      return sessionEntry.CreateSession(
+          stream_config, external_capture_session_entries,
+          capture_session_entries, hwl_session_callback,
+          camera_buffer_allocator_hwl, camera_device_session_hwl, hal_config,
+          process_capture_result, notify);
+    }
+  }
+
+  // second pass: check loaded external capture sessions
+  for (auto externalSession : external_capture_session_entries) {
+    if (externalSession->IsStreamConfigurationSupported(
+            camera_device_session_hwl, stream_config)) {
+      return externalSession->CreateSession(
+          camera_device_session_hwl, stream_config, process_capture_result,
+          notify, hwl_session_callback, hal_config, camera_buffer_allocator_hwl);
+    }
+  }
+
+  // third pass: check predefined capture sessions
+  for (auto sessionEntry : capture_session_entries) {
+    if (sessionEntry.IsStreamConfigurationSupported(camera_device_session_hwl,
+                                                    stream_config)) {
+      return sessionEntry.CreateSession(
+          camera_device_session_hwl, stream_config, process_capture_result,
+          notify, hwl_session_callback, hal_config, camera_buffer_allocator_hwl);
+    }
+  }
+  return nullptr;
+}
+
+}  // namespace google_camera_hal
+}  // namespace android
\ No newline at end of file
diff --git a/common/hal/google_camera_hal/capture_session_utils.h b/common/hal/google_camera_hal/capture_session_utils.h
new file mode 100644
index 0000000..f77c553
--- /dev/null
+++ b/common/hal/google_camera_hal/capture_session_utils.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_CAPTURE_SESSION_UTILS_H_
+#define HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_CAPTURE_SESSION_UTILS_H_
+
+#include <utils/Errors.h>
+
+#include "camera_buffer_allocator_hwl.h"
+#include "camera_device_session_hwl.h"
+#include "capture_session.h"
+#include "hal_types.h"
+#include "hwl_types.h"
+
+namespace android {
+namespace google_camera_hal {
+
+// Session function invoked to query whether a particular stream config is supported
+using StreamConfigSupportedFunc =
+    std::function<bool(CameraDeviceSessionHwl* device_session_hwl,
+                       const StreamConfiguration& stream_config)>;
+
+// Session function invoked to create session instance
+using CaptureSessionCreateFunc = std::function<std::unique_ptr<CaptureSession>(
+    CameraDeviceSessionHwl* device_session_hwl,
+    const StreamConfiguration& stream_config,
+    ProcessCaptureResultFunc process_capture_result, NotifyFunc notify,
+    HwlSessionCallback session_callback,
+    std::vector<HalStream>* hal_configured_streams,
+    CameraBufferAllocatorHwl* camera_allocator_hwl)>;
+
+// define entry points to capture session
+struct CaptureSessionEntryFuncs {
+  StreamConfigSupportedFunc IsStreamConfigurationSupported;
+  CaptureSessionCreateFunc CreateSession;
+};
+
+// Session function invoked to create wrapper capture session instance
+using WrapperCaptureSessionCreateFunc =
+    std::function<std::unique_ptr<CaptureSession>(
+        const StreamConfiguration& stream_config,
+        const std::vector<ExternalCaptureSessionFactory*>&
+            external_capture_session_entries,
+        const std::vector<CaptureSessionEntryFuncs>& capture_session_entries,
+        HwlSessionCallback hwl_session_callback,
+        CameraBufferAllocatorHwl* camera_buffer_allocator_hwl,
+        CameraDeviceSessionHwl* camera_device_session_hwl,
+        std::vector<HalStream>* hal_configured_streams,
+        ProcessCaptureResultFunc process_capture_result, NotifyFunc notify)>;
+
+// define entry points to a wrapper capture session
+struct WrapperCaptureSessionEntryFuncs {
+  StreamConfigSupportedFunc IsStreamConfigurationSupported;
+  WrapperCaptureSessionCreateFunc CreateSession;
+};
+
+// Select and create capture session.
+// Wrapper capture session entries are checked first; if one supports the given
+// configuration it is used to create the session, before external and regular
+// capture session entries are considered.
+std::unique_ptr<CaptureSession> CreateCaptureSession(
+    const StreamConfiguration& stream_config,
+    const std::vector<WrapperCaptureSessionEntryFuncs>&
+        wrapper_capture_session_entries,
+    const std::vector<ExternalCaptureSessionFactory*>&
+        external_capture_session_entries,
+    const std::vector<CaptureSessionEntryFuncs>& capture_session_entries,
+    HwlSessionCallback hwl_session_callback,
+    CameraBufferAllocatorHwl* camera_buffer_allocator_hwl,
+    CameraDeviceSessionHwl* camera_device_session_hwl,
+    std::vector<HalStream>* hal_config,
+    ProcessCaptureResultFunc process_capture_result, NotifyFunc notify);
+
+}  // namespace google_camera_hal
+}  // namespace android
+
+#endif  // HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_CAPTURE_SESSION_UTILS_H_
\ No newline at end of file
diff --git a/common/hal/google_camera_hal/capture_session_wrapper_process_block.cc b/common/hal/google_camera_hal/capture_session_wrapper_process_block.cc
new file mode 100644
index 0000000..74e1eda
--- /dev/null
+++ b/common/hal/google_camera_hal/capture_session_wrapper_process_block.cc
@@ -0,0 +1,206 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#include <cstddef>
+#include <memory>
+
+#define LOG_TAG "GCH_CaptureSessionWrapperProcessBlock"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+#include <log/log.h>
+#include <utils/Trace.h>
+
+#include "capture_session_utils.h"
+#include "capture_session_wrapper_process_block.h"
+#include "hal_types.h"
+#include "hal_utils.h"
+#include "process_block.h"
+#include "result_processor.h"
+#include "utils/Errors.h"
+
+namespace android {
+namespace google_camera_hal {
+
+std::unique_ptr<CaptureSessionWrapperProcessBlock>
+CaptureSessionWrapperProcessBlock::Create(
+    const std::vector<ExternalCaptureSessionFactory*>&
+        external_capture_session_entries,
+    const std::vector<CaptureSessionEntryFuncs>& capture_session_entries,
+    HwlSessionCallback hwl_session_callback,
+    CameraBufferAllocatorHwl* camera_buffer_allocator_hwl,
+    CameraDeviceSessionHwl* device_session_hwl,
+    std::vector<HalStream>* hal_config) {
+  ATRACE_CALL();
+  if (!IsSupported(device_session_hwl)) {
+    ALOGE("%s: Not supported.", __FUNCTION__);
+    return nullptr;
+  }
+
+  auto block = std::unique_ptr<CaptureSessionWrapperProcessBlock>(
+      new CaptureSessionWrapperProcessBlock(
+          external_capture_session_entries, capture_session_entries,
+          hwl_session_callback, camera_buffer_allocator_hwl, device_session_hwl,
+          hal_config));
+  if (block == nullptr) {
+    ALOGE("%s: Creating CaptureSessionWrapperProcessBlock failed.",
+          __FUNCTION__);
+    return nullptr;
+  }
+
+  return block;
+}
+
+bool CaptureSessionWrapperProcessBlock::IsSupported(
+    CameraDeviceSessionHwl* device_session_hwl) {
+  if (device_session_hwl == nullptr) {
+    ALOGE("%s: device_session_hwl is nullptr", __FUNCTION__);
+    return false;
+  }
+
+  return true;
+}
+
+CaptureSessionWrapperProcessBlock::CaptureSessionWrapperProcessBlock(
+    const std::vector<ExternalCaptureSessionFactory*>&
+        external_capture_session_entries,
+    const std::vector<CaptureSessionEntryFuncs>& capture_session_entries,
+    HwlSessionCallback hwl_session_callback,
+    CameraBufferAllocatorHwl* camera_buffer_allocator_hwl,
+    CameraDeviceSessionHwl* camera_device_session_hwl,
+    std::vector<HalStream>* hal_config)
+    : kCameraId(camera_device_session_hwl->GetCameraId()),
+      external_capture_session_entries_(external_capture_session_entries),
+      capture_session_entries_(capture_session_entries),
+      hwl_session_callback_(hwl_session_callback),
+      camera_buffer_allocator_hwl_(camera_buffer_allocator_hwl),
+      camera_device_session_hwl_(camera_device_session_hwl),
+      hal_config_(hal_config) {
+}
+
+status_t CaptureSessionWrapperProcessBlock::SetResultProcessor(
+    std::unique_ptr<ResultProcessor> result_processor) {
+  ATRACE_CALL();
+  std::lock_guard<std::mutex> lock(result_processor_lock_);
+
+  result_processor_ = std::move(result_processor);
+  return OK;
+}
+
+status_t CaptureSessionWrapperProcessBlock::ConfigureStreams(
+    const StreamConfiguration& stream_config,
+    const StreamConfiguration& /*overall_config*/) {
+  ATRACE_CALL();
+  std::lock_guard lock(configure_shared_mutex_);
+  if (is_configured_) {
+    ALOGE("%s: Already configured.", __FUNCTION__);
+    return ALREADY_EXISTS;
+  }
+
+  if (result_processor_ == nullptr) {
+    ALOGE(
+        "%s: result processor is not set yet. Not able to set the callback "
+        "function.",
+        __FUNCTION__);
+    return BAD_VALUE;
+  }
+
+  process_capture_result_ =
+      ProcessCaptureResultFunc([this](std::unique_ptr<CaptureResult> result) {
+        ProcessBlockResult process_block_result;
+        process_block_result.result = std::move(result);
+        result_processor_->ProcessResult(std::move(process_block_result));
+      });
+  notify_ = NotifyFunc([this](const NotifyMessage& message) {
+    ProcessBlockNotifyMessage process_block_message{.message = message};
+    result_processor_->Notify(std::move(process_block_message));
+  });
+
+  // TODO(mhtan): Add one more stream here
+  embedded_capture_session_ = CreateCaptureSession(
+      stream_config, /*wrapper_capture_session_entries=*/{},
+      external_capture_session_entries_, capture_session_entries_,
+      hwl_session_callback_, camera_buffer_allocator_hwl_,
+      camera_device_session_hwl_, hal_config_, process_capture_result_, notify_);
+  if (embedded_capture_session_ == nullptr) {
+    ALOGE("%s: Not able to create embedded capture session.", __FUNCTION__);
+    return BAD_VALUE;
+  }
+
+  is_configured_ = true;
+  return OK;
+}
+
+status_t CaptureSessionWrapperProcessBlock::GetConfiguredHalStreams(
+    std::vector<HalStream>* hal_streams) const {
+  ATRACE_CALL();
+  std::lock_guard lock(configure_shared_mutex_);
+  if (hal_streams == nullptr) {
+    ALOGE("%s: hal_streams is nullptr.", __FUNCTION__);
+    return BAD_VALUE;
+  }
+
+  if (!is_configured_) {
+    ALOGE("%s: Not configured yet.", __FUNCTION__);
+    return NO_INIT;
+  }
+
+  return camera_device_session_hwl_->GetConfiguredHalStream(pipeline_id_,
+                                                            hal_streams);
+}
+
+status_t CaptureSessionWrapperProcessBlock::ProcessRequests(
+    const std::vector<ProcessBlockRequest>& process_block_requests,
+    const CaptureRequest& remaining_session_request) {
+  ATRACE_CALL();
+  CaptureRequest request;
+  request.frame_number = remaining_session_request.frame_number;
+  for (auto& metadata : request.input_buffer_metadata) {
+    request.input_buffer_metadata.push_back(
+        HalCameraMetadata::Clone(metadata.get()));
+  }
+  request.input_buffers = remaining_session_request.input_buffers;
+  request.input_height = remaining_session_request.input_height;
+  request.input_width = remaining_session_request.input_width;
+  for (auto& [camera_id, physical_metadata] :
+       remaining_session_request.physical_camera_settings) {
+    request.physical_camera_settings[camera_id] =
+        HalCameraMetadata::Clone(physical_metadata.get());
+  }
+  request.settings =
+      HalCameraMetadata::Clone(remaining_session_request.settings.get());
+
+  request.output_buffers = process_block_requests[0].request.output_buffers;
+  for (auto& buffer : request.output_buffers) {
+    if (buffer.buffer != nullptr) {
+      buffer.buffer_id = buffer.stream_id;
+    }
+  }
+
+  return embedded_capture_session_->ProcessRequest(request);
+}
+
+status_t CaptureSessionWrapperProcessBlock::Flush() {
+  ATRACE_CALL();
+  std::shared_lock lock(configure_shared_mutex_);
+  if (!is_configured_) {
+    return OK;
+  }
+
+  return camera_device_session_hwl_->Flush();
+}
+
+}  // namespace google_camera_hal
+}  // namespace android
\ No newline at end of file
diff --git a/common/hal/google_camera_hal/capture_session_wrapper_process_block.h b/common/hal/google_camera_hal/capture_session_wrapper_process_block.h
new file mode 100644
index 0000000..ea870a3
--- /dev/null
+++ b/common/hal/google_camera_hal/capture_session_wrapper_process_block.h
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_CAPTURE_SESSION_WRAPPER_PROCESS_BLOCK_H_
+#define HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_CAPTURE_SESSION_WRAPPER_PROCESS_BLOCK_H_
+
+#include <shared_mutex>
+
+#include "camera_device_session.h"
+#include "capture_session.h"
+#include "process_block.h"
+
+namespace android {
+namespace google_camera_hal {
+
+// CaptureSessionWrapperProcessBlock implements a real-time ProcessBlock.
+// It can process real-time capture requests for a single physical camera or a
+// logical camera. It creates an embedded CaptureSession and the coming requests
+// will be processed by the embedded CaptureSession.
+class CaptureSessionWrapperProcessBlock : public ProcessBlock {
+ public:
+  // Create a CaptureSessionWrapperProcessBlock.
+  // device_session_hwl and capture_session are owned by the caller and must be
+  // valid during the lifetime of this CaptureSessionWrapperProcessBlock.
+  static std::unique_ptr<CaptureSessionWrapperProcessBlock> Create(
+      const std::vector<ExternalCaptureSessionFactory*>&
+          external_capture_session_entries,
+      const std::vector<CaptureSessionEntryFuncs>& capture_session_entries,
+      HwlSessionCallback hwl_session_callback,
+      CameraBufferAllocatorHwl* camera_buffer_allocator_hwl,
+      CameraDeviceSessionHwl* camera_device_session_hwl,
+      std::vector<HalStream>* hal_config);
+
+  virtual ~CaptureSessionWrapperProcessBlock() = default;
+
+  // Override functions of ProcessBlock start.
+  status_t ConfigureStreams(const StreamConfiguration& stream_config,
+                            const StreamConfiguration& overall_config) override;
+
+  status_t SetResultProcessor(
+      std::unique_ptr<ResultProcessor> result_processor) override;
+
+  status_t GetConfiguredHalStreams(
+      std::vector<HalStream>* hal_streams) const override;
+
+  status_t ProcessRequests(
+      const std::vector<ProcessBlockRequest>& process_block_requests,
+      const CaptureRequest& remaining_session_request) override;
+
+  status_t Flush() override;
+  // Override functions of ProcessBlock end.
+
+ protected:
+  CaptureSessionWrapperProcessBlock(
+      const std::vector<ExternalCaptureSessionFactory*>&
+          external_capture_session_entries,
+      const std::vector<CaptureSessionEntryFuncs>& capture_session_entries,
+      HwlSessionCallback hwl_session_callback,
+      CameraBufferAllocatorHwl* camera_buffer_allocator_hwl,
+      CameraDeviceSessionHwl* camera_device_session_hwl,
+      std::vector<HalStream>* hal_config);
+
+ private:
+  // Camera ID of this process block.
+  const uint32_t kCameraId;
+
+  // If the real-time process block supports the device session.
+  static bool IsSupported(CameraDeviceSessionHwl* device_session_hwl);
+
+  // Invoked when the HWL pipeline sends a result.
+  void NotifyHwlPipelineResult(std::unique_ptr<HwlPipelineResult> hwl_result);
+
+  // Invoked when the HWL pipeline sends a message.
+  void NotifyHwlPipelineMessage(uint32_t pipeline_id,
+                                const NotifyMessage& message);
+
+  mutable std::shared_mutex configure_shared_mutex_;
+
+  // If streams are configured. Must be protected by configure_shared_mutex_.
+  bool is_configured_ = false;
+
+  // HWL pipeline ID. Must be protected by configure_shared_mutex_.
+  uint32_t pipeline_id_ = 0;
+
+  std::mutex result_processor_lock_;
+
+  // Result processor. Must be protected by result_processor_lock_.
+  std::unique_ptr<ResultProcessor> result_processor_;
+
+  std::unique_ptr<CaptureSession> embedded_capture_session_;
+
+  ProcessCaptureResultFunc process_capture_result_;
+  NotifyFunc notify_;
+
+  const std::vector<ExternalCaptureSessionFactory*>&
+      external_capture_session_entries_;
+  const std::vector<CaptureSessionEntryFuncs>& capture_session_entries_;
+  HwlSessionCallback hwl_session_callback_;
+  CameraBufferAllocatorHwl* camera_buffer_allocator_hwl_ = nullptr;
+  CameraDeviceSessionHwl* camera_device_session_hwl_ = nullptr;
+
+  std::vector<HalStream>* hal_config_;
+};
+
+}  // namespace google_camera_hal
+}  // namespace android
+
+#endif  // HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_CAPTURE_SESSION_WRAPPER_PROCESS_BLOCK_H_
\ No newline at end of file
diff --git a/common/hal/google_camera_hal/depth_process_block.cc b/common/hal/google_camera_hal/depth_process_block.cc
index 99412be..b4f8121 100644
--- a/common/hal/google_camera_hal/depth_process_block.cc
+++ b/common/hal/google_camera_hal/depth_process_block.cc
@@ -33,8 +33,10 @@
 namespace android {
 namespace google_camera_hal {
 
+#if GCH_HWL_USE_DLOPEN
 static std::string kDepthGeneratorLib = "/vendor/lib64/libdepthgenerator.so";
 using android::depth_generator::CreateDepthGenerator_t;
+#endif
 const float kSmallOffset = 0.01f;
 
 std::unique_ptr<DepthProcessBlock> DepthProcessBlock::Create(
@@ -69,13 +71,13 @@
 
   // TODO(b/128633958): remove this after FLL syncing is verified
   block->force_internal_stream_ =
-      property_get_bool("persist.camera.rgbird.forceinternal", false);
+      property_get_bool("persist.vendor.camera.rgbird.forceinternal", false);
   if (block->force_internal_stream_) {
     ALOGI("%s: Force creating internal streams for IR pipelines", __FUNCTION__);
   }
 
-  block->pipelined_depth_engine_enabled_ =
-      property_get_bool("persist.camera.frontdepth.enablepipeline", true);
+  block->pipelined_depth_engine_enabled_ = property_get_bool(
+      "persist.vendor.camera.frontdepth.enablepipeline", true);
 
   // TODO(b/129910835): Change the controlling prop into some deterministic
   // logic that controls when the front depth autocal will be triggered.
@@ -456,6 +458,7 @@
 status_t DepthProcessBlock::LoadDepthGenerator(
     std::unique_ptr<DepthGenerator>* depth_generator) {
   ATRACE_CALL();
+#if GCH_HWL_USE_DLOPEN
   CreateDepthGenerator_t create_depth_generator;
 
   ALOGI("%s: Loading library: %s", __FUNCTION__, kDepthGeneratorLib.c_str());
@@ -479,6 +482,12 @@
   if (*depth_generator == nullptr) {
     return NO_INIT;
   }
+#else
+  if (CreateDepthGenerator == nullptr) {
+    return NO_INIT;
+  }
+  *depth_generator = std::unique_ptr<DepthGenerator>(CreateDepthGenerator());
+#endif
 
   return OK;
 }
diff --git a/common/hal/google_camera_hal/depth_process_block.h b/common/hal/google_camera_hal/depth_process_block.h
index 5013095..15c4341 100644
--- a/common/hal/google_camera_hal/depth_process_block.h
+++ b/common/hal/google_camera_hal/depth_process_block.h
@@ -203,6 +203,10 @@
   std::mutex depth_generator_api_lock_;
 };
 
+#if !GCH_HWL_USE_DLOPEN
+extern "C" __attribute__((weak)) DepthGenerator* CreateDepthGenerator();
+#endif
+
 }  // namespace google_camera_hal
 }  // namespace android
 
diff --git a/common/hal/google_camera_hal/dual_ir_capture_session.cc b/common/hal/google_camera_hal/dual_ir_capture_session.cc
index 0ec78d8..b1dfd00 100644
--- a/common/hal/google_camera_hal/dual_ir_capture_session.cc
+++ b/common/hal/google_camera_hal/dual_ir_capture_session.cc
@@ -91,7 +91,7 @@
     CameraDeviceSessionHwl* device_session_hwl,
     const StreamConfiguration& stream_config,
     ProcessCaptureResultFunc process_capture_result, NotifyFunc notify,
-    HwlRequestBuffersFunc /*request_stream_buffers*/,
+    HwlSessionCallback /*session_callback*/,
     std::vector<HalStream>* hal_configured_streams,
     CameraBufferAllocatorHwl* /*camera_allocator_hwl*/) {
   ATRACE_CALL();
diff --git a/common/hal/google_camera_hal/dual_ir_capture_session.h b/common/hal/google_camera_hal/dual_ir_capture_session.h
index 9d14d18..66236c9 100644
--- a/common/hal/google_camera_hal/dual_ir_capture_session.h
+++ b/common/hal/google_camera_hal/dual_ir_capture_session.h
@@ -59,7 +59,7 @@
       CameraDeviceSessionHwl* device_session_hwl,
       const StreamConfiguration& stream_config,
       ProcessCaptureResultFunc process_capture_result, NotifyFunc notify,
-      HwlRequestBuffersFunc request_stream_buffers,
+      HwlSessionCallback session_callback,
       std::vector<HalStream>* hal_configured_streams,
       CameraBufferAllocatorHwl* camera_allocator_hwl);
 
diff --git a/common/hal/google_camera_hal/hdrplus_capture_session.cc b/common/hal/google_camera_hal/hdrplus_capture_session.cc
index aede085..4e05d13 100644
--- a/common/hal/google_camera_hal/hdrplus_capture_session.cc
+++ b/common/hal/google_camera_hal/hdrplus_capture_session.cc
@@ -17,15 +17,16 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "GCH_HdrplusCaptureSession"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
+#include "hdrplus_capture_session.h"
+
 #include <cutils/properties.h>
+#include <inttypes.h>
 #include <log/log.h>
 #include <utils/Trace.h>
 
-#include <inttypes.h>
 #include <set>
 
 #include "hal_utils.h"
-#include "hdrplus_capture_session.h"
 #include "hdrplus_process_block.h"
 #include "hdrplus_request_processor.h"
 #include "hdrplus_result_processor.h"
@@ -80,7 +81,7 @@
     CameraDeviceSessionHwl* device_session_hwl,
     const StreamConfiguration& stream_config,
     ProcessCaptureResultFunc process_capture_result, NotifyFunc notify,
-    HwlRequestBuffersFunc /*request_stream_buffers*/,
+    HwlSessionCallback /*session_callback*/,
     std::vector<HalStream>* hal_configured_streams,
     CameraBufferAllocatorHwl* /*camera_allocator_hwl*/) {
   ATRACE_CALL();
@@ -357,7 +358,8 @@
   }
 
   // Create realtime request processor.
-  request_processor_ = RealtimeZslRequestProcessor::Create(device_session_hwl_);
+  request_processor_ = RealtimeZslRequestProcessor::Create(
+      device_session_hwl_, HAL_PIXEL_FORMAT_RAW10);
   if (request_processor_ == nullptr) {
     ALOGE("%s: Creating RealtimeZslsRequestProcessor failed.", __FUNCTION__);
     return UNKNOWN_ERROR;
@@ -373,7 +375,7 @@
 
   // Create realtime result processor.
   auto result_processor = RealtimeZslResultProcessor::Create(
-      internal_stream_manager_.get(), *raw_stream_id);
+      internal_stream_manager_.get(), *raw_stream_id, HAL_PIXEL_FORMAT_RAW10);
   if (result_processor == nullptr) {
     ALOGE("%s: Creating RealtimeZslResultProcessor failed.", __FUNCTION__);
     return UNKNOWN_ERROR;
diff --git a/common/hal/google_camera_hal/hdrplus_capture_session.h b/common/hal/google_camera_hal/hdrplus_capture_session.h
index d346c3b..2519541 100644
--- a/common/hal/google_camera_hal/hdrplus_capture_session.h
+++ b/common/hal/google_camera_hal/hdrplus_capture_session.h
@@ -58,7 +58,7 @@
       CameraDeviceSessionHwl* device_session_hwl,
       const StreamConfiguration& stream_config,
       ProcessCaptureResultFunc process_capture_result, NotifyFunc notify,
-      HwlRequestBuffersFunc request_stream_buffers,
+      HwlSessionCallback session_callback,
       std::vector<HalStream>* hal_configured_streams,
       CameraBufferAllocatorHwl* camera_allocator_hwl = nullptr);
 
diff --git a/common/hal/google_camera_hal/pending_requests_tracker.cc b/common/hal/google_camera_hal/pending_requests_tracker.cc
index 27e5a0a..87ad722 100644
--- a/common/hal/google_camera_hal/pending_requests_tracker.cc
+++ b/common/hal/google_camera_hal/pending_requests_tracker.cc
@@ -26,7 +26,8 @@
 namespace google_camera_hal {
 
 std::unique_ptr<PendingRequestsTracker> PendingRequestsTracker::Create(
-    const std::vector<HalStream>& hal_configured_streams) {
+    const std::vector<HalStream>& hal_configured_streams,
+    const std::unordered_map<int32_t, int32_t>& grouped_stream_id_map) {
   auto tracker =
       std::unique_ptr<PendingRequestsTracker>(new PendingRequestsTracker());
   if (tracker == nullptr) {
@@ -34,7 +35,8 @@
     return nullptr;
   }
 
-  status_t res = tracker->Initialize(hal_configured_streams);
+  status_t res =
+      tracker->Initialize(hal_configured_streams, grouped_stream_id_map);
   if (res != OK) {
     ALOGE("%s: Initializing stream buffer tracker failed: %s(%d)", __FUNCTION__,
           strerror(-res), res);
@@ -45,18 +47,24 @@
 }
 
 status_t PendingRequestsTracker::Initialize(
-    const std::vector<HalStream>& hal_configured_streams) {
+    const std::vector<HalStream>& hal_configured_streams,
+    const std::unordered_map<int32_t, int32_t>& grouped_stream_id_map) {
+  grouped_stream_id_map_ = grouped_stream_id_map;
   for (auto& hal_stream : hal_configured_streams) {
-    auto [max_buffer_it, max_buffer_inserted] =
-        stream_max_buffers_.emplace(hal_stream.id, hal_stream.max_buffers);
-    if (!max_buffer_inserted) {
-      ALOGE("%s: There are duplicated stream id %d", __FUNCTION__,
-            hal_stream.id);
-      return BAD_VALUE;
-    }
+    int hal_stream_id = OverrideStreamIdForGroup(hal_stream.id);
+    // For grouped hal streams, only use one stream to represent the whole group
+    if (hal_stream_id == hal_stream.id) {
+      auto [max_buffer_it, max_buffer_inserted] =
+          stream_max_buffers_.emplace(hal_stream_id, hal_stream.max_buffers);
+      if (!max_buffer_inserted) {
+        ALOGE("%s: There are duplicated stream id %d", __FUNCTION__,
+              hal_stream_id);
+        return BAD_VALUE;
+      }
 
-    stream_pending_buffers_.emplace(hal_stream.id, /*pending_buffers=*/0);
-    stream_acquired_buffers_.emplace(hal_stream.id, /*pending_buffers=*/0);
+      stream_pending_buffers_.emplace(hal_stream_id, /*pending_buffers=*/0);
+      stream_acquired_buffers_.emplace(hal_stream_id, /*pending_buffers=*/0);
+    }
   }
 
   return OK;
@@ -66,12 +74,20 @@
   return stream_max_buffers_.find(stream_id) != stream_max_buffers_.end();
 }
 
+int32_t PendingRequestsTracker::OverrideStreamIdForGroup(int32_t stream_id) const {
+  if (grouped_stream_id_map_.count(stream_id) == 1) {
+    return grouped_stream_id_map_.at(stream_id);
+  } else {
+    return stream_id;
+  }
+}
+
 void PendingRequestsTracker::TrackRequestBuffersLocked(
     const std::vector<StreamBuffer>& requested_buffers) {
   ATRACE_CALL();
 
   for (auto& buffer : requested_buffers) {
-    int32_t stream_id = buffer.stream_id;
+    int32_t stream_id = OverrideStreamIdForGroup(buffer.stream_id);
     if (!IsStreamConfigured(stream_id)) {
       ALOGW("%s: stream %d was not configured.", __FUNCTION__, stream_id);
       // Continue to track other buffers.
@@ -89,7 +105,7 @@
   {
     std::lock_guard<std::mutex> lock(pending_requests_mutex_);
     for (auto& buffer : returned_buffers) {
-      int32_t stream_id = buffer.stream_id;
+      int32_t stream_id = OverrideStreamIdForGroup(buffer.stream_id);
       if (!IsStreamConfigured(stream_id)) {
         ALOGW("%s: stream %d was not configured.", __FUNCTION__, stream_id);
         // Continue to track other buffers.
@@ -104,6 +120,10 @@
       }
 
       stream_pending_buffers_[stream_id]--;
+      if (stream_pending_buffers_[stream_id] == 0) {
+        ALOGV("%s: stream %d all pending buffers have been returned.",
+              __FUNCTION__, stream_id);
+      }
     }
   }
 
@@ -118,7 +138,7 @@
   {
     std::lock_guard<std::mutex> lock(pending_acquisition_mutex_);
     for (auto& buffer : returned_buffers) {
-      int32_t stream_id = buffer.stream_id;
+      int32_t stream_id = OverrideStreamIdForGroup(buffer.stream_id);
       if (!IsStreamConfigured(stream_id)) {
         ALOGW("%s: stream %d was not configured.", __FUNCTION__, stream_id);
         // Continue to track other buffers.
@@ -126,8 +146,19 @@
       }
 
       if (stream_acquired_buffers_[stream_id] == 0) {
-        ALOGE("%s: stream %d should not have any pending acquired buffers.",
-              __FUNCTION__, stream_id);
+        if (buffer.status == BufferStatus::kOk) {
+          ALOGE("%s: stream %d should not have any pending acquired buffers.",
+                __FUNCTION__, stream_id);
+        } else {
+          // This may indicate that HAL doesn't intend to process a certain
+          // buffer, so the buffer isn't sent to pipeline and it's not
+          // explicitly allocated and recorded in buffer cache manager.
+          // The buffer still needs to return to framework with an error status
+          // if HAL doesn't process it.
+          ALOGV(
+              "%s: stream %d isn't acquired but returned with buffer status %u",
+              __FUNCTION__, stream_id, buffer.status);
+        }
         // Continue to track other buffers.
         continue;
       }
@@ -140,10 +171,15 @@
   return OK;
 }
 
+void PendingRequestsTracker::OnBufferCacheFlushed() {
+  std::unique_lock<std::mutex> lock(pending_requests_mutex_);
+  requested_stream_ids_.clear();
+}
+
 bool PendingRequestsTracker::DoStreamsHaveEnoughBuffersLocked(
     const std::vector<StreamBuffer>& buffers) const {
   for (auto& buffer : buffers) {
-    int32_t stream_id = buffer.stream_id;
+    int32_t stream_id = OverrideStreamIdForGroup(buffer.stream_id);
     if (!IsStreamConfigured(stream_id)) {
       ALOGE("%s: stream %d was not configured.", __FUNCTION__, stream_id);
       return false;
@@ -186,10 +222,17 @@
   }
 
   for (auto& buffer : requested_buffers) {
-    int32_t stream_id = buffer.stream_id;
+    int32_t stream_id = OverrideStreamIdForGroup(buffer.stream_id);
     auto stream_id_iter = requested_stream_ids_.find(stream_id);
     if (stream_id_iter == requested_stream_ids_.end()) {
       first_requested_stream_ids->push_back(stream_id);
+
+      // Include all stream IDs in the same group in first_requested_stream_ids
+      for (auto& [id_in_group, group_stream_id] : grouped_stream_id_map_) {
+        if (group_stream_id == stream_id) {
+          first_requested_stream_ids->push_back(id_in_group);
+        }
+      }
       requested_stream_ids_.emplace(stream_id);
     }
   }
@@ -236,8 +279,11 @@
     int32_t stream_id, uint32_t num_buffers) {
   ATRACE_CALL();
 
-  if (!IsStreamConfigured(stream_id)) {
-    ALOGW("%s: stream %d was not configured.", __FUNCTION__, stream_id);
+  int32_t overridden_stream_id = OverrideStreamIdForGroup(stream_id);
+
+  if (!IsStreamConfigured(overridden_stream_id)) {
+    ALOGW("%s: stream %d was not configured.", __FUNCTION__,
+          overridden_stream_id);
     // Continue to track other buffers.
     return BAD_VALUE;
   }
@@ -245,29 +291,31 @@
   std::unique_lock<std::mutex> lock(pending_acquisition_mutex_);
   if (!tracker_acquisition_condition_.wait_for(
           lock, std::chrono::milliseconds(kAcquireBufferTimeoutMs),
-          [this, stream_id, num_buffers] {
-            return DoesStreamHaveEnoughBuffersToAcquireLocked(stream_id,
-                                                              num_buffers);
+          [this, overridden_stream_id, num_buffers] {
+            return DoesStreamHaveEnoughBuffersToAcquireLocked(
+                overridden_stream_id, num_buffers);
           })) {
     ALOGW("%s: Waiting to acquire buffer timed out.", __FUNCTION__);
     return TIMED_OUT;
   }
 
-  stream_acquired_buffers_[stream_id] += num_buffers;
+  stream_acquired_buffers_[overridden_stream_id] += num_buffers;
 
   return OK;
 }
 
 void PendingRequestsTracker::TrackBufferAcquisitionFailure(int32_t stream_id,
                                                            uint32_t num_buffers) {
-  if (!IsStreamConfigured(stream_id)) {
-    ALOGW("%s: stream %d was not configured.", __FUNCTION__, stream_id);
+  int32_t overridden_stream_id = OverrideStreamIdForGroup(stream_id);
+  if (!IsStreamConfigured(overridden_stream_id)) {
+    ALOGW("%s: stream %d was not configured.", __FUNCTION__,
+          overridden_stream_id);
     // Continue to track other buffers.
     return;
   }
 
   std::unique_lock<std::mutex> lock(pending_acquisition_mutex_);
-  stream_acquired_buffers_[stream_id] -= num_buffers;
+  stream_acquired_buffers_[overridden_stream_id] -= num_buffers;
 }
 
 }  // namespace google_camera_hal
diff --git a/common/hal/google_camera_hal/pending_requests_tracker.h b/common/hal/google_camera_hal/pending_requests_tracker.h
index dc26212..cdff661 100644
--- a/common/hal/google_camera_hal/pending_requests_tracker.h
+++ b/common/hal/google_camera_hal/pending_requests_tracker.h
@@ -32,7 +32,8 @@
 class PendingRequestsTracker {
  public:
   static std::unique_ptr<PendingRequestsTracker> Create(
-      const std::vector<HalStream>& hal_configured_streams);
+      const std::vector<HalStream>& hal_configured_streams,
+      const std::unordered_map<int32_t, int32_t>& grouped_stream_id_map);
 
   // Wait until the requested streams have enough buffers and track
   // the requested buffers.
@@ -59,6 +60,9 @@
   status_t TrackReturnedAcquiredBuffers(
       const std::vector<StreamBuffer>& returned_buffers);
 
+  // Notify the request tracker that the buffer cache manager has been flushed.
+  void OnBufferCacheFlushed();
+
   virtual ~PendingRequestsTracker() = default;
 
  protected:
@@ -72,7 +76,9 @@
   static constexpr uint32_t kAcquireBufferTimeoutMs = 50;
 
   // Initialize the tracker.
-  status_t Initialize(const std::vector<HalStream>& hal_configured_streams);
+  status_t Initialize(
+      const std::vector<HalStream>& hal_configured_streams,
+      const std::unordered_map<int32_t, int32_t>& grouped_stream_id_map);
 
   // Return if all the buffers' streams have enough buffers to be requested.
   // Must be protected with pending_requests_mutex_.
@@ -99,6 +105,10 @@
   // Return if a stream ID is configured when Create() was called.
   bool IsStreamConfigured(int32_t stream_id) const;
 
+  // If the stream is part of a stream group, return the single stream id
+  // representing the group. Otherwise, return the id that's passed in.
+  int32_t OverrideStreamIdForGroup(int32_t stream_id) const;
+
   // Map from stream ID to the stream's max number of buffers.
   std::unordered_map<int32_t, uint32_t> stream_max_buffers_;
 
@@ -126,6 +136,11 @@
   // Contains the stream IDs that have been requested previously.
   // Must be protected with pending_requests_mutex_.
   std::unordered_set<int32_t> requested_stream_ids_;
+
+  // Map from stream IDs within a stream group to one stream ID for tracking
+  // purposes. For multi-resolution output, the HWL gets to decide which stream
+  // within a stream group outputs images.
+  std::unordered_map<int32_t, int32_t> grouped_stream_id_map_;
 };
 
 }  // namespace google_camera_hal
diff --git a/common/hal/google_camera_hal/realtime_zsl_request_processor.cc b/common/hal/google_camera_hal/realtime_zsl_request_processor.cc
index e8e04fc..1250496 100644
--- a/common/hal/google_camera_hal/realtime_zsl_request_processor.cc
+++ b/common/hal/google_camera_hal/realtime_zsl_request_processor.cc
@@ -17,34 +17,118 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "GCH_RealtimeZslRequestProcessor"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
+#include "realtime_zsl_request_processor.h"
+
 #include <log/log.h>
 #include <utils/Trace.h>
 
+#include <cstdint>
+#include <shared_mutex>
+
+#include "hal_types.h"
 #include "hal_utils.h"
-#include "realtime_zsl_request_processor.h"
+#include "system/graphics-base-v1.0.h"
+#include "utils/Errors.h"
 #include "vendor_tag_defs.h"
 
 namespace android {
 namespace google_camera_hal {
 
+namespace {
+// The width and height will be selected according to the following priority.
+// 1. select the JPEG size if it is in the supported output YUV list
+// 2. select the smallest output YUV size that
+//    1) has width/height ratio between the width/height ratio of JPEG and the
+//       max available output size
+//    2) is larger than JPEG size
+status_t SelectWidthAndHeight(uint32_t jpeg_width, uint32_t jpeg_height,
+                              CameraDeviceSessionHwl& device_session_hwl,
+                              uint32_t& selected_width,
+                              uint32_t& selected_height) {
+  std::unique_ptr<HalCameraMetadata> characteristics;
+  status_t res = device_session_hwl.GetCameraCharacteristics(&characteristics);
+  if (res != OK) {
+    ALOGE("%s: GetCameraCharacteristics failed.", __FUNCTION__);
+    return BAD_VALUE;
+  }
+
+  camera_metadata_ro_entry entry;
+  res = characteristics->Get(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                             &entry);
+  if (res != OK) {
+    ALOGE("%s: Not able to get ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS",
+          __FUNCTION__);
+    return BAD_VALUE;
+  }
+  int available_size = entry.count / 4;
+
+  uint32_t max_width = 0, max_height = 0;
+  for (int i = 0; i < available_size; ++i) {
+    if (entry.data.i32[4 * i] ==
+            android_pixel_format_t::HAL_PIXEL_FORMAT_YCBCR_420_888 &&
+        entry.data.i32[4 * i + 3] == static_cast<int32_t>(StreamType::kOutput)) {
+      uint32_t width = static_cast<uint32_t>(entry.data.i32[1 + 4 * i]);
+      uint32_t height = static_cast<uint32_t>(entry.data.i32[2 + 4 * i]);
+
+      // The requested JPEG size is a supported output YUV size. It is our top
+      // choice, return directly if found.
+      if (jpeg_width == width && jpeg_height == height) {
+        selected_height = jpeg_height;
+        selected_width = jpeg_width;
+        return OK;
+      }
+      max_width = std::max(max_width, width);
+      max_height = std::max(max_height, height);
+    }
+  }
+
+  bool selected = false;
+  for (int i = 0; i < available_size; i++) {
+    if (entry.data.i32[4 * i] ==
+            android_pixel_format_t::HAL_PIXEL_FORMAT_YCBCR_420_888 &&
+        entry.data.i32[4 * i + 3] == static_cast<int32_t>(StreamType::kOutput)) {
+      uint32_t width = static_cast<uint32_t>(entry.data.i32[1 + 4 * i]);
+      uint32_t height = static_cast<uint32_t>(entry.data.i32[2 + 4 * i]);
+      if (width < jpeg_width || height < jpeg_height) {
+        // YUV size is smaller than JPEG size
+        continue;
+      }
+      if (selected && width > selected_width) {
+        // Already found a smaller YUV size that fulfills the request
+        continue;
+      }
+
+      // Select the smallest supported YUV size with width/height ratio between
+      // JPEG and the max available output size. It is our second choice.
+      if ((jpeg_height * width - jpeg_width * height) *
+              (height * max_width - width * max_height) >=
+          0) {
+        selected = true;
+        selected_height = height;
+        selected_width = width;
+      }
+    }
+  }
+  if (!selected) {
+    // Select the largest supported YUV size. That is our last choice.
+    selected_height = max_height;
+    selected_width = max_width;
+  }
+  return OK;
+}
+}  // namespace
+
 std::unique_ptr<RealtimeZslRequestProcessor> RealtimeZslRequestProcessor::Create(
-    CameraDeviceSessionHwl* device_session_hwl) {
+    CameraDeviceSessionHwl* device_session_hwl,
+    android_pixel_format_t pixel_format) {
   ATRACE_CALL();
   if (device_session_hwl == nullptr) {
     ALOGE("%s: device_session_hwl is nullptr", __FUNCTION__);
     return nullptr;
   }
 
-  uint32_t num_physical_cameras =
-      device_session_hwl->GetPhysicalCameraIds().size();
-  if (num_physical_cameras > 1) {
-    ALOGE("%s: only support 1 physical camera but it has %u", __FUNCTION__,
-          num_physical_cameras);
-    return nullptr;
-  }
-
   auto request_processor = std::unique_ptr<RealtimeZslRequestProcessor>(
-      new RealtimeZslRequestProcessor());
+      new RealtimeZslRequestProcessor(pixel_format, device_session_hwl));
   if (request_processor == nullptr) {
     ALOGE("%s: Creating RealtimeZslRequestProcessor failed.", __FUNCTION__);
     return nullptr;
@@ -83,10 +167,11 @@
           res);
     return res;
   }
-
-  res = characteristics->Get(VendorTagIds::kHdrUsageMode, &entry);
-  if (res == OK) {
-    hdr_mode_ = static_cast<HdrMode>(entry.data.u8[0]);
+  if (pixel_format_ == android_pixel_format_t::HAL_PIXEL_FORMAT_RAW10) {
+    res = characteristics->Get(VendorTagIds::kHdrUsageMode, &entry);
+    if (res == OK) {
+      hdr_mode_ = static_cast<HdrMode>(entry.data.u8[0]);
+    }
   }
 
   return OK;
@@ -106,30 +191,61 @@
     return BAD_VALUE;
   }
 
-  // Register internal raw stream
-  Stream raw_stream;
-  raw_stream.stream_type = StreamType::kOutput;
-  raw_stream.width = active_array_width_;
-  raw_stream.height = active_array_height_;
-  raw_stream.format = HAL_PIXEL_FORMAT_RAW10;
-  raw_stream.usage = 0;
-  raw_stream.rotation = StreamRotation::kRotation0;
-  raw_stream.data_space = HAL_DATASPACE_ARBITRARY;
+  // For YUV ZSL, we will use the JPEG size for ZSL buffer size. We already
+  // checked the size is supported in capture session.
+  if (pixel_format_ == android_pixel_format_t::HAL_PIXEL_FORMAT_YCBCR_420_888) {
+    for (const auto& stream : stream_config.streams) {
+      if (utils::IsJPEGSnapshotStream(stream) ||
+          utils::IsYUVSnapshotStream(stream)) {
+        if (SelectWidthAndHeight(stream.width, stream.height,
+                                 *device_session_hwl_, active_array_width_,
+                                 active_array_height_) != OK) {
+          ALOGE("%s: failed to select ZSL YUV buffer width and height",
+                __FUNCTION__);
+          return BAD_VALUE;
+        }
+        ALOGI("%s, Snapshot size is (%d x %d), selected size is (%d x %d)",
+              __FUNCTION__, stream.width, stream.height, active_array_width_,
+              active_array_height_);
+        break;
+      }
+    }
+  }
+
+  // Register internal stream
+  Stream stream_to_add;
+  stream_to_add.stream_type = StreamType::kOutput;
+  stream_to_add.width = active_array_width_;
+  stream_to_add.height = active_array_height_;
+  stream_to_add.format = pixel_format_;
+  stream_to_add.usage = 0;
+  stream_to_add.rotation = StreamRotation::kRotation0;
+  stream_to_add.data_space = HAL_DATASPACE_ARBITRARY;
+  // For YUV ZSL buffer, if the stream configuration contains a physical stream,
+  // we will add the new stream as a physical stream. As we support physical
+  // streams only or logical streams only combinations, we can check the stream
+  // type of the first stream in the list.
+  if (pixel_format_ == android_pixel_format_t::HAL_PIXEL_FORMAT_YCBCR_420_888 &&
+      stream_config.streams[0].is_physical_camera_stream) {
+    stream_to_add.is_physical_camera_stream = true;
+    stream_to_add.physical_camera_id =
+        stream_config.streams[0].physical_camera_id;
+  }
 
   status_t result = internal_stream_manager->RegisterNewInternalStream(
-      raw_stream, &raw_stream_id_);
+      stream_to_add, &stream_id_);
   if (result != OK) {
     ALOGE("%s: RegisterNewInternalStream failed.", __FUNCTION__);
     return UNKNOWN_ERROR;
   }
 
   internal_stream_manager_ = internal_stream_manager;
-  // Set id back to raw_stream and then HWL can get correct HAL stream ID
-  raw_stream.id = raw_stream_id_;
+  // Set id back to stream and then HWL can get correct HAL stream ID
+  stream_to_add.id = stream_id_;
 
   process_block_stream_config->streams = stream_config.streams;
-  // Add internal RAW stream
-  process_block_stream_config->streams.push_back(raw_stream);
+  // Add internal stream
+  process_block_stream_config->streams.push_back(stream_to_add);
   process_block_stream_config->operation_mode = stream_config.operation_mode;
   process_block_stream_config->session_params =
       HalCameraMetadata::Clone(stream_config.session_params.get());
@@ -147,7 +263,7 @@
     return BAD_VALUE;
   }
 
-  std::lock_guard<std::mutex> lock(process_block_lock_);
+  std::lock_guard lock(process_block_lock_);
   if (process_block_ != nullptr) {
     ALOGE("%s: Already configured.", __FUNCTION__);
     return ALREADY_EXISTS;
@@ -160,7 +276,7 @@
 status_t RealtimeZslRequestProcessor::ProcessRequest(
     const CaptureRequest& request) {
   ATRACE_CALL();
-  std::lock_guard<std::mutex> lock(process_block_lock_);
+  std::shared_lock lock(process_block_lock_);
   if (process_block_ == nullptr) {
     ALOGE("%s: Not configured yet.", __FUNCTION__);
     return NO_INIT;
@@ -208,13 +324,13 @@
         HalCameraMetadata::Clone(physical_metadata.get());
   }
 
-  if (is_hdrplus_zsl_enabled_) {
-    // Get one RAW bffer from internal stream manager
+  if (is_hdrplus_zsl_enabled_ ||
+      pixel_format_ == android_pixel_format_t::HAL_PIXEL_FORMAT_YCBCR_420_888) {
+    // Get one buffer from internal stream manager
     StreamBuffer buffer = {};
     status_t result;
     if (preview_intent_seen_) {
-      result =
-          internal_stream_manager_->GetStreamBuffer(raw_stream_id_, &buffer);
+      result = internal_stream_manager_->GetStreamBuffer(stream_id_, &buffer);
       if (result != OK) {
         ALOGE("%s: frame:%d GetStreamBuffer failed.", __FUNCTION__,
               request.frame_number);
@@ -222,12 +338,12 @@
       }
     }
 
-    // Add RAW output to capture request
+    // Add output to capture request
     if (preview_intent_seen_) {
       block_request.output_buffers.push_back(buffer);
     }
 
-    if (block_request.settings != nullptr) {
+    if (block_request.settings != nullptr && is_hdrplus_zsl_enabled_) {
       bool enable_hybrid_ae =
           (hdr_mode_ == HdrMode::kNonHdrplusMode ? false : true);
       result = hal_utils::ModifyRealtimeRequestForHdrplus(
@@ -250,13 +366,12 @@
 
   std::vector<ProcessBlockRequest> block_requests(1);
   block_requests[0].request = std::move(block_request);
-
   return process_block_->ProcessRequests(block_requests, request);
 }
 
 status_t RealtimeZslRequestProcessor::Flush() {
   ATRACE_CALL();
-  std::lock_guard<std::mutex> lock(process_block_lock_);
+  std::shared_lock lock(process_block_lock_);
   if (process_block_ == nullptr) {
     return OK;
   }
diff --git a/common/hal/google_camera_hal/realtime_zsl_request_processor.h b/common/hal/google_camera_hal/realtime_zsl_request_processor.h
index 8779d1f..b8be026 100644
--- a/common/hal/google_camera_hal/realtime_zsl_request_processor.h
+++ b/common/hal/google_camera_hal/realtime_zsl_request_processor.h
@@ -17,6 +17,7 @@
 #ifndef HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_REALTIME_ZSL_REQUEST_PROCESSOR_H_
 #define HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_REALTIME_ZSL_REQUEST_PROCESSOR_H_
 
+#include <shared_mutex>
 #include "process_block.h"
 #include "request_processor.h"
 #include "vendor_tag_types.h"
@@ -31,7 +32,8 @@
   // device_session_hwl is owned by the caller and must be valid during the
   // lifetime of this RealtimeZslRequestProcessor.
   static std::unique_ptr<RealtimeZslRequestProcessor> Create(
-      CameraDeviceSessionHwl* device_session_hwl);
+      CameraDeviceSessionHwl* device_session_hwl,
+      android_pixel_format_t pixel_format);
 
   virtual ~RealtimeZslRequestProcessor() = default;
 
@@ -54,25 +56,31 @@
   // Override functions of RequestProcessor end.
 
  protected:
-  RealtimeZslRequestProcessor() = default;
+  RealtimeZslRequestProcessor(android_pixel_format_t pixel_format,
+                              CameraDeviceSessionHwl* device_session_hwl)
+      : pixel_format_(pixel_format),
+        device_session_hwl_(device_session_hwl),
+        is_hdrplus_zsl_enabled_(pixel_format == HAL_PIXEL_FORMAT_RAW10){};
 
  private:
   status_t Initialize(CameraDeviceSessionHwl* device_session_hwl);
-  std::mutex process_block_lock_;
+  std::shared_mutex process_block_lock_;
 
   // Protected by process_block_lock_.
   std::unique_ptr<ProcessBlock> process_block_;
 
-  InternalStreamManager* internal_stream_manager_;
+  InternalStreamManager* internal_stream_manager_ = nullptr;
+  android_pixel_format_t pixel_format_;
+  CameraDeviceSessionHwl* device_session_hwl_ = nullptr;
   bool preview_intent_seen_ = false;
-  int32_t raw_stream_id_ = -1;
+  int32_t stream_id_ = -1;
   uint32_t active_array_width_ = 0;
   uint32_t active_array_height_ = 0;
 
   HdrMode hdr_mode_ = HdrMode::kHdrplusMode;
 
   // If HDR+ ZSL is enabled.
-  bool is_hdrplus_zsl_enabled_ = true;
+  bool is_hdrplus_zsl_enabled_ = false;
 };
 
 }  // namespace google_camera_hal
diff --git a/common/hal/google_camera_hal/realtime_zsl_result_processor.cc b/common/hal/google_camera_hal/realtime_zsl_result_processor.cc
index 92fe396..d337f88 100644
--- a/common/hal/google_camera_hal/realtime_zsl_result_processor.cc
+++ b/common/hal/google_camera_hal/realtime_zsl_result_processor.cc
@@ -17,19 +17,21 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "GCH_RealtimeZslResultProcessor"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
+
+#include "realtime_zsl_result_processor.h"
+
+#include <inttypes.h>
 #include <log/log.h>
 #include <utils/Trace.h>
 
-#include <inttypes.h>
-
 #include "hal_utils.h"
-#include "realtime_zsl_result_processor.h"
 
 namespace android {
 namespace google_camera_hal {
 
 std::unique_ptr<RealtimeZslResultProcessor> RealtimeZslResultProcessor::Create(
-    InternalStreamManager* internal_stream_manager, int32_t raw_stream_id) {
+    InternalStreamManager* internal_stream_manager, int32_t stream_id,
+    android_pixel_format_t pixel_format, uint32_t partial_result_count) {
   ATRACE_CALL();
   if (internal_stream_manager == nullptr) {
     ALOGE("%s: internal_stream_manager is nullptr.", __FUNCTION__);
@@ -37,7 +39,8 @@
   }
 
   auto result_processor = std::unique_ptr<RealtimeZslResultProcessor>(
-      new RealtimeZslResultProcessor(internal_stream_manager, raw_stream_id));
+      new RealtimeZslResultProcessor(internal_stream_manager, stream_id,
+                                     pixel_format, partial_result_count));
   if (result_processor == nullptr) {
     ALOGE("%s: Creating RealtimeZslResultProcessor failed.", __FUNCTION__);
     return nullptr;
@@ -47,9 +50,12 @@
 }
 
 RealtimeZslResultProcessor::RealtimeZslResultProcessor(
-    InternalStreamManager* internal_stream_manager, int32_t raw_stream_id) {
+    InternalStreamManager* internal_stream_manager, int32_t stream_id,
+    android_pixel_format_t pixel_format, uint32_t partial_result_count) {
   internal_stream_manager_ = internal_stream_manager;
-  raw_stream_id_ = raw_stream_id;
+  stream_id_ = stream_id;
+  pixel_format_ = pixel_format;
+  partial_result_count_ = partial_result_count;
 }
 
 void RealtimeZslResultProcessor::SetResultCallback(
@@ -153,8 +159,10 @@
     return BAD_VALUE;
   }
 
-  SaveFdForHdrplus(remaining_session_request);
-  SaveLsForHdrplus(remaining_session_request);
+  if (pixel_format_ == HAL_PIXEL_FORMAT_RAW10) {
+    SaveFdForHdrplus(remaining_session_request);
+    SaveLsForHdrplus(remaining_session_request);
+  }
 
   return OK;
 }
@@ -176,12 +184,12 @@
 
   // Return filled raw buffer to internal stream manager
   // And remove raw buffer from result
-  bool raw_output = false;
+  bool returned_output = false;
   status_t res;
   std::vector<StreamBuffer> modified_output_buffers;
   for (uint32_t i = 0; i < result->output_buffers.size(); i++) {
-    if (raw_stream_id_ == result->output_buffers[i].stream_id) {
-      raw_output = true;
+    if (stream_id_ == result->output_buffers[i].stream_id) {
+      returned_output = true;
       res = internal_stream_manager_->ReturnFilledBuffer(
           result->frame_number, result->output_buffers[i]);
       if (res != OK) {
@@ -199,37 +207,45 @@
   }
 
   if (result->result_metadata) {
+    result->result_metadata->Erase(ANDROID_CONTROL_ENABLE_ZSL);
+
     res = internal_stream_manager_->ReturnMetadata(
-        raw_stream_id_, result->frame_number, result->result_metadata.get());
+        stream_id_, result->frame_number, result->result_metadata.get(),
+        result->partial_result);
     if (res != OK) {
       ALOGW("%s: (%d)ReturnMetadata fail", __FUNCTION__, result->frame_number);
     }
 
-    res = hal_utils::SetEnableZslMetadata(result->result_metadata.get(), false);
-    if (res != OK) {
-      ALOGW("%s: SetEnableZslMetadata (%d) fail", __FUNCTION__,
-            result->frame_number);
-    }
+    if (result->partial_result == partial_result_count_) {
+      res =
+          hal_utils::SetEnableZslMetadata(result->result_metadata.get(), false);
+      if (res != OK) {
+        ALOGW("%s: SetEnableZslMetadata (%d) fail", __FUNCTION__,
+              result->frame_number);
+      }
 
-    res = HandleFdResultForHdrplus(result->frame_number,
-                                   result->result_metadata.get());
-    if (res != OK) {
-      ALOGE("%s: HandleFdResultForHdrplus(%d) fail", __FUNCTION__,
-            result->frame_number);
-      return;
-    }
+      if (pixel_format_ == HAL_PIXEL_FORMAT_RAW10) {
+        res = HandleFdResultForHdrplus(result->frame_number,
+                                       result->result_metadata.get());
+        if (res != OK) {
+          ALOGE("%s: HandleFdResultForHdrplus(%d) fail", __FUNCTION__,
+                result->frame_number);
+          return;
+        }
 
-    res = HandleLsResultForHdrplus(result->frame_number,
-                                   result->result_metadata.get());
-    if (res != OK) {
-      ALOGE("%s: HandleLsResultForHdrplus(%d) fail", __FUNCTION__,
-            result->frame_number);
-      return;
+        res = HandleLsResultForHdrplus(result->frame_number,
+                                       result->result_metadata.get());
+        if (res != OK) {
+          ALOGE("%s: HandleLsResultForHdrplus(%d) fail", __FUNCTION__,
+                result->frame_number);
+          return;
+        }
+      }
     }
   }
 
   // Don't send result to framework if only internal raw callback
-  if (raw_output && result->result_metadata == nullptr &&
+  if (returned_output && result->result_metadata == nullptr &&
       result->output_buffers.size() == 0) {
     return;
   }
diff --git a/common/hal/google_camera_hal/realtime_zsl_result_processor.h b/common/hal/google_camera_hal/realtime_zsl_result_processor.h
index 41ad622..3a59df2 100644
--- a/common/hal/google_camera_hal/realtime_zsl_result_processor.h
+++ b/common/hal/google_camera_hal/realtime_zsl_result_processor.h
@@ -29,7 +29,8 @@
 class RealtimeZslResultProcessor : public ResultProcessor {
  public:
   static std::unique_ptr<RealtimeZslResultProcessor> Create(
-      InternalStreamManager* internal_stream_manager, int32_t raw_stream_id);
+      InternalStreamManager* internal_stream_manager, int32_t stream_id,
+      android_pixel_format_t pixel_format, uint32_t partial_result_count = 1);
 
   virtual ~RealtimeZslResultProcessor() = default;
 
@@ -52,7 +53,9 @@
 
  protected:
   RealtimeZslResultProcessor(InternalStreamManager* internal_stream_manager,
-                             int32_t raw_stream_id);
+                             int32_t stream_id,
+                             android_pixel_format_t pixel_format,
+                             uint32_t partial_result_count);
 
  private:
   // Save face detect mode for HDR+
@@ -72,7 +75,8 @@
   NotifyFunc notify_;
 
   InternalStreamManager* internal_stream_manager_;
-  int32_t raw_stream_id_ = -1;
+  int32_t stream_id_ = -1;
+  android_pixel_format_t pixel_format_;
 
   // Current face detect mode set by framework.
   uint8_t current_face_detect_mode_ = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
@@ -92,6 +96,9 @@
   // by framework. And requested_lens_shading_map_modes_ is protected by
   // lens_shading_lock_
   std::unordered_map<uint32_t, uint8_t> requested_lens_shading_map_modes_;
+
+  // Partial result count reported by HAL
+  uint32_t partial_result_count_;
 };
 
 }  // namespace google_camera_hal
diff --git a/common/hal/google_camera_hal/rgbird_capture_session.cc b/common/hal/google_camera_hal/rgbird_capture_session.cc
index 41ec461..5ec8e98 100644
--- a/common/hal/google_camera_hal/rgbird_capture_session.cc
+++ b/common/hal/google_camera_hal/rgbird_capture_session.cc
@@ -90,7 +90,7 @@
     CameraDeviceSessionHwl* device_session_hwl,
     const StreamConfiguration& stream_config,
     ProcessCaptureResultFunc process_capture_result, NotifyFunc notify,
-    HwlRequestBuffersFunc request_stream_buffers,
+    HwlSessionCallback session_callback,
     std::vector<HalStream>* hal_configured_streams,
     CameraBufferAllocatorHwl* /*camera_allocator_hwl*/) {
   ATRACE_CALL();
@@ -103,7 +103,7 @@
 
   status_t res = session->Initialize(
       device_session_hwl, stream_config, process_capture_result, notify,
-      request_stream_buffers, hal_configured_streams);
+      session_callback.request_stream_buffers, hal_configured_streams);
   if (res != OK) {
     ALOGE("%s: Initializing RgbirdCaptureSession failed: %s (%d).",
           __FUNCTION__, strerror(-res), res);
@@ -954,7 +954,7 @@
 
   // TODO(b/128633958): remove this after FLL syncing is verified
   force_internal_stream_ =
-      property_get_bool("persist.camera.rgbird.forceinternal", false);
+      property_get_bool("persist.vendor.camera.rgbird.forceinternal", false);
   if (force_internal_stream_) {
     ALOGI("%s: Force creating internal streams for IR pipelines", __FUNCTION__);
   }
diff --git a/common/hal/google_camera_hal/rgbird_capture_session.h b/common/hal/google_camera_hal/rgbird_capture_session.h
index eb76913..0ba6675 100644
--- a/common/hal/google_camera_hal/rgbird_capture_session.h
+++ b/common/hal/google_camera_hal/rgbird_capture_session.h
@@ -61,7 +61,7 @@
       CameraDeviceSessionHwl* device_session_hwl,
       const StreamConfiguration& stream_config,
       ProcessCaptureResultFunc process_capture_result, NotifyFunc notify,
-      HwlRequestBuffersFunc request_stream_buffers,
+      HwlSessionCallback session_callback,
       std::vector<HalStream>* hal_configured_streams,
       CameraBufferAllocatorHwl* camera_allocator_hwl = nullptr);
 
diff --git a/common/hal/google_camera_hal/rgbird_result_request_processor.cc b/common/hal/google_camera_hal/rgbird_result_request_processor.cc
index 28e8376..ee41697 100644
--- a/common/hal/google_camera_hal/rgbird_result_request_processor.cc
+++ b/common/hal/google_camera_hal/rgbird_result_request_processor.cc
@@ -43,7 +43,7 @@
 
   // TODO(b/128633958): remove this after FLL syncing is verified
   result_processor->force_internal_stream_ =
-      property_get_bool("persist.camera.rgbird.forceinternal", false);
+      property_get_bool("persist.vendor.camera.rgbird.forceinternal", false);
   if (result_processor->force_internal_stream_) {
     ALOGI("%s: Force creating internal streams for IR pipelines", __FUNCTION__);
   }
diff --git a/common/hal/google_camera_hal/rgbird_rt_request_processor.cc b/common/hal/google_camera_hal/rgbird_rt_request_processor.cc
index bf63b6a..cf2c6a3 100644
--- a/common/hal/google_camera_hal/rgbird_rt_request_processor.cc
+++ b/common/hal/google_camera_hal/rgbird_rt_request_processor.cc
@@ -77,7 +77,7 @@
 
   // TODO(b/128633958): remove this after FLL syncing is verified
   request_processor->force_internal_stream_ =
-      property_get_bool("persist.camera.rgbird.forceinternal", false);
+      property_get_bool("persist.vendor.camera.rgbird.forceinternal", false);
   if (request_processor->force_internal_stream_) {
     ALOGI("%s: Force creating internal streams for IR pipelines", __FUNCTION__);
   }
diff --git a/common/hal/google_camera_hal/snapshot_request_processor.cc b/common/hal/google_camera_hal/snapshot_request_processor.cc
new file mode 100644
index 0000000..bd07b55
--- /dev/null
+++ b/common/hal/google_camera_hal/snapshot_request_processor.cc
@@ -0,0 +1,220 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#include "system/graphics-base-v1.0.h"
+#define LOG_TAG "GCH_SnapshotRequestProcessor"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+#include <log/log.h>
+#include <utils/Trace.h>
+
+#include "snapshot_request_processor.h"
+#include "vendor_tag_defs.h"
+
+namespace android {
+namespace google_camera_hal {
+
+std::unique_ptr<SnapshotRequestProcessor> SnapshotRequestProcessor::Create(
+    CameraDeviceSessionHwl* device_session_hwl,
+    HwlSessionCallback session_callback, int32_t yuv_stream_id) {
+  ATRACE_CALL();
+  if (device_session_hwl == nullptr) {
+    ALOGE("%s: device_session_hwl (%p) is nullptr", __FUNCTION__,
+          device_session_hwl);
+    return nullptr;
+  }
+
+  auto request_processor = std::unique_ptr<SnapshotRequestProcessor>(
+      new SnapshotRequestProcessor(session_callback));
+  if (request_processor == nullptr) {
+    ALOGE("%s: Creating SnapshotRequestProcessor failed.", __FUNCTION__);
+    return nullptr;
+  }
+
+  status_t res =
+      request_processor->Initialize(device_session_hwl, yuv_stream_id);
+  if (res != OK) {
+    ALOGE("%s: Initializing SnapshotRequestProcessor failed: %s (%d).",
+          __FUNCTION__, strerror(-res), res);
+    return nullptr;
+  }
+
+  return request_processor;
+}
+
+status_t SnapshotRequestProcessor::Initialize(
+    CameraDeviceSessionHwl* device_session_hwl, int32_t yuv_stream_id) {
+  ATRACE_CALL();
+  std::unique_ptr<HalCameraMetadata> characteristics;
+  status_t res = device_session_hwl->GetCameraCharacteristics(&characteristics);
+  if (res != OK) {
+    ALOGE("%s: GetCameraCharacteristics failed.", __FUNCTION__);
+    return BAD_VALUE;
+  }
+
+  camera_metadata_ro_entry entry;
+  res = characteristics->Get(
+      ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, &entry);
+  if (res == OK) {
+    active_array_width_ = entry.data.i32[2];
+    active_array_height_ = entry.data.i32[3];
+    ALOGI("%s Active size (%d x %d).", __FUNCTION__, active_array_width_,
+          active_array_height_);
+  } else {
+    ALOGE("%s Get active size failed: %s (%d).", __FUNCTION__, strerror(-res),
+          res);
+    return res;
+  }
+
+  yuv_stream_id_ = yuv_stream_id;
+
+  return OK;
+}
+
+status_t SnapshotRequestProcessor::ConfigureStreams(
+    InternalStreamManager* internal_stream_manager,
+    const StreamConfiguration& stream_config,
+    StreamConfiguration* process_block_stream_config) {
+  ATRACE_CALL();
+  if (process_block_stream_config == nullptr ||
+      internal_stream_manager == nullptr) {
+    ALOGE(
+        "%s: process_block_stream_config (%p) is nullptr or "
+        "internal_stream_manager (%p) is nullptr",
+        __FUNCTION__, process_block_stream_config, internal_stream_manager);
+    return BAD_VALUE;
+  }
+
+  internal_stream_manager_ = internal_stream_manager;
+
+  Stream yuv_stream;
+  yuv_stream.stream_type = StreamType::kInput;
+  yuv_stream.width = active_array_width_;
+  yuv_stream.height = active_array_height_;
+  yuv_stream.format = HAL_PIXEL_FORMAT_YCBCR_420_888;
+  yuv_stream.usage = 0;
+  yuv_stream.rotation = StreamRotation::kRotation0;
+  yuv_stream.data_space = HAL_DATASPACE_ARBITRARY;
+  // Set id back to yuv_stream and then HWL can get correct HAL stream ID
+  yuv_stream.id = yuv_stream_id_;
+
+  process_block_stream_config->streams = stream_config.streams;
+  // Add internal yuv stream
+  process_block_stream_config->streams.push_back(yuv_stream);
+  process_block_stream_config->operation_mode = stream_config.operation_mode;
+  process_block_stream_config->session_params =
+      HalCameraMetadata::Clone(stream_config.session_params.get());
+  process_block_stream_config->stream_config_counter =
+      stream_config.stream_config_counter;
+
+  return OK;
+}
+
+status_t SnapshotRequestProcessor::SetProcessBlock(
+    std::unique_ptr<ProcessBlock> process_block) {
+  ATRACE_CALL();
+  if (process_block == nullptr) {
+    ALOGE("%s: process_block is nullptr", __FUNCTION__);
+    return BAD_VALUE;
+  }
+
+  std::lock_guard<std::mutex> lock(process_block_lock_);
+  if (process_block_ != nullptr) {
+    ALOGE("%s: Already configured.", __FUNCTION__);
+    return ALREADY_EXISTS;
+  }
+
+  process_block_ = std::move(process_block);
+  return OK;
+}
+
+bool SnapshotRequestProcessor::IsReadyForNextRequest() {
+  ATRACE_CALL();
+  if (internal_stream_manager_ == nullptr) {
+    ALOGW("%s: internal_stream_manager_ nullptr", __FUNCTION__);
+    return false;
+  }
+  if (internal_stream_manager_->IsPendingBufferEmpty(yuv_stream_id_) == false) {
+    return false;
+  }
+  return true;
+}
+
+status_t SnapshotRequestProcessor::ProcessRequest(const CaptureRequest& request) {
+  ATRACE_CALL();
+  std::lock_guard<std::mutex> lock(process_block_lock_);
+  if (process_block_ == nullptr) {
+    ALOGE("%s: Not configured yet.", __FUNCTION__);
+    return NO_INIT;
+  }
+
+  if (IsReadyForNextRequest() == false) {
+    return BAD_VALUE;
+  }
+
+  CaptureRequest block_request;
+  block_request.frame_number = request.frame_number;
+  block_request.settings = HalCameraMetadata::Clone(request.settings.get());
+
+  for (const auto& output_buffer : request.output_buffers) {
+    session_callback_.request_stream_buffers(
+        output_buffer.stream_id, /*buffer_sizes=*/1,
+        &block_request.output_buffers, request.frame_number);
+  }
+
+  for (auto& [camera_id, physical_metadata] : request.physical_camera_settings) {
+    block_request.physical_camera_settings[camera_id] =
+        HalCameraMetadata::Clone(physical_metadata.get());
+  }
+
+  // Get multiple yuv buffers and metadata from internal stream as input
+  status_t result = internal_stream_manager_->GetMostRecentStreamBuffer(
+      yuv_stream_id_, &(block_request.input_buffers),
+      &(block_request.input_buffer_metadata), /*payload_frames=*/kZslBufferSize);
+  if (result != OK) {
+    ALOGE("%s: frame:%d GetStreamBuffer failed.", __FUNCTION__,
+          request.frame_number);
+    session_callback_.return_stream_buffers(block_request.output_buffers);
+    return UNKNOWN_ERROR;
+  }
+
+  // TODO(mhtan): may need to remove some metadata here.
+  std::vector<ProcessBlockRequest> block_requests(1);
+  block_requests[0].request = std::move(block_request);
+  ALOGD("%s: frame number %u is a snapshot request.", __FUNCTION__,
+        request.frame_number);
+
+  result = process_block_->ProcessRequests(block_requests, request);
+  if (result != OK) {
+    session_callback_.return_stream_buffers(
+        block_requests[0].request.output_buffers);
+  }
+
+  return result;
+}
+
+status_t SnapshotRequestProcessor::Flush() {
+  ATRACE_CALL();
+  std::lock_guard<std::mutex> lock(process_block_lock_);
+  if (process_block_ == nullptr) {
+    return OK;
+  }
+
+  return process_block_->Flush();
+}
+
+}  // namespace google_camera_hal
+}  // namespace android
\ No newline at end of file
diff --git a/common/hal/google_camera_hal/snapshot_request_processor.h b/common/hal/google_camera_hal/snapshot_request_processor.h
new file mode 100644
index 0000000..a17191a
--- /dev/null
+++ b/common/hal/google_camera_hal/snapshot_request_processor.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_SNAPSHOT_REQUEST_PROCESSOR_H_
+#define HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_SNAPSHOT_REQUEST_PROCESSOR_H_
+
+#include "process_block.h"
+#include "request_processor.h"
+
+namespace android {
+namespace google_camera_hal {
+
+// SnapshotRequestProcessor implements a RequestProcessor that adds
+// internal yuv stream as input stream to request and forwards the request to
+// its ProcessBlock.
+class SnapshotRequestProcessor : public RequestProcessor {
+ public:
+  // device_session_hwl is owned by the caller and must be valid during the
+  // lifetime of this SnapshotRequestProcessor.
+  static std::unique_ptr<SnapshotRequestProcessor> Create(
+      CameraDeviceSessionHwl* device_session_hwl,
+      HwlSessionCallback session_callback, int32_t yuv_stream_id);
+
+  virtual ~SnapshotRequestProcessor() = default;
+
+  // Override functions of RequestProcessor start.
+  status_t ConfigureStreams(
+      InternalStreamManager* internal_stream_manager,
+      const StreamConfiguration& stream_config,
+      StreamConfiguration* process_block_stream_config) override;
+
+  status_t SetProcessBlock(std::unique_ptr<ProcessBlock> process_block) override;
+
+  // Adds internal yuv stream as input stream to request and forwards the
+  // request to its ProcessBlock.
+  status_t ProcessRequest(const CaptureRequest& request) override;
+
+  status_t Flush() override;
+  // Override functions of RequestProcessor end.
+
+ protected:
+  explicit SnapshotRequestProcessor(HwlSessionCallback session_callback)
+      : session_callback_(session_callback) {
+  }
+
+ private:
+  status_t Initialize(CameraDeviceSessionHwl* device_session_hwl,
+                      int32_t yuv_stream_id);
+  bool IsReadyForNextRequest();
+
+  // Number of internal YUV (ZSL) buffers requested as payload frames for one
+  // snapshot request.
+  static constexpr int kZslBufferSize = 3;
+  std::mutex process_block_lock_;
+
+  // Protected by process_block_lock_.
+  std::unique_ptr<ProcessBlock> process_block_;
+
+  // Not owned here; provided via ConfigureStreams and used to fetch the most
+  // recent internal YUV stream buffers for snapshot input.
+  InternalStreamManager* internal_stream_manager_ = nullptr;
+  // Stream id of the internal YUV stream; -1 until set.
+  int32_t yuv_stream_id_ = -1;
+  uint32_t active_array_width_ = 0;
+  uint32_t active_array_height_ = 0;
+
+  HwlSessionCallback session_callback_;
+};
+
+}  // namespace google_camera_hal
+}  // namespace android
+
+#endif  // HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_SNAPSHOT_REQUEST_PROCESSOR_H_
diff --git a/common/hal/google_camera_hal/snapshot_result_processor.cc b/common/hal/google_camera_hal/snapshot_result_processor.cc
new file mode 100644
index 0000000..dd79daf
--- /dev/null
+++ b/common/hal/google_camera_hal/snapshot_result_processor.cc
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "GCH_SnapshotResultProcessor"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+#include "snapshot_result_processor.h"
+
+#include <inttypes.h>
+#include <log/log.h>
+#include <utils/Trace.h>
+
+#include "hal_utils.h"
+
+namespace android {
+namespace google_camera_hal {
+
+// Creates a SnapshotResultProcessor bound to the given internal stream
+// manager and internal YUV stream id. Returns nullptr if
+// internal_stream_manager is null.
+std::unique_ptr<SnapshotResultProcessor> SnapshotResultProcessor::Create(
+    InternalStreamManager* internal_stream_manager, int32_t yuv_stream_id) {
+  ATRACE_CALL();
+  if (internal_stream_manager == nullptr) {
+    ALOGE("%s: internal_stream_manager nullptr.", __FUNCTION__);
+    return nullptr;
+  }
+
+  auto result_processor = std::unique_ptr<SnapshotResultProcessor>(
+      new SnapshotResultProcessor(internal_stream_manager, yuv_stream_id));
+  // NOTE(review): plain operator new throws on allocation failure, so this
+  // null check is effectively dead code; kept to match sibling factories.
+  if (result_processor == nullptr) {
+    ALOGE("%s: Creating SnapshotResultProcessor failed.", __FUNCTION__);
+    return nullptr;
+  }
+
+  return result_processor;
+}
+
+// Stores the (caller-owned) internal stream manager and the internal YUV
+// stream id used to return ZSL buffers in ProcessResult.
+SnapshotResultProcessor::SnapshotResultProcessor(
+    InternalStreamManager* internal_stream_manager, int32_t yuv_stream_id) {
+  internal_stream_manager_ = internal_stream_manager;
+  yuv_stream_id_ = yuv_stream_id;
+}
+
+// Installs the capture-result and notify callbacks under callback_lock_ so
+// they are not swapped while a result or message is being delivered.
+void SnapshotResultProcessor::SetResultCallback(
+    ProcessCaptureResultFunc process_capture_result, NotifyFunc notify) {
+  ATRACE_CALL();
+  std::lock_guard<std::mutex> lock(callback_lock_);
+  process_capture_result_ = process_capture_result;
+  notify_ = notify;
+}
+
+// Validates that the given process block requests cover every remaining
+// output buffer of the session request; this processor is the last in the
+// chain, so any uncovered buffer would never be completed.
+status_t SnapshotResultProcessor::AddPendingRequests(
+    const std::vector<ProcessBlockRequest>& process_block_requests,
+    const CaptureRequest& remaining_session_request) {
+  ATRACE_CALL();
+  // This is the last result processor. Validity check if requests contains
+  // all remaining output buffers.
+  if (!hal_utils::AreAllRemainingBuffersRequested(process_block_requests,
+                                                  remaining_session_request)) {
+    ALOGE("%s: Some output buffers will not be completed.", __FUNCTION__);
+    return BAD_VALUE;
+  }
+
+  return OK;
+}
+
+// Handles one result from the snapshot process block: returns the internal
+// ZSL YUV input buffers to the stream manager, tags the result metadata with
+// the ZSL-enabled flag, and forwards the result to the session callback.
+void SnapshotResultProcessor::ProcessResult(ProcessBlockResult block_result) {
+  ATRACE_CALL();
+  std::lock_guard<std::mutex> lock(callback_lock_);
+
+  std::unique_ptr<CaptureResult> result = std::move(block_result.result);
+  if (result == nullptr) {
+    ALOGW("%s: Received a nullptr result.", __FUNCTION__);
+    return;
+  }
+
+  if (process_capture_result_ == nullptr) {
+    ALOGE("%s: process_capture_result_ is nullptr. Dropping a result.",
+          __FUNCTION__);
+    return;
+  }
+
+  // Return yuv buffer to internal stream manager and remove it from result
+  status_t res;
+  if (result->output_buffers.size() != 0 &&
+      internal_stream_manager_->IsPendingBufferEmpty(yuv_stream_id_) == false) {
+    res = internal_stream_manager_->ReturnZslStreamBuffers(result->frame_number,
+                                                           yuv_stream_id_);
+    if (res != OK) {
+      // Best-effort: log and fall through so the result is still delivered.
+      ALOGE("%s: (%d)ReturnZslStreamBuffers fail", __FUNCTION__,
+            result->frame_number);
+    } else {
+      ALOGI("%s: (%d)ReturnZslStreamBuffers ok", __FUNCTION__,
+            result->frame_number);
+    }
+    // Input buffers were internal ZSL buffers; strip them so the framework
+    // never sees them.
+    result->input_buffers.clear();
+  }
+
+  if (result->result_metadata) {
+    res = hal_utils::SetEnableZslMetadata(result->result_metadata.get(), true);
+    if (res != OK) {
+      ALOGW("%s: SetEnableZslMetadata (%d) fail", __FUNCTION__,
+            result->frame_number);
+    }
+  }
+
+  process_capture_result_(std::move(result));
+}
+
+// Forwards a notify message (shutter/error) from the process block to the
+// session's notify callback; drops the message if no callback is installed.
+void SnapshotResultProcessor::Notify(
+    const ProcessBlockNotifyMessage& block_message) {
+  ATRACE_CALL();
+  std::lock_guard<std::mutex> lock(callback_lock_);
+  if (notify_ == nullptr) {
+    ALOGE("%s: notify_ is nullptr. Dropping a message.", __FUNCTION__);
+    return;
+  }
+
+  notify_(block_message.message);
+}
+
+// Flushing pending requests is deliberately unsupported for the snapshot
+// result path; callers get INVALID_OPERATION.
+status_t SnapshotResultProcessor::FlushPendingRequests() {
+  ATRACE_CALL();
+  return INVALID_OPERATION;
+}
+
+}  // namespace google_camera_hal
+}  // namespace android
\ No newline at end of file
diff --git a/common/hal/google_camera_hal/snapshot_result_processor.h b/common/hal/google_camera_hal/snapshot_result_processor.h
new file mode 100644
index 0000000..08ac23e
--- /dev/null
+++ b/common/hal/google_camera_hal/snapshot_result_processor.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_SNAPSHOT_RESULT_PROCESSOR_H_
+#define HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_SNAPSHOT_RESULT_PROCESSOR_H_
+
+#include "internal_stream_manager.h"
+#include "result_processor.h"
+
+namespace android {
+namespace google_camera_hal {
+
+// SnapshotResultProcessor implements a ResultProcessor that return
+// yuv buffer to internal stream manager and forwards the results without
+// yuv buffer to its callback functions.
+class SnapshotResultProcessor : public ResultProcessor {
+ public:
+  static std::unique_ptr<SnapshotResultProcessor> Create(
+      InternalStreamManager* internal_stream_manager, int32_t yuv_stream_id);
+
+  virtual ~SnapshotResultProcessor() = default;
+
+  // Override functions of ResultProcessor start.
+  void SetResultCallback(ProcessCaptureResultFunc process_capture_result,
+                         NotifyFunc notify) override;
+
+  status_t AddPendingRequests(
+      const std::vector<ProcessBlockRequest>& process_block_requests,
+      const CaptureRequest& remaining_session_request) override;
+
+  // Return yuv buffer to internal stream manager and forwards the results
+  // without yuv buffer to its callback functions.
+  void ProcessResult(ProcessBlockResult block_result) override;
+
+  void Notify(const ProcessBlockNotifyMessage& block_message) override;
+
+  status_t FlushPendingRequests() override;
+  // Override functions of ResultProcessor end.
+
+ protected:
+  SnapshotResultProcessor(InternalStreamManager* internal_stream_manager,
+                          int32_t yuv_stream_id);
+
+ private:
+  std::mutex callback_lock_;
+
+  // The following callbacks must be protected by callback_lock_.
+  ProcessCaptureResultFunc process_capture_result_;
+  NotifyFunc notify_;
+
+  // Not owned; receives returned ZSL YUV buffers in ProcessResult.
+  InternalStreamManager* internal_stream_manager_ = nullptr;
+  // Stream id of the internal YUV stream; -1 until set by the constructor.
+  int32_t yuv_stream_id_ = -1;
+};
+
+}  // namespace google_camera_hal
+}  // namespace android
+
+#endif  // HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_SNAPSHOT_RESULT_PROCESSOR_H_
\ No newline at end of file
diff --git a/common/hal/google_camera_hal/zsl_snapshot_capture_session.cc b/common/hal/google_camera_hal/zsl_snapshot_capture_session.cc
new file mode 100644
index 0000000..077a880
--- /dev/null
+++ b/common/hal/google_camera_hal/zsl_snapshot_capture_session.cc
@@ -0,0 +1,763 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+
+#define LOG_TAG "GCH_ZslSnapshotCaptureSession"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+
+#include "zsl_snapshot_capture_session.h"
+
+#include <dlfcn.h>
+#include <log/log.h>
+#include <sys/stat.h>
+#include <utils/Trace.h>
+
+#include "hal_utils.h"
+#include "snapshot_request_processor.h"
+#include "snapshot_result_processor.h"
+#include "system/graphics-base-v1.0.h"
+#include "utils.h"
+#include "utils/Errors.h"
+#include "vendor_tag_defs.h"
+
+namespace android {
+namespace google_camera_hal {
+namespace {
+
+#if GCH_HWL_USE_DLOPEN
+// HAL external process block library path
+#if defined(_LP64)
+constexpr char kExternalProcessBlockDir[] =
+    "/vendor/lib64/camera/google_proprietary/";
+#else  // defined(_LP64)
+constexpr char kExternalProcessBlockDir[] =
+    "/vendor/lib/camera/google_proprietary/";
+#endif
+#endif  // GCH_HWL_USE_DLOPEN
+
+// Returns true only when the request settings select still-capture intent
+// with high-quality noise reduction, edge, and tonemap modes and effects off,
+// i.e. the combination the SW-denoise snapshot path handles.
+bool IsSwDenoiseSnapshotCompatible(const CaptureRequest& request) {
+  if (request.settings == nullptr) {
+    return false;
+  }
+  camera_metadata_ro_entry entry;
+  // NOTE(review): each check below dereferences entry.data.u8 without
+  // verifying entry.count > 0; a zero-count entry would read invalid data.
+  // Confirm Get() guarantees count >= 1 on OK.
+  if (request.settings->Get(ANDROID_CONTROL_CAPTURE_INTENT, &entry) != OK ||
+      *entry.data.u8 != ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
+    ALOGV("%s: ANDROID_CONTROL_CAPTURE_INTENT is not STILL_CAPTURE",
+          __FUNCTION__);
+    return false;
+  }
+
+  if (request.settings->Get(ANDROID_NOISE_REDUCTION_MODE, &entry) != OK ||
+      *entry.data.u8 != ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
+    ALOGI("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ", __FUNCTION__);
+    return false;
+  }
+
+  if (request.settings->Get(ANDROID_EDGE_MODE, &entry) != OK ||
+      *entry.data.u8 != ANDROID_EDGE_MODE_HIGH_QUALITY) {
+    ALOGI("%s: ANDROID_EDGE_MODE is not HQ", __FUNCTION__);
+    return false;
+  }
+
+  if (request.settings->Get(ANDROID_CONTROL_EFFECT_MODE, &entry) != OK ||
+      *entry.data.u8 != ANDROID_CONTROL_EFFECT_MODE_OFF) {
+    ALOGI("%s: ANDROID_CONTROL_EFFECT_MODE is not off", __FUNCTION__);
+    return false;
+  }
+
+  if (request.settings->Get(ANDROID_TONEMAP_MODE, &entry) != OK ||
+      *entry.data.u8 != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
+    ALOGI("%s: ANDROID_TONEMAP_MODE is not HQ", __FUNCTION__);
+    return false;
+  }
+
+  return true;
+}
+}  // namespace
+
+// Creates the proprietary SnapshotProcessBlock. With GCH_HWL_USE_DLOPEN, the
+// external process-block libraries under kExternalProcessBlockDir are probed
+// via dlopen/dlsym until one whose factory reports "SnapshotProcessBlock" is
+// found; the matching handle is kept in snapshot_process_block_lib_handle_
+// (closed in the destructor). Otherwise the statically linked
+// GetSnapshotProcessBlockFactory is used. Returns nullptr on failure.
+std::unique_ptr<ProcessBlock>
+ZslSnapshotCaptureSession::CreateSnapshotProcessBlock() {
+  ATRACE_CALL();
+#if GCH_HWL_USE_DLOPEN
+  bool found_process_block = false;
+  for (const auto& lib_path :
+       utils::FindLibraryPaths(kExternalProcessBlockDir)) {
+    ALOGI("%s: Loading %s", __FUNCTION__, lib_path.c_str());
+    void* lib_handle = nullptr;
+    lib_handle = dlopen(lib_path.c_str(), RTLD_NOW);
+    if (lib_handle == nullptr) {
+      ALOGW("Failed loading %s.", lib_path.c_str());
+      continue;
+    }
+
+    GetProcessBlockFactoryFunc external_process_block_t =
+        reinterpret_cast<GetProcessBlockFactoryFunc>(
+            dlsym(lib_handle, "GetProcessBlockFactory"));
+    if (external_process_block_t == nullptr) {
+      ALOGE("%s: dlsym failed (%s) when loading %s.", __FUNCTION__,
+            "GetProcessBlockFactoryFunc", lib_path.c_str());
+      dlclose(lib_handle);
+      lib_handle = nullptr;
+      continue;
+    }
+
+    if (external_process_block_t()->GetBlockName() == "SnapshotProcessBlock") {
+      snapshot_process_block_factory_ = external_process_block_t;
+      snapshot_process_block_lib_handle_ = lib_handle;
+      found_process_block = true;
+      break;
+    }
+    // NOTE(review): non-matching libraries that pass the dlsym check are not
+    // dlclose()d here; confirm whether keeping them loaded is intentional.
+  }
+  if (!found_process_block) {
+    ALOGE("%s: snapshot process block does not exist", __FUNCTION__);
+    return nullptr;
+  }
+
+  return snapshot_process_block_factory_()->CreateProcessBlock(
+      camera_device_session_hwl_);
+#else
+  if (GetSnapshotProcessBlockFactory == nullptr) {
+    ALOGE("%s: snapshot process block does not exist", __FUNCTION__);
+    return nullptr;
+  }
+  snapshot_process_block_factory_ = GetSnapshotProcessBlockFactory;
+  return GetSnapshotProcessBlockFactory()->CreateProcessBlock(
+      camera_device_session_hwl_);
+#endif
+}
+
+// Returns true when this capture session can serve the configuration: the
+// camera advertises kSwDenoiseEnabled, all streams are logical (not
+// physical), and the set of streams is exactly one preview stream plus at
+// least one JPEG and/or YUV snapshot stream.
+bool ZslSnapshotCaptureSession::IsStreamConfigurationSupported(
+    CameraDeviceSessionHwl* device_session_hwl,
+    const StreamConfiguration& stream_config) {
+  ATRACE_CALL();
+  if (device_session_hwl == nullptr) {
+    ALOGE("%s: device_session_hwl is nullptr", __FUNCTION__);
+    return false;
+  }
+
+  std::unique_ptr<HalCameraMetadata> characteristics;
+  status_t res = device_session_hwl->GetCameraCharacteristics(&characteristics);
+  if (res != OK) {
+    ALOGE("%s: GetCameraCharacteristics failed.", __FUNCTION__);
+    return false;
+  }
+
+  camera_metadata_ro_entry entry;
+  res = characteristics->Get(VendorTagIds::kSwDenoiseEnabled, &entry);
+  // NOTE(review): entry.data.u8[0] is read without checking entry.count;
+  // confirm Get() guarantees a non-empty entry on OK.
+  if (res != OK || entry.data.u8[0] != 1) {
+    ALOGE("%s: Software denoised not enabled", __FUNCTION__);
+    return false;
+  }
+
+  bool has_jpeg_stream = false;
+  bool has_preview_stream = false;
+  bool has_yuv_stream = false;
+  for (const auto& stream : stream_config.streams) {
+    if (stream.is_physical_camera_stream) {
+      ALOGE("%s: support logical camera only", __FUNCTION__);
+      return false;
+    }
+    if (utils::IsJPEGSnapshotStream(stream)) {
+      has_jpeg_stream = true;
+    } else if (utils::IsPreviewStream(stream)) {
+      has_preview_stream = true;
+    } else if (utils::IsYUVSnapshotStream(stream)) {
+      has_yuv_stream = true;
+    } else {
+      ALOGE("%s: only support preview + (snapshot and/or YUV) streams",
+            __FUNCTION__);
+      return false;
+    }
+  }
+  if (!has_jpeg_stream && !has_yuv_stream) {
+    ALOGE("%s: no JPEG or YUV stream", __FUNCTION__);
+    return false;
+  }
+
+  if (!has_preview_stream) {
+    ALOGE("%s: no preview stream", __FUNCTION__);
+    return false;
+  }
+
+  ALOGD("%s: ZslSnapshotCaptureSession supports the stream config",
+        __FUNCTION__);
+  return true;
+}
+
+// Factory: constructs a ZslSnapshotCaptureSession and runs Initialize(),
+// which builds the realtime and snapshot process chains. Returns nullptr if
+// initialization fails.
+std::unique_ptr<CaptureSession> ZslSnapshotCaptureSession::Create(
+    const StreamConfiguration& stream_config,
+    const std::vector<ExternalCaptureSessionFactory*>&
+        external_capture_session_entries,
+    const std::vector<CaptureSessionEntryFuncs>& capture_session_entries,
+    HwlSessionCallback hwl_session_callback,
+    CameraBufferAllocatorHwl* camera_buffer_allocator_hwl,
+    CameraDeviceSessionHwl* camera_device_session_hwl,
+    std::vector<HalStream>* hal_configured_streams,
+    ProcessCaptureResultFunc process_capture_result, NotifyFunc notify) {
+  ATRACE_CALL();
+  auto session =
+      std::unique_ptr<ZslSnapshotCaptureSession>(new ZslSnapshotCaptureSession(
+          external_capture_session_entries, capture_session_entries,
+          hwl_session_callback, camera_buffer_allocator_hwl,
+          camera_device_session_hwl));
+  if (session == nullptr) {
+    ALOGE("%s: Creating ZslSnapshotCaptureSession failed.", __FUNCTION__);
+    return nullptr;
+  }
+
+  status_t res = session->Initialize(camera_device_session_hwl, stream_config,
+                                     process_capture_result, notify,
+                                     hal_configured_streams);
+  if (res != OK) {
+    ALOGE("%s: Initializing ZslSnapshotCaptureSession failed: %s (%d).",
+          __FUNCTION__, strerror(-res), res);
+    return nullptr;
+  }
+  return session;
+}
+
+// Tears down HWL pipelines, then releases the snapshot request processor
+// (and with it the SnapshotProcessBlock) BEFORE unloading the library that
+// provides the block's code.
+ZslSnapshotCaptureSession::~ZslSnapshotCaptureSession() {
+  if (camera_device_session_hwl_ != nullptr) {
+    camera_device_session_hwl_->DestroyPipelines();
+  }
+  // Need to explicitly release SnapshotProcessBlock by releasing
+  // SnapshotRequestProcessor before the lib handle is released.
+  snapshot_request_processor_ = nullptr;
+  // NOTE(review): if the handle was never set (dlopen path failed or the
+  // non-dlopen build), this passes a value not obtained from dlopen to
+  // dlclose(), which POSIX leaves undefined — consider a null check.
+  dlclose(snapshot_process_block_lib_handle_);
+}
+
+// Finalizes stream setup after both process blocks are configured: bumps
+// max_buffers on the internal (additional) YUV stream and pre-allocates its
+// internal buffers via the stream manager.
+status_t ZslSnapshotCaptureSession::BuildPipelines(
+    ProcessBlock* process_block, ProcessBlock* snapshot_process_block,
+    std::vector<HalStream>* hal_configured_streams) {
+  ATRACE_CALL();
+  if (process_block == nullptr || hal_configured_streams == nullptr ||
+      snapshot_process_block == nullptr) {
+    ALOGE(
+        "%s: process_block (%p) or hal_configured_streams (%p) or "
+        "snapshot_process_block (%p) is nullptr",
+        __FUNCTION__, process_block, hal_configured_streams,
+        snapshot_process_block);
+    return BAD_VALUE;
+  }
+
+  status_t res;
+
+  // NOTE(review): snapshot_hal_configured_streams is fetched only to verify
+  // the call succeeds; the vector itself is not used afterwards.
+  std::vector<HalStream> snapshot_hal_configured_streams;
+  res = snapshot_process_block->GetConfiguredHalStreams(
+      &snapshot_hal_configured_streams);
+  if (res != OK) {
+    ALOGE("%s: Getting snapshot HAL streams failed: %s(%d)", __FUNCTION__,
+          strerror(-res), res);
+    return res;
+  }
+
+  for (uint32_t i = 0; i < hal_configured_streams->size(); i++) {
+    if (hal_configured_streams->at(i).id == additional_stream_id_) {
+      // Reserve additional buffer(s).
+      hal_configured_streams->at(i).max_buffers += kAdditionalBufferNumber;
+      // Allocate internal YUV stream buffers
+      res = internal_stream_manager_->AllocateBuffers(
+          hal_configured_streams->at(i),
+          /*additional_num_buffers=*/kAdditionalBufferNumber);
+      if (res != OK) {
+        ALOGE("%s: AllocateBuffers failed.", __FUNCTION__);
+        return UNKNOWN_ERROR;
+      }
+      break;
+    }
+  }
+
+  return OK;
+}
+
+// Configures the realtime chain's streams. The request processor may append
+// an internal YUV stream to the configuration; the one stream present in
+// process_block_stream_config but absent from stream_config is identified as
+// additional_stream_id (out-param). Also creates and wires the realtime ZSL
+// result processor and marks the internal stream's gralloc usage.
+status_t ZslSnapshotCaptureSession::ConfigureStreams(
+    const StreamConfiguration& stream_config,
+    RequestProcessor* request_processor, ProcessBlock* process_block,
+    ProcessCaptureResultFunc process_capture_result, NotifyFunc notify,
+    int32_t& additional_stream_id) {
+  ATRACE_CALL();
+  if (request_processor == nullptr || process_block == nullptr) {
+    ALOGE("%s: request_processor (%p) or process_block (%p) is nullptr",
+          __FUNCTION__, request_processor, process_block);
+    return BAD_VALUE;
+  }
+  StreamConfiguration process_block_stream_config;
+  // Configure streams for request processor
+  status_t res = request_processor->ConfigureStreams(
+      internal_stream_manager_.get(), stream_config,
+      &process_block_stream_config);
+  if (res != OK) {
+    ALOGE("%s: Configuring stream for request processor failed.", __FUNCTION__);
+    return res;
+  }
+
+  // Check all streams are configured.
+  if (stream_config.streams.size() > process_block_stream_config.streams.size()) {
+    ALOGE("%s: stream_config has %zu streams but only configured %zu streams",
+          __FUNCTION__, stream_config.streams.size(),
+          process_block_stream_config.streams.size());
+    return UNKNOWN_ERROR;
+  }
+  for (auto& stream : stream_config.streams) {
+    bool found = false;
+    for (auto& configured_stream : process_block_stream_config.streams) {
+      if (stream.id == configured_stream.id) {
+        found = true;
+        break;
+      }
+    }
+
+    if (!found) {
+      ALOGE("%s: Cannot find stream %u in configured streams.", __FUNCTION__,
+            stream.id);
+      return UNKNOWN_ERROR;
+    }
+  }
+
+  // The stream that the request processor added (not in the framework config)
+  // is the internal YUV stream.
+  for (auto& stream : process_block_stream_config.streams) {
+    bool found = false;
+    for (auto& configured_stream : stream_config.streams) {
+      if (stream.id == configured_stream.id) {
+        found = true;
+        break;
+      }
+    }
+    if (!found) {
+      additional_stream_id = stream.id;
+      break;
+    }
+  }
+
+  if (additional_stream_id == -1) {
+    ALOGE("%s: Configuring stream fail due to wrong additional_stream_id",
+          __FUNCTION__);
+    return UNKNOWN_ERROR;
+  }
+
+  // Create preview result processor. Stream ID is not set at this stage.
+  auto realtime_result_processor = RealtimeZslResultProcessor::Create(
+      internal_stream_manager_.get(), additional_stream_id,
+      HAL_PIXEL_FORMAT_YCBCR_420_888, partial_result_count_);
+  if (realtime_result_processor == nullptr) {
+    ALOGE("%s: Creating RealtimeZslResultProcessor failed.", __FUNCTION__);
+    return UNKNOWN_ERROR;
+  }
+  // Keep a raw observer; ownership moves into the process block below.
+  realtime_result_processor_ = realtime_result_processor.get();
+  realtime_result_processor->SetResultCallback(process_capture_result, notify);
+
+  res = process_block->SetResultProcessor(std::move(realtime_result_processor));
+  if (res != OK) {
+    ALOGE("%s: Setting result process in process block failed.", __FUNCTION__);
+    return res;
+  }
+
+  // Configure streams for process block.
+  res = process_block->ConfigureStreams(process_block_stream_config,
+                                        stream_config);
+  if (res != OK) {
+    ALOGE("%s: Configuring stream for process block failed.", __FUNCTION__);
+    return res;
+  }
+
+  for (auto& hal_stream : *hal_config_) {
+    if (hal_stream.id == additional_stream_id) {
+      // Set the producer usage so that the buffer will be 64 byte aligned.
+      hal_stream.producer_usage |=
+          (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_OFTEN);
+    }
+  }
+
+  return OK;
+}
+
+// Configures streams for the snapshot chain: first through the snapshot
+// request processor (which adds the internal YUV input stream), then through
+// the snapshot process block. Requires both to be set already.
+status_t ZslSnapshotCaptureSession::ConfigureSnapshotStreams(
+    const StreamConfiguration& stream_config) {
+  ATRACE_CALL();
+  if (snapshot_process_block_ == nullptr ||
+      snapshot_request_processor_ == nullptr) {
+    ALOGE(
+        "%s: snapshot_process_block_ or snapshot_request_processor_ is nullptr",
+        __FUNCTION__);
+    return BAD_VALUE;
+  }
+
+  StreamConfiguration process_block_stream_config;
+  // Configure streams for request processor
+  status_t res = snapshot_request_processor_->ConfigureStreams(
+      internal_stream_manager_.get(), stream_config,
+      &process_block_stream_config);
+  if (res != OK) {
+    ALOGE("%s: Configuring stream for request processor failed.", __FUNCTION__);
+    return res;
+  }
+
+  // Configure streams for snapshot process block.
+  res = snapshot_process_block_->ConfigureStreams(process_block_stream_config,
+                                                  stream_config);
+  if (res != OK) {
+    ALOGE("%s: Configuring snapshot stream for process block failed.",
+          __FUNCTION__);
+    return res;
+  }
+
+  return OK;
+}
+
+// Builds the snapshot chain: SnapshotProcessBlock, SnapshotRequestProcessor,
+// and SnapshotResultProcessor, wires them together, installs the result
+// callbacks, and configures the snapshot streams. Must run once, before any
+// member of the chain is set.
+status_t ZslSnapshotCaptureSession::SetupSnapshotProcessChain(
+    const StreamConfiguration& stream_config,
+    ProcessCaptureResultFunc process_capture_result, NotifyFunc notify) {
+  ATRACE_CALL();
+  if (snapshot_process_block_ != nullptr ||
+      snapshot_result_processor_ != nullptr ||
+      snapshot_request_processor_ != nullptr) {
+    ALOGE(
+        "%s: snapshot_process_block_(%p) or snapshot_result_processor_(%p) or "
+        "snapshot_request_processor_(%p) is/are "
+        "already set",
+        __FUNCTION__, snapshot_process_block_, snapshot_result_processor_,
+        snapshot_request_processor_.get());
+    return BAD_VALUE;
+  }
+
+  std::unique_ptr<ProcessBlock> snapshot_process_block =
+      CreateSnapshotProcessBlock();
+  if (snapshot_process_block == nullptr) {
+    ALOGE("%s: Creating SnapshotProcessBlock failed.", __FUNCTION__);
+    return UNKNOWN_ERROR;
+  }
+  // Raw observer; ownership transfers to the request processor below.
+  snapshot_process_block_ = snapshot_process_block.get();
+
+  snapshot_request_processor_ = SnapshotRequestProcessor::Create(
+      camera_device_session_hwl_, hwl_session_callback_, additional_stream_id_);
+  if (snapshot_request_processor_ == nullptr) {
+    ALOGE("%s: Creating SnapshotRequestProcessor failed.", __FUNCTION__);
+    return UNKNOWN_ERROR;
+  }
+
+  std::unique_ptr<SnapshotResultProcessor> snapshot_result_processor =
+      SnapshotResultProcessor::Create(internal_stream_manager_.get(),
+                                      additional_stream_id_);
+  if (snapshot_result_processor == nullptr) {
+    ALOGE("%s: Creating SnapshotResultProcessor failed.", __FUNCTION__);
+    return UNKNOWN_ERROR;
+  }
+  snapshot_result_processor_ = snapshot_result_processor.get();
+
+  status_t res = snapshot_request_processor_->SetProcessBlock(
+      std::move(snapshot_process_block));
+  if (res != OK) {
+    ALOGE("%s: Setting process block for RequestProcessor failed: %s(%d)",
+          __FUNCTION__, strerror(-res), res);
+    return res;
+  }
+
+  res = snapshot_process_block_->SetResultProcessor(
+      std::move(snapshot_result_processor));
+  // NOTE(review): res from SetResultProcessor is never checked here, unlike
+  // every other call in this function — confirm whether an error path is
+  // needed before proceeding to SetResultCallback.
+
+  snapshot_result_processor_->SetResultCallback(process_capture_result, notify);
+  res = ConfigureSnapshotStreams(stream_config);
+  if (res != OK) {
+    ALOGE("%s: Configuring snapshot stream failed: %s(%d)", __FUNCTION__,
+          strerror(-res), res);
+    return res;
+  }
+  return OK;
+}
+
+// Builds the realtime (preview + ZSL capture) chain: a
+// CaptureSessionWrapperProcessBlock and a RealtimeZslRequestProcessor, then
+// configures streams, which also determines additional_stream_id_ and
+// creates the realtime result processor.
+status_t ZslSnapshotCaptureSession::SetupRealtimeProcessChain(
+    const StreamConfiguration& stream_config,
+    ProcessCaptureResultFunc process_capture_result, NotifyFunc notify) {
+  ATRACE_CALL();
+  if (realtime_process_block_ != nullptr ||
+      realtime_result_processor_ != nullptr ||
+      realtime_request_processor_ != nullptr) {
+    ALOGE(
+        "%s: realtime_process_block_(%p) or realtime_result_processor_(%p) or "
+        "realtime_request_processor_(%p) is/are "
+        "already set",
+        __FUNCTION__, realtime_process_block_, realtime_result_processor_,
+        realtime_request_processor_.get());
+    return BAD_VALUE;
+  }
+
+  // Create process block
+  auto realtime_process_block = CaptureSessionWrapperProcessBlock::Create(
+      external_capture_session_entries_, capture_session_entries_,
+      hwl_session_callback_, camera_buffer_allocator_hwl_,
+      camera_device_session_hwl_, hal_config_);
+  if (realtime_process_block == nullptr) {
+    ALOGE("%s: Creating RealtimeProcessBlock failed.", __FUNCTION__);
+    return UNKNOWN_ERROR;
+  }
+  // Raw observer; ownership transfers to the request processor below.
+  realtime_process_block_ = realtime_process_block.get();
+
+  // Create realtime request processor.
+  realtime_request_processor_ = RealtimeZslRequestProcessor::Create(
+      camera_device_session_hwl_, HAL_PIXEL_FORMAT_YCBCR_420_888);
+  if (realtime_request_processor_ == nullptr) {
+    ALOGE("%s: Creating RealtimeZslRequestProcessor failed.", __FUNCTION__);
+    return UNKNOWN_ERROR;
+  }
+
+  // realtime result processor will be created inside ConfigureStreams when the
+  // additional stream id is determined.
+
+  status_t res = realtime_request_processor_->SetProcessBlock(
+      std::move(realtime_process_block));
+  if (res != OK) {
+    ALOGE("%s: Setting process block for RequestProcessor failed: %s(%d)",
+          __FUNCTION__, strerror(-res), res);
+    return res;
+  }
+
+  res = ConfigureStreams(stream_config, realtime_request_processor_.get(),
+                         realtime_process_block_, process_capture_result,
+                         notify, additional_stream_id_);
+  if (res != OK) {
+    ALOGE("%s: Configuring stream failed: %s(%d)", __FUNCTION__, strerror(-res),
+          res);
+    return res;
+  }
+  return OK;
+}
+
+// Filters hal_configured_streams in place so only streams present in the
+// framework's stream_config remain, hiding internal streams (e.g. the
+// internal ZSL YUV stream) from the framework.
+status_t ZslSnapshotCaptureSession::PurgeHalConfiguredStream(
+    const StreamConfiguration& stream_config,
+    std::vector<HalStream>* hal_configured_streams) {
+  if (hal_configured_streams == nullptr) {
+    ALOGE("%s: HAL configured stream list is null.", __FUNCTION__);
+    return BAD_VALUE;
+  }
+
+  std::set<int32_t> framework_stream_id_set;
+  for (auto& stream : stream_config.streams) {
+    framework_stream_id_set.insert(stream.id);
+  }
+
+  std::vector<HalStream> configured_streams;
+  for (auto& hal_stream : *hal_configured_streams) {
+    if (framework_stream_id_set.find(hal_stream.id) !=
+        framework_stream_id_set.end()) {
+      configured_streams.push_back(hal_stream);
+    }
+  }
+  *hal_configured_streams = configured_streams;
+  return OK;
+}
+
+// Stores references/pointers to caller-owned session entries, callbacks, and
+// HWL objects; all real setup happens in Initialize().
+ZslSnapshotCaptureSession::ZslSnapshotCaptureSession(
+    const std::vector<ExternalCaptureSessionFactory*>&
+        external_capture_session_entries,
+    const std::vector<CaptureSessionEntryFuncs>& capture_session_entries,
+    HwlSessionCallback hwl_session_callback,
+    CameraBufferAllocatorHwl* camera_buffer_allocator_hwl,
+    CameraDeviceSessionHwl* camera_device_session_hwl)
+    : external_capture_session_entries_(external_capture_session_entries),
+      capture_session_entries_(capture_session_entries),
+      hwl_session_callback_(hwl_session_callback),
+      camera_buffer_allocator_hwl_(camera_buffer_allocator_hwl),
+      camera_device_session_hwl_(camera_device_session_hwl) {
+}
+
+status_t ZslSnapshotCaptureSession::Initialize(
+    CameraDeviceSessionHwl* camera_device_session_hwl,
+    const StreamConfiguration& stream_config,
+    ProcessCaptureResultFunc process_capture_result, NotifyFunc notify,
+    std::vector<HalStream>* hal_configured_streams) {
+  ATRACE_CALL();
+  if (!IsStreamConfigurationSupported(camera_device_session_hwl, stream_config)) {
+    ALOGE("%s: stream configuration is not supported.", __FUNCTION__);
+    return BAD_VALUE;
+  }
+
+  std::unique_ptr<HalCameraMetadata> characteristics;
+  status_t res =
+      camera_device_session_hwl->GetCameraCharacteristics(&characteristics);
+  if (res != OK) {
+    ALOGE("%s: GetCameraCharacteristics failed.", __FUNCTION__);
+    return BAD_VALUE;
+  }
+
+  for (auto stream : stream_config.streams) {
+    if (utils::IsPreviewStream(stream)) {
+      hal_preview_stream_id_ = stream.id;
+      break;
+    }
+  }
+  camera_device_session_hwl_ = camera_device_session_hwl;
+  hal_config_ = hal_configured_streams;
+
+  // Create result dispatcher
+  partial_result_count_ = kPartialResult;
+  camera_metadata_ro_entry partial_result_entry;
+  res = characteristics->Get(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
+                             &partial_result_entry);
+  if (res == OK) {
+    partial_result_count_ = partial_result_entry.data.i32[0];
+  }
+  result_dispatcher_ = ResultDispatcher::Create(partial_result_count_,
+                                                process_capture_result, notify);
+  if (result_dispatcher_ == nullptr) {
+    ALOGE("%s: Cannot create result dispatcher.", __FUNCTION__);
+    return UNKNOWN_ERROR;
+  }
+
+  internal_stream_manager_ = InternalStreamManager::Create(
+      /*buffer_allocator=*/nullptr, partial_result_count_);
+  if (internal_stream_manager_ == nullptr) {
+    ALOGE("%s: Cannot create internal stream manager.", __FUNCTION__);
+    return UNKNOWN_ERROR;
+  }
+
+  device_session_notify_ = notify;
+  process_capture_result_ =
+      ProcessCaptureResultFunc([this](std::unique_ptr<CaptureResult> result) {
+        ProcessCaptureResult(std::move(result));
+      });
+  notify_ = NotifyFunc(
+      [this](const NotifyMessage& message) { NotifyHalMessage(message); });
+
+  // Setup and connect realtime process chain
+  res = SetupRealtimeProcessChain(stream_config, process_capture_result_,
+                                  notify_);
+  if (res != OK) {
+    ALOGE("%s: SetupRealtimeProcessChain fail: %s(%d)", __FUNCTION__,
+          strerror(-res), res);
+    return res;
+  }
+
+  // Setup snapshot process chain
+  res = SetupSnapshotProcessChain(stream_config, process_capture_result_,
+                                  notify_);
+  if (res != OK) {
+    ALOGE("%s: SetupSnapshotProcessChain fail: %s(%d)", __FUNCTION__,
+          strerror(-res), res);
+    return res;
+  }
+
+  // Realtime and snapshot streams are configured
+  // Start to build pipeline
+  res = BuildPipelines(realtime_process_block_, snapshot_process_block_,
+                       hal_configured_streams);
+  if (res != OK) {
+    ALOGE("%s: Building pipelines failed: %s(%d)", __FUNCTION__, strerror(-res),
+          res);
+    return res;
+  }
+
+  res = PurgeHalConfiguredStream(stream_config, hal_configured_streams);
+  if (res != OK) {
+    ALOGE("%s: Removing internal streams from configured stream failed: %s(%d)",
+          __FUNCTION__, strerror(-res), res);
+    return res;
+  }
+
+  if (res != OK) {
+    ALOGE("%s: Connecting process chain failed: %s(%d)", __FUNCTION__,
+          strerror(-res), res);
+    return res;
+  }
+
+  return OK;
+}
+
+status_t ZslSnapshotCaptureSession::ProcessRequest(const CaptureRequest& request) {
+  ATRACE_CALL();
+  status_t res = result_dispatcher_->AddPendingRequest(request);
+  if (res != OK) {
+    ALOGE("%s: frame(%d) fail to AddPendingRequest", __FUNCTION__,
+          request.frame_number);
+    return BAD_VALUE;
+  }
+  if (IsSwDenoiseSnapshotCompatible(request)) {
+    res = snapshot_request_processor_->ProcessRequest(request);
+    if (res != OK) {
+      ALOGW(
+          "%s: frame (%d) fall back to real time request for snapshot: %s (%d)",
+          __FUNCTION__, request.frame_number, strerror(-res), res);
+      res = realtime_request_processor_->ProcessRequest(request);
+    }
+  } else {
+    res = realtime_request_processor_->ProcessRequest(request);
+  }
+
+  if (res != OK) {
+    ALOGE("%s: ProcessRequest (%d) fail and remove pending request",
+          __FUNCTION__, request.frame_number);
+    result_dispatcher_->RemovePendingRequest(request.frame_number);
+  }
+  return res;
+}
+
+status_t ZslSnapshotCaptureSession::Flush() {
+  ATRACE_CALL();
+  return realtime_request_processor_->Flush();
+}
+
+void ZslSnapshotCaptureSession::ProcessCaptureResult(
+    std::unique_ptr<CaptureResult> result) {
+  ATRACE_CALL();
+  std::lock_guard<std::mutex> lock(callback_lock_);
+  if (result == nullptr) {
+    return;
+  }
+
+  if (result->result_metadata) {
+    camera_device_session_hwl_->FilterResultMetadata(
+        result->result_metadata.get());
+  }
+
+  status_t res = result_dispatcher_->AddResult(std::move(result));
+  if (res != OK) {
+    ALOGE("%s: fail to AddResult", __FUNCTION__);
+    return;
+  }
+}
+
+void ZslSnapshotCaptureSession::NotifyHalMessage(const NotifyMessage& message) {
+  ATRACE_CALL();
+  std::lock_guard<std::mutex> lock(callback_lock_);
+  if (device_session_notify_ == nullptr) {
+    ALOGE("%s: device_session_notify_ is nullptr. Dropping a message.",
+          __FUNCTION__);
+    return;
+  }
+
+  if (message.type == MessageType::kShutter) {
+    status_t res =
+        result_dispatcher_->AddShutter(message.message.shutter.frame_number,
+                                       message.message.shutter.timestamp_ns);
+    if (res != OK) {
+      ALOGE("%s: AddShutter for frame %u failed: %s (%d).", __FUNCTION__,
+            message.message.shutter.frame_number, strerror(-res), res);
+      return;
+    }
+  } else if (message.type == MessageType::kError) {
+    status_t res = result_dispatcher_->AddError(message.message.error);
+    if (res != OK) {
+      ALOGE("%s: AddError for frame %u failed: %s (%d).", __FUNCTION__,
+            message.message.error.frame_number, strerror(-res), res);
+      return;
+    }
+  } else {
+    ALOGW("%s: Unsupported message type: %u", __FUNCTION__, message.type);
+    device_session_notify_(message);
+  }
+}
+
+}  // namespace google_camera_hal
+}  // namespace android
diff --git a/common/hal/google_camera_hal/zsl_snapshot_capture_session.h b/common/hal/google_camera_hal/zsl_snapshot_capture_session.h
new file mode 100644
index 0000000..dab4515
--- /dev/null
+++ b/common/hal/google_camera_hal/zsl_snapshot_capture_session.h
@@ -0,0 +1,191 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_ZSL_SNAPSHOT_CAPTURE_SESSION_H_
+#define HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_ZSL_SNAPSHOT_CAPTURE_SESSION_H_
+
+#include "camera_buffer_allocator_hwl.h"
+#include "camera_device_session_hwl.h"
+#include "capture_session.h"
+#include "capture_session_utils.h"
+#include "capture_session_wrapper_process_block.h"
+#include "hwl_types.h"
+#include "process_block.h"
+#include "realtime_zsl_request_processor.h"
+#include "realtime_zsl_result_processor.h"
+#include "request_processor.h"
+#include "result_dispatcher.h"
+#include "result_processor.h"
+#include "snapshot_request_processor.h"
+#include "snapshot_result_processor.h"
+
+namespace android {
+namespace google_camera_hal {
+
+class CameraDeviceSession;
+
+// ZslSnapshotCaptureSession implements a CaptureSession that contains two
+// process chains
+//
+//  1.SnapshotRequestProcessor->SnapshotProcessBlock->SnapshotResultProcessor
+//
+//  2.RealtimeZslRequestProcessor->CaptureSessionWrapperProcessBlock->RealtimeZslResultProcessor
+//                                    ||  /\
+//                                    \/  ||
+//                             embedded capture session
+class ZslSnapshotCaptureSession : public CaptureSession {
+ public:
+  // Return if the device session HWL and stream configuration are supported.
+  static bool IsStreamConfigurationSupported(
+      CameraDeviceSessionHwl* device_session_hwl,
+      const StreamConfiguration& stream_config);
+
+  // Create a ZslSnapshotCaptureSession.
+  //
+  // device_session_hwl is owned by the caller and must be valid during the
+  // lifetime of ZslSnapshotCaptureSession.
+  // stream_config is the stream configuration.
+  // process_capture_result is the callback function to notify results.
+  // notify is the callback function to notify messages.
+  // hal_configured_streams will be filled with HAL configured streams.
+  // camera_allocator_hwl is owned by the caller and must be valid during the
+  // lifetime of ZslSnapshotCaptureSession.
+  static std::unique_ptr<CaptureSession> Create(
+      const StreamConfiguration& stream_config,
+      const std::vector<ExternalCaptureSessionFactory*>&
+          external_capture_session_entries,
+      const std::vector<CaptureSessionEntryFuncs>& capture_session_entries,
+      HwlSessionCallback hwl_session_callback,
+      CameraBufferAllocatorHwl* camera_buffer_allocator_hwl,
+      CameraDeviceSessionHwl* camera_device_session_hwl,
+      std::vector<HalStream>* hal_configured_streams,
+      ProcessCaptureResultFunc process_capture_result, NotifyFunc notify);
+
+  virtual ~ZslSnapshotCaptureSession();
+
+  // Override functions in CaptureSession start.
+  status_t ProcessRequest(const CaptureRequest& request) override;
+
+  status_t Flush() override;
+  // Override functions in CaptureSession end.
+
+ protected:
+  ZslSnapshotCaptureSession(
+      const std::vector<ExternalCaptureSessionFactory*>&
+          external_capture_session_entries,
+      const std::vector<CaptureSessionEntryFuncs>& capture_session_entries,
+      HwlSessionCallback hwl_session_callback,
+      CameraBufferAllocatorHwl* camera_buffer_allocator_hwl,
+      CameraDeviceSessionHwl* camera_device_session_hwl);
+
+ private:
+  static constexpr uint32_t kPartialResult = 1;
+  static constexpr int kAdditionalBufferNumber = 3;
+
+  status_t Initialize(CameraDeviceSessionHwl* device_session_hwl,
+                      const StreamConfiguration& stream_config,
+                      ProcessCaptureResultFunc process_capture_result,
+                      NotifyFunc notify,
+                      std::vector<HalStream>* hal_configured_streams);
+
+  status_t SetupSnapshotProcessChain(
+      const StreamConfiguration& stream_config,
+      ProcessCaptureResultFunc process_capture_result, NotifyFunc notify);
+
+  status_t SetupRealtimeProcessChain(
+      const StreamConfiguration& stream_config,
+      ProcessCaptureResultFunc process_capture_result, NotifyFunc notify);
+
+  status_t ConfigureSnapshotStreams(const StreamConfiguration& stream_config);
+
+  // Configure streams for request processor and process block.
+  status_t ConfigureStreams(const StreamConfiguration& stream_config,
+                            RequestProcessor* request_processor,
+                            ProcessBlock* process_block,
+                            ProcessCaptureResultFunc process_capture_result,
+                            NotifyFunc notify, int32_t& additional_stream_id);
+
+  // Build pipelines and return HAL configured streams.
+  status_t BuildPipelines(ProcessBlock* process_block,
+                          ProcessBlock* snapshot_process_block,
+                          std::vector<HalStream>* hal_configured_streams);
+
+  status_t PurgeHalConfiguredStream(
+      const StreamConfiguration& stream_config,
+      std::vector<HalStream>* hal_configured_streams);
+
+  std::unique_ptr<ProcessBlock> CreateSnapshotProcessBlock();
+
+  // Invoked when receiving a result from result processor.
+  void ProcessCaptureResult(std::unique_ptr<CaptureResult> result);
+
+  // Invoked when receiving a message from result processor.
+  void NotifyHalMessage(const NotifyMessage& message);
+
+  std::unique_ptr<InternalStreamManager> internal_stream_manager_;
+
+  std::unique_ptr<RealtimeZslRequestProcessor> realtime_request_processor_;
+  // CaptureSessionWrapperProcessBlock will be owned and released by
+  // RealtimeZslRequestProcessor.
+  CaptureSessionWrapperProcessBlock* realtime_process_block_ = nullptr;
+  // RealtimeZslResultProcessor will be owned and released by
+  // CaptureSessionWrapperProcessBlock.
+  RealtimeZslResultProcessor* realtime_result_processor_ = nullptr;
+
+  std::unique_ptr<SnapshotRequestProcessor> snapshot_request_processor_;
+  // SnapshotProcessBlock will be owned and released by
+  // SnapshotRequestProcessor.
+  ProcessBlock* snapshot_process_block_ = nullptr;
+  // SnapshotResultProcessor will be owned and released by SnapshotProcessBlock.
+  SnapshotResultProcessor* snapshot_result_processor_ = nullptr;
+
+  // Use this stream id to check the request is ZSL compatible
+  int32_t hal_preview_stream_id_ = -1;
+
+  int32_t additional_stream_id_ = -1;
+
+  std::unique_ptr<ResultDispatcher> result_dispatcher_;
+
+  std::mutex callback_lock_;
+  // The following callbacks must be protected by callback_lock_.
+  ProcessCaptureResultFunc process_capture_result_;
+  NotifyFunc notify_;
+  // For error notify to framework directly
+  NotifyFunc device_session_notify_;
+
+  const std::vector<ExternalCaptureSessionFactory*>&
+      external_capture_session_entries_;
+  const std::vector<CaptureSessionEntryFuncs>& capture_session_entries_;
+  HwlSessionCallback hwl_session_callback_;
+  CameraBufferAllocatorHwl* camera_buffer_allocator_hwl_ = nullptr;
+  // device_session_hwl_ is owned by the client.
+  CameraDeviceSessionHwl* camera_device_session_hwl_ = nullptr;
+
+  std::vector<HalStream>* hal_config_ = nullptr;
+
+  using GetProcessBlockFactoryFunc = ExternalProcessBlockFactory* (*)();
+  GetProcessBlockFactoryFunc snapshot_process_block_factory_;
+  // Opened library handles that should be closed on destruction
+  void* snapshot_process_block_lib_handle_ = nullptr;
+
+  // Partial result count reported by HAL
+  uint32_t partial_result_count_ = 1;
+};
+
+}  // namespace google_camera_hal
+}  // namespace android
+
+#endif  // HARDWARE_GOOGLE_CAMERA_HAL_GOOGLE_CAMERA_HAL_ZSL_SNAPSHOT_CAPTURE_SESSION_H_
\ No newline at end of file
diff --git a/common/hal/hidl_service/Android.bp b/common/hal/hidl_service/Android.bp
index 69b2596..29f7203 100644
--- a/common/hal/hidl_service/Android.bp
+++ b/common/hal/hidl_service/Android.bp
@@ -15,18 +15,60 @@
  */
 
 package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "hardware_google_camera_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    //   SPDX-license-identifier-BSD
-    default_applicable_licenses: ["hardware_google_camera_license"],
+    default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
-cc_library_shared {
-    name: "android.hardware.camera.provider@2.6-impl-google",
-    defaults: ["google_camera_hal_defaults"],
+soong_config_module_type {
+    name: "gch_lazy_hal_cc_defaults",
+    module_type: "cc_defaults",
+    config_namespace: "gch",
+    bool_variables: ["use_lazy_hal"],
+    properties: ["enabled"],
+}
+
+gch_lazy_hal_cc_defaults {
+    name: "camera_service_eager_hal_defaults",
+    enabled: true,
+    soong_config_variables: {
+        use_lazy_hal: {
+            enabled: false,
+        },
+    },
+}
+
+gch_lazy_hal_cc_defaults {
+    name: "camera_service_lazy_hal_defaults",
+    enabled: false,
+    soong_config_variables: {
+        use_lazy_hal: {
+            enabled: true,
+        },
+    },
+}
+
+sh_binary_host {
+    name: "camera_hal_version_script",
+    src: "version_script.sh",
+    filename_from_src: true,
+}
+
+cc_genrule {
+    name: "hidl_camera_build_version",
+    tools: [":camera_hal_version_script"],
+    cmd: "$(location :camera_hal_version_script) $(in) $(out)",
+    vendor: true,
+    srcs: [
+        "hidl_camera_build_version.inl",
+    ],
+    out: ["hidl_camera_build_version.h"],
+}
+
+cc_defaults {
+    name: "camera_service_defaults",
+    defaults: [
+        "google_camera_hal_defaults",
+        "apex_update_listener_cc_defaults_static",
+    ],
     vendor: true,
     relative_install_path: "hw",
     srcs: [
@@ -36,6 +78,11 @@
         "hidl_profiler.cc",
         "hidl_thermal_utils.cc",
         "hidl_utils.cc",
+        "libc_wrappers.cc",
+        "service.cc",
+    ],
+    generated_headers: [
+        "hidl_camera_build_version",
     ],
     compile_multilib: "first",
     shared_libs: [
@@ -43,13 +90,18 @@
         "android.hardware.camera.device@3.3",
         "android.hardware.camera.device@3.4",
         "android.hardware.camera.device@3.5",
+        "android.hardware.camera.device@3.6",
+        "android.hardware.camera.device@3.7",
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
         "android.hardware.graphics.mapper@2.0",
         "android.hardware.graphics.mapper@3.0",
         "android.hardware.graphics.mapper@4.0",
         "android.hardware.thermal@2.0",
+        "libbinder",
+        "libbase",
         "libcamera_metadata",
         "libcutils",
         "libfmq",
@@ -60,45 +112,30 @@
         "libutils",
         "lib_profiler",
     ],
+    export_shared_lib_headers: [
+        "lib_profiler",
+    ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
     ],
+    vintf_fragments: ["android.hardware.camera.provider@2.7-service-google.xml"],
 }
 
-cc_defaults {
-    name: "camera_service_defaults",
-    defaults: ["google_camera_hal_defaults"],
-    vendor: true,
-    relative_install_path: "hw",
-    srcs: ["service.cc"],
-    compile_multilib: "first",
-    shared_libs: [
-        "android.hardware.camera.device@3.2",
-        "android.hardware.camera.device@3.3",
-        "android.hardware.camera.device@3.4",
-        "android.hardware.camera.device@3.5",
-        "android.hardware.camera.provider@2.4",
-        "android.hardware.camera.provider@2.5",
-        "android.hardware.camera.provider@2.6",
-        "android.hardware.graphics.mapper@2.0",
-        "android.hardware.graphics.mapper@3.0",
-        "libbinder",
-        "libhidlbase",
-        "liblog",
-        "libutils",
+cc_binary {
+    name: "android.hardware.camera.provider@2.7-service-google",
+    defaults: [
+        "camera_service_defaults",
+        "camera_service_eager_hal_defaults",
     ],
-    vintf_fragments: ["android.hardware.camera.provider@2.6-service-google.xml"]
+    init_rc: ["android.hardware.camera.provider@2.7-service-google.rc"],
 }
 
 cc_binary {
-    name: "android.hardware.camera.provider@2.6-service-google",
-    defaults: ["camera_service_defaults"],
-    init_rc: ["android.hardware.camera.provider@2.6-service-google.rc"],
-}
-
-cc_binary {
-    name: "android.hardware.camera.provider@2.6-service-google-lazy",
-    defaults: ["camera_service_defaults"],
-    init_rc: ["android.hardware.camera.provider@2.6-service-google-lazy.rc"],
+    name: "android.hardware.camera.provider@2.7-service-google-lazy",
+    defaults: [
+        "camera_service_defaults",
+        "camera_service_lazy_hal_defaults",
+    ],
+    init_rc: ["android.hardware.camera.provider@2.7-service-google-lazy.rc"],
     cflags: ["-DLAZY_SERVICE"],
 }
diff --git a/common/hal/hidl_service/CleanSpec.mk b/common/hal/hidl_service/CleanSpec.mk
new file mode 100644
index 0000000..4c964ca
--- /dev/null
+++ b/common/hal/hidl_service/CleanSpec.mk
@@ -0,0 +1,48 @@
+# Copyright 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# If you don't need to do a full clean build but would like to touch
+# a file or delete some intermediate files, add a clean step to the end
+# of the list.  These steps will only be run once, if they haven't been
+# run before.
+#
+# E.g.:
+#     $(call add-clean-step, touch -c external/sqlite/sqlite3.h)
+#     $(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/STATIC_LIBRARIES/libz_intermediates)
+#
+# Always use "touch -c" and "rm -f" or "rm -rf" to gracefully deal with
+# files that are missing or have been moved.
+#
+# Use $(PRODUCT_OUT) to get to the "out/target/product/blah/" directory.
+# Use $(OUT_DIR) to refer to the "out" directory.
+#
+# If you need to re-do something that's already mentioned, just copy
+# the command and add it to the bottom of the list.  E.g., if a change
+# that you made last week required touching a file and a change you
+# made today requires touching the same file, just copy the old
+# touch step and add it to the end of the list.
+#
+# ************************************************
+# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
+# ************************************************
+
+# For example:
+#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/AndroidTests_intermediates)
+#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/core_intermediates)
+#$(call add-clean-step, find $(OUT_DIR) -type f -name "IGTalkSession*" -print0 | xargs -0 rm -f)
+#$(call add-clean-step, rm -rf $(PRODUCT_OUT)/data/*)
+
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/etc/vintf/manifest/android.hardware.camera.provider@*-service-google.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/etc/init/android.hardware.camera.provider@*-service-google.rc)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/etc/init/android.hardware.camera.provider@*-service-google-lazy.rc)
diff --git a/common/hal/hidl_service/android.hardware.camera.provider@2.6-service-google-lazy.rc b/common/hal/hidl_service/android.hardware.camera.provider@2.6-service-google-lazy.rc
deleted file mode 100644
index 7e75260..0000000
--- a/common/hal/hidl_service/android.hardware.camera.provider@2.6-service-google-lazy.rc
+++ /dev/null
@@ -1,12 +0,0 @@
-service vendor.camera-provider-2-6-google /vendor/bin/hw/android.hardware.camera.provider@2.6-service-google-lazy
-    interface android.hardware.camera.provider@2.4::ICameraProvider internal/0
-    interface android.hardware.camera.provider@2.5::ICameraProvider internal/0
-    interface android.hardware.camera.provider@2.6::ICameraProvider internal/0
-    oneshot
-    disabled
-    class hal
-    user system
-    group system
-    capabilities SYS_NICE
-    rlimit rtprio 10 10
-    writepid /dev/cpuset/camera-daemon/tasks /dev/stune/camera-daemon/tasks
diff --git a/common/hal/hidl_service/android.hardware.camera.provider@2.6-service-google.rc b/common/hal/hidl_service/android.hardware.camera.provider@2.6-service-google.rc
deleted file mode 100644
index f4dec0e..0000000
--- a/common/hal/hidl_service/android.hardware.camera.provider@2.6-service-google.rc
+++ /dev/null
@@ -1,7 +0,0 @@
-service vendor.camera-provider-2-6-google /vendor/bin/hw/android.hardware.camera.provider@2.6-service-google
-    class hal
-    user system
-    group system
-    capabilities SYS_NICE
-    rlimit rtprio 10 10
-    writepid /dev/cpuset/camera-daemon/tasks /dev/stune/camera-daemon/tasks
diff --git a/common/hal/hidl_service/android.hardware.camera.provider@2.6-service-google.xml b/common/hal/hidl_service/android.hardware.camera.provider@2.6-service-google.xml
deleted file mode 100644
index d40e285..0000000
--- a/common/hal/hidl_service/android.hardware.camera.provider@2.6-service-google.xml
+++ /dev/null
@@ -1,39 +0,0 @@
-<!-- Copyright (c) 2020, The Linux Foundation. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-    * Redistributions of source code must retain the above copyright
-      notice, this list of conditions and the following disclaimer.
-    * Redistributions in binary form must reproduce the above
-      copyright notice, this list of conditions and the following
-      disclaimer in the documentation and/or other materials provided
-      with the distribution.
-    * Neither the name of The Linux Foundation nor the names of its
-      contributors may be used to endorse or promote products derived
-      from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
-WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
-ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
-BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
-BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
-OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
-IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--->
-
-<manifest version="1.0" type="device">
-    <hal format="hidl">
-        <name>android.hardware.camera.provider</name>
-        <transport>hwbinder</transport>
-        <version>2.6</version>
-        <interface>
-            <name>ICameraProvider</name>
-            <instance>internal/0</instance>
-        </interface>
-    </hal>
-</manifest>
diff --git a/common/hal/hidl_service/android.hardware.camera.provider@2.7-service-google-lazy.rc b/common/hal/hidl_service/android.hardware.camera.provider@2.7-service-google-lazy.rc
new file mode 100644
index 0000000..ff619d8
--- /dev/null
+++ b/common/hal/hidl_service/android.hardware.camera.provider@2.7-service-google-lazy.rc
@@ -0,0 +1,13 @@
+service vendor.camera-provider-2-7-google /vendor/bin/hw/android.hardware.camera.provider@2.7-service-google-lazy
+    interface android.hardware.camera.provider@2.4::ICameraProvider internal/0
+    interface android.hardware.camera.provider@2.5::ICameraProvider internal/0
+    interface android.hardware.camera.provider@2.6::ICameraProvider internal/0
+    interface android.hardware.camera.provider@2.7::ICameraProvider internal/0
+    oneshot
+    disabled
+    class hal
+    user system
+    group system
+    capabilities SYS_NICE
+    rlimit rtprio 10 10
+    task_profiles CameraServiceCapacity CameraServicePerformance
diff --git a/common/hal/hidl_service/android.hardware.camera.provider@2.7-service-google.rc b/common/hal/hidl_service/android.hardware.camera.provider@2.7-service-google.rc
new file mode 100644
index 0000000..b5cfab5
--- /dev/null
+++ b/common/hal/hidl_service/android.hardware.camera.provider@2.7-service-google.rc
@@ -0,0 +1,7 @@
+service vendor.camera-provider-2-7-google /vendor/bin/hw/android.hardware.camera.provider@2.7-service-google
+    class hal
+    user system
+    group system
+    capabilities SYS_NICE
+    rlimit rtprio 10 10
+    task_profiles CameraServiceCapacity CameraServicePerformance
diff --git a/common/hal/hidl_service/android.hardware.camera.provider@2.7-service-google.xml b/common/hal/hidl_service/android.hardware.camera.provider@2.7-service-google.xml
new file mode 100644
index 0000000..481b46e
--- /dev/null
+++ b/common/hal/hidl_service/android.hardware.camera.provider@2.7-service-google.xml
@@ -0,0 +1,26 @@
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<manifest version="1.0" type="device">
+    <hal format="hidl">
+        <name>android.hardware.camera.provider</name>
+        <transport>hwbinder</transport>
+        <version>2.7</version>
+        <interface>
+            <name>ICameraProvider</name>
+            <instance>internal/0</instance>
+        </interface>
+    </hal>
+</manifest>
diff --git a/common/hal/hidl_service/hidl_camera_build_version.inl b/common/hal/hidl_service/hidl_camera_build_version.inl
new file mode 100644
index 0000000..1d104c4
--- /dev/null
+++ b/common/hal/hidl_service/hidl_camera_build_version.inl
@@ -0,0 +1,6 @@
+#pragma once
+
+namespace {
+constexpr const char* kAndroidBuildId = "{BUILD_ID}";
+constexpr int64_t kHalManifestBuildNumber = INT64_C({BUILD_NUMBER});
+}
diff --git a/common/hal/hidl_service/hidl_camera_device.cc b/common/hal/hidl_service/hidl_camera_device.cc
index bb50a4b..fd45a5b 100644
--- a/common/hal/hidl_service/hidl_camera_device.cc
+++ b/common/hal/hidl_service/hidl_camera_device.cc
@@ -16,9 +16,10 @@
 
 #define LOG_TAG "GCH_HidlCameraDevice"
 //#define LOG_NDEBUG 0
+#include "hidl_camera_device.h"
+
 #include <log/log.h>
 
-#include "hidl_camera_device.h"
 #include "hidl_camera_device_session.h"
 #include "hidl_profiler.h"
 #include "hidl_utils.h"
@@ -27,14 +28,14 @@
 namespace hardware {
 namespace camera {
 namespace device {
-namespace V3_5 {
+namespace V3_7 {
 namespace implementation {
 
 namespace hidl_utils = ::android::hardware::camera::implementation::hidl_utils;
 
 using ::android::google_camera_hal::HalCameraMetadata;
 
-const std::string HidlCameraDevice::kDeviceVersion = "3.5";
+const std::string HidlCameraDevice::kDeviceVersion = "3.7";
 
 std::unique_ptr<HidlCameraDevice> HidlCameraDevice::Create(
     std::unique_ptr<CameraDevice> google_camera_device) {
@@ -63,6 +64,15 @@
 
   camera_id_ = google_camera_device->GetPublicCameraId();
   google_camera_device_ = std::move(google_camera_device);
+  hidl_profiler_ = HidlProfiler::Create(camera_id_);
+  if (hidl_profiler_ == nullptr) {
+    ALOGE("%s: Failed to create HidlProfiler.", __FUNCTION__);
+    return UNKNOWN_ERROR;
+  }
+  hidl_profiler_->SetLatencyProfiler(google_camera_device_->GetProfiler(
+      camera_id_, hidl_profiler_->GetLatencyFlag()));
+  hidl_profiler_->SetFpsProfiler(google_camera_device_->GetProfiler(
+      camera_id_, hidl_profiler_->GetFpsFlag()));
 
   return OK;
 }
@@ -134,8 +144,8 @@
 Return<void> HidlCameraDevice::open(
     const sp<V3_2::ICameraDeviceCallback>& callback,
     ICameraDevice::open_cb _hidl_cb) {
-  auto profiler_item =
-      ::android::hardware::camera::implementation::hidl_profiler::OnCameraOpen();
+  auto profiler =
+      hidl_profiler_->MakeScopedProfiler(HidlProfiler::ScopedType::kOpen);
 
   std::unique_ptr<google_camera_hal::CameraDeviceSession> session;
   status_t res = google_camera_device_->CreateCameraDeviceSession(&session);
@@ -146,8 +156,8 @@
     return Void();
   }
 
-  auto hidl_session =
-      HidlCameraDeviceSession::Create(callback, std::move(session));
+  auto hidl_session = HidlCameraDeviceSession::Create(
+      callback, std::move(session), hidl_profiler_);
   if (hidl_session == nullptr) {
     ALOGE("%s: Creating HidlCameraDeviceSession failed.", __FUNCTION__);
     _hidl_cb(hidl_utils::ConvertToHidlStatus(res), nullptr);
@@ -211,6 +221,16 @@
 Return<void> HidlCameraDevice::isStreamCombinationSupported(
     const V3_4::StreamConfiguration& streams,
     ICameraDevice::isStreamCombinationSupported_cb _hidl_cb) {
+  StreamConfiguration streams3_7;
+
+  hidl_utils::ConvertStreamConfigurationV34ToV37(streams, &streams3_7);
+
+  return isStreamCombinationSupported_3_7(streams3_7, _hidl_cb);
+}
+
+Return<void> HidlCameraDevice::isStreamCombinationSupported_3_7(
+    const V3_7::StreamConfiguration& streams,
+    ICameraDevice::isStreamCombinationSupported_cb _hidl_cb) {
   bool is_supported = false;
   google_camera_hal::StreamConfiguration stream_config;
   status_t res = hidl_utils::ConverToHalStreamConfig(streams, &stream_config);
@@ -227,8 +247,8 @@
 }
 
 }  // namespace implementation
-}  // namespace V3_5
+}  // namespace V3_7
 }  // namespace device
 }  // namespace camera
 }  // namespace hardware
-}  // namespace android
\ No newline at end of file
+}  // namespace android
diff --git a/common/hal/hidl_service/hidl_camera_device.h b/common/hal/hidl_service/hidl_camera_device.h
index 88fcd1b..824d64d 100644
--- a/common/hal/hidl_service/hidl_camera_device.h
+++ b/common/hal/hidl_service/hidl_camera_device.h
@@ -17,23 +17,25 @@
 #ifndef HARDWARE_GOOGLE_CAMERA_HAL_HIDL_SERVICE_HIDL_CAMERA_DEVICE_H_
 #define HARDWARE_GOOGLE_CAMERA_HAL_HIDL_SERVICE_HIDL_CAMERA_DEVICE_H_
 
-#include <android/hardware/camera/device/3.5/ICameraDevice.h>
 #include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
+#include <android/hardware/camera/device/3.7/ICameraDevice.h>
 
 #include "camera_device.h"
+#include "hidl_profiler.h"
 
 namespace android {
 namespace hardware {
 namespace camera {
 namespace device {
-namespace V3_5 {
+namespace V3_7 {
 namespace implementation {
 
 using ::android::hardware::camera::common::V1_0::CameraResourceCost;
 using ::android::hardware::camera::common::V1_0::Status;
 using ::android::hardware::camera::common::V1_0::TorchMode;
-using ::android::hardware::camera::device::V3_5::ICameraDevice;
 using ::android::hardware::camera::device::V3_5::ICameraDeviceCallback;
+using ::android::hardware::camera::device::V3_7::ICameraDevice;
+using ::android::hardware::camera::implementation::HidlProfiler;
 
 using ::android::google_camera_hal::CameraDevice;
 
@@ -73,6 +75,11 @@
   Return<void> isStreamCombinationSupported(
       const V3_4::StreamConfiguration& streams,
       ICameraDevice::isStreamCombinationSupported_cb _hidl_cb) override;
+
+  Return<void> isStreamCombinationSupported_3_7(
+      const StreamConfiguration& streams,
+      ICameraDevice::isStreamCombinationSupported_cb _hidl_cb) override;
+
   // End of override functions in ICameraDevice
 
  protected:
@@ -83,10 +90,11 @@
 
   std::unique_ptr<CameraDevice> google_camera_device_;
   uint32_t camera_id_ = 0;
+  std::shared_ptr<HidlProfiler> hidl_profiler_;
 };
 
 }  // namespace implementation
-}  // namespace V3_5
+}  // namespace V3_7
 }  // namespace device
 }  // namespace camera
 }  // namespace hardware
diff --git a/common/hal/hidl_service/hidl_camera_device_session.cc b/common/hal/hidl_service/hidl_camera_device_session.cc
index 0179805..d0dc010 100644
--- a/common/hal/hidl_service/hidl_camera_device_session.cc
+++ b/common/hal/hidl_service/hidl_camera_device_session.cc
@@ -17,13 +17,14 @@
 #define LOG_TAG "GCH_HidlCameraDeviceSession"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
-#include <log/log.h>
+#include "hidl_camera_device_session.h"
 
 #include <cutils/properties.h>
 #include <cutils/trace.h>
+#include <log/log.h>
 #include <malloc.h>
+#include <utils/Trace.h>
 
-#include "hidl_camera_device_session.h"
 #include "hidl_profiler.h"
 #include "hidl_utils.h"
 
@@ -31,19 +32,19 @@
 namespace hardware {
 namespace camera {
 namespace device {
-namespace V3_5 {
+namespace V3_7 {
 namespace implementation {
 
 namespace hidl_utils = ::android::hardware::camera::implementation::hidl_utils;
-namespace hidl_profiler =
-    ::android::hardware::camera::implementation::hidl_profiler;
 
 using ::android::hardware::camera::device::V3_2::NotifyMsg;
 using ::android::hardware::camera::device::V3_2::StreamBuffer;
 using ::android::hardware::camera::device::V3_4::CaptureResult;
-using ::android::hardware::camera::device::V3_4::HalStreamConfiguration;
+using ::android::hardware::camera::device::V3_5::BufferRequest;
 using ::android::hardware::camera::device::V3_5::BufferRequestStatus;
 using ::android::hardware::camera::device::V3_5::StreamBufferRet;
+using ::android::hardware::camera::device::V3_5::StreamBuffersVal;
+using ::android::hardware::camera::device::V3_6::HalStreamConfiguration;
 using ::android::hardware::thermal::V1_0::ThermalStatus;
 using ::android::hardware::thermal::V1_0::ThermalStatusCode;
 using ::android::hardware::thermal::V2_0::Temperature;
@@ -51,7 +52,9 @@
 
 std::unique_ptr<HidlCameraDeviceSession> HidlCameraDeviceSession::Create(
     const sp<V3_2::ICameraDeviceCallback>& callback,
-    std::unique_ptr<google_camera_hal::CameraDeviceSession> device_session) {
+    std::unique_ptr<google_camera_hal::CameraDeviceSession> device_session,
+    std::shared_ptr<HidlProfiler> hidl_profiler) {
+  ATRACE_NAME("HidlCameraDeviceSession::Create");
   auto session =
       std::unique_ptr<HidlCameraDeviceSession>(new HidlCameraDeviceSession());
   if (session == nullptr) {
@@ -59,7 +62,8 @@
     return nullptr;
   }
 
-  status_t res = session->Initialize(callback, std::move(device_session));
+  status_t res =
+      session->Initialize(callback, std::move(device_session), hidl_profiler);
   if (res != OK) {
     ALOGE("%s: Initializing HidlCameraDeviceSession failed: %s(%d)",
           __FUNCTION__, strerror(-res), res);
@@ -70,6 +74,7 @@
 }
 
 HidlCameraDeviceSession::~HidlCameraDeviceSession() {
+  ATRACE_NAME("HidlCameraDeviceSession::~HidlCameraDeviceSession");
   close();
   // camera's closing, so flush any unused malloc pages
   mallopt(M_PURGE, 0);
@@ -86,13 +91,21 @@
   {
     std::lock_guard<std::mutex> pending_lock(pending_first_frame_buffers_mutex_);
     if (!hal_result->output_buffers.empty() &&
-        num_pending_first_frame_buffers_ > 0) {
+        num_pending_first_frame_buffers_ > 0 &&
+        first_request_frame_number_ == hal_result->frame_number) {
       num_pending_first_frame_buffers_ -= hal_result->output_buffers.size();
       if (num_pending_first_frame_buffers_ == 0) {
-        hidl_profiler::OnFirstFrameResult();
+        ALOGI("%s: First frame done", __FUNCTION__);
+        hidl_profiler_->FirstFrameEnd();
+        ATRACE_ASYNC_END("first_frame", 0);
+        ATRACE_ASYNC_END("switch_mode", 0);
       }
     }
   }
+  for (auto& buffer : hal_result->output_buffers) {
+    hidl_profiler_->ProfileFrameRate("Stream " +
+                                     std::to_string(buffer.stream_id));
+  }
 
   hidl_vec<CaptureResult> hidl_results(1);
   status_t res = hidl_utils::ConvertToHidlCaptureResult(
@@ -197,7 +210,7 @@
     }
 
     if (stream_buffer_return.val.getDiscriminator() ==
-        StreamBuffersVal::hidl_discriminator::buffers) {
+        V3_5::StreamBuffersVal::hidl_discriminator::buffers) {
       const hidl_vec<StreamBuffer>& hidl_buffers =
           stream_buffer_return.val.buffers();
       for (auto& hidl_buffer : hidl_buffers) {
@@ -319,12 +332,19 @@
 
 status_t HidlCameraDeviceSession::Initialize(
     const sp<V3_2::ICameraDeviceCallback>& callback,
-    std::unique_ptr<google_camera_hal::CameraDeviceSession> device_session) {
+    std::unique_ptr<google_camera_hal::CameraDeviceSession> device_session,
+    std::shared_ptr<HidlProfiler> hidl_profiler) {
+  ATRACE_NAME("HidlCameraDeviceSession::Initialize");
   if (device_session == nullptr) {
     ALOGE("%s: device_session is nullptr.", __FUNCTION__);
     return BAD_VALUE;
   }
 
+  if (hidl_profiler == nullptr) {
+    ALOGE("%s: hidl_profiler is nullptr.", __FUNCTION__);
+    return BAD_VALUE;
+  }
+
   status_t res = CreateMetadataQueue(&request_metadata_queue_,
                                      kRequestMetadataQueueSizeBytes,
                                      "ro.vendor.camera.req.fmq.size");
@@ -366,6 +386,7 @@
 
   hidl_device_callback_ = cast_res;
   device_session_ = std::move(device_session);
+  hidl_profiler_ = hidl_profiler;
 
   SetSessionCallbacks();
   return OK;
@@ -502,6 +523,7 @@
 Return<void> HidlCameraDeviceSession::constructDefaultRequestSettings(
     RequestTemplate type,
     ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) {
+  ATRACE_NAME("HidlCameraDeviceSession::constructDefaultRequestSettings");
   V3_2::CameraMetadata hidl_metadata;
 
   if (device_session_ == nullptr) {
@@ -531,16 +553,19 @@
   return Void();
 }
 
-Return<void> HidlCameraDeviceSession::configureStreams_3_5(
+Return<void> HidlCameraDeviceSession::configureStreams_3_7(
     const StreamConfiguration& requestedConfiguration,
-    ICameraDeviceSession::configureStreams_3_5_cb _hidl_cb) {
+    ICameraDeviceSession::configureStreams_3_6_cb _hidl_cb) {
+  ATRACE_NAME("HidlCameraDeviceSession::configureStreams_3_7");
   HalStreamConfiguration hidl_hal_configs;
   if (device_session_ == nullptr) {
     _hidl_cb(Status::ILLEGAL_ARGUMENT, hidl_hal_configs);
     return Void();
   }
 
-  auto profiler_item = hidl_profiler::OnCameraStreamConfigure();
+  auto profiler = hidl_profiler_->MakeScopedProfiler(
+      HidlProfiler::ScopedType::kConfigureStream);
+
   first_frame_requested_ = false;
   num_pending_first_frame_buffers_ = 0;
 
@@ -581,19 +606,25 @@
   return Void();
 }
 
-Return<void> HidlCameraDeviceSession::processCaptureRequest_3_4(
+Return<void> HidlCameraDeviceSession::processCaptureRequest_3_7(
     const hidl_vec<CaptureRequest>& requests,
     const hidl_vec<BufferCache>& cachesToRemove,
-    processCaptureRequest_3_4_cb _hidl_cb) {
+    processCaptureRequest_3_7_cb _hidl_cb) {
   if (device_session_ == nullptr) {
     _hidl_cb(Status::ILLEGAL_ARGUMENT, 0);
     return Void();
   }
 
+  bool profile_first_request = false;
   if (!first_frame_requested_) {
     first_frame_requested_ = true;
-    num_pending_first_frame_buffers_ = requests[0].v3_2.outputBuffers.size();
-    hidl_profiler::OnFirstFrameRequest();
+    profile_first_request = true;
+    ATRACE_BEGIN("HidlCameraDeviceSession::FirstRequest");
+    num_pending_first_frame_buffers_ =
+        requests[0].v3_4.v3_2.outputBuffers.size();
+    first_request_frame_number_ = requests[0].v3_4.v3_2.frameNumber;
+    hidl_profiler_->FirstFrameStart();
+    ATRACE_ASYNC_BEGIN("first_frame", 0);
   }
 
   std::vector<google_camera_hal::BufferCache> hal_buffer_caches;
@@ -602,6 +633,9 @@
       hidl_utils::ConvertToHalBufferCaches(cachesToRemove, &hal_buffer_caches);
   if (res != OK) {
     _hidl_cb(Status::ILLEGAL_ARGUMENT, 0);
+    if (profile_first_request) {
+      ATRACE_END();
+    }
     return Void();
   }
 
@@ -617,6 +651,9 @@
       ALOGE("%s: Converting to HAL capture request failed: %s(%d)",
             __FUNCTION__, strerror(-res), res);
       _hidl_cb(hidl_utils::ConvertToHidlStatus(res), 0);
+      if (profile_first_request) {
+        ATRACE_END();
+      }
       return Void();
     }
 
@@ -635,6 +672,9 @@
   }
 
   _hidl_cb(hidl_utils::ConvertToHidlStatus(res), num_processed_requests);
+  if (profile_first_request) {
+    ATRACE_END();
+  }
   return Void();
 }
 
@@ -645,11 +685,18 @@
 }
 
 Return<Status> HidlCameraDeviceSession::flush() {
+  ATRACE_NAME("HidlCameraDeviceSession::flush");
+  ATRACE_ASYNC_BEGIN("switch_mode", 0);
   if (device_session_ == nullptr) {
     return Status::INTERNAL_ERROR;
   }
 
-  auto profiler_item = hidl_profiler::OnCameraFlush();
+  hidl_profiler_->SetLatencyProfiler(device_session_->GetProfiler(
+      hidl_profiler_->GetCameraId(), hidl_profiler_->GetLatencyFlag()));
+  hidl_profiler_->SetFpsProfiler(device_session_->GetProfiler(
+      hidl_profiler_->GetCameraId(), hidl_profiler_->GetFpsFlag()));
+  auto profiler =
+      hidl_profiler_->MakeScopedProfiler(HidlProfiler::ScopedType::kFlush);
 
   status_t res = device_session_->Flush();
   if (res != OK) {
@@ -662,8 +709,10 @@
 }
 
 Return<void> HidlCameraDeviceSession::close() {
+  ATRACE_NAME("HidlCameraDeviceSession::close");
   if (device_session_ != nullptr) {
-    auto profiler_item = hidl_profiler::OnCameraClose();
+    auto profiler =
+        hidl_profiler_->MakeScopedProfiler(HidlProfiler::ScopedType::kClose);
     device_session_ = nullptr;
   }
   return Void();
@@ -673,6 +722,7 @@
     const V3_2::CameraMetadata& oldSessionParams,
     const V3_2::CameraMetadata& newSessionParams,
     ICameraDeviceSession::isReconfigurationRequired_cb _hidl_cb) {
+  ATRACE_NAME("HidlCameraDeviceSession::isReconfigurationRequired");
   std::unique_ptr<google_camera_hal::HalCameraMetadata> old_hal_session_metadata;
   status_t res = hidl_utils::ConvertToHalMetadata(0, nullptr, oldSessionParams,
                                                   &old_hal_session_metadata);
@@ -724,11 +774,58 @@
   _hidl_cb(Status::ILLEGAL_ARGUMENT, V3_4::HalStreamConfiguration());
   return Void();
 }
+
+Return<void> HidlCameraDeviceSession::configureStreams_3_5(
+    const V3_5::StreamConfiguration& requestedConfiguration,
+    configureStreams_3_5_cb _hidl_cb) {
+  configureStreams_3_6(
+      requestedConfiguration,
+      [_hidl_cb](Status s, device::V3_6::HalStreamConfiguration halConfig) {
+        V3_4::HalStreamConfiguration halConfig3_4;
+        halConfig3_4.streams.resize(halConfig.streams.size());
+        for (size_t i = 0; i < halConfig.streams.size(); i++) {
+          halConfig3_4.streams[i] = halConfig.streams[i].v3_4;
+        }
+        _hidl_cb(s, halConfig3_4);
+      });
+
+  return Void();
+}
+
+Return<void> HidlCameraDeviceSession::configureStreams_3_6(
+    const V3_5::StreamConfiguration& requestedConfiguration,
+    configureStreams_3_6_cb _hidl_cb) {
+  StreamConfiguration requestedConfiguration3_7;
+  requestedConfiguration3_7.streams.resize(
+      requestedConfiguration.v3_4.streams.size());
+  for (size_t i = 0; i < requestedConfiguration.v3_4.streams.size(); i++) {
+    requestedConfiguration3_7.streams[i].v3_4 =
+        requestedConfiguration.v3_4.streams[i];
+    requestedConfiguration3_7.streams[i].groupId = -1;
+  }
+  requestedConfiguration3_7.operationMode =
+      requestedConfiguration.v3_4.operationMode;
+  requestedConfiguration3_7.sessionParams =
+      requestedConfiguration.v3_4.sessionParams;
+  requestedConfiguration3_7.streamConfigCounter =
+      requestedConfiguration.streamConfigCounter;
+  requestedConfiguration3_7.multiResolutionInputImage = false;
+
+  configureStreams_3_7(requestedConfiguration3_7, _hidl_cb);
+  return Void();
+}
+
+Return<void> HidlCameraDeviceSession::switchToOffline(
+    const hidl_vec<int32_t>&, switchToOffline_cb _hidl_cb) {
+  _hidl_cb(Status::ILLEGAL_ARGUMENT, V3_6::CameraOfflineSessionInfo(), nullptr);
+  return Void();
+}
+
 Return<void> HidlCameraDeviceSession::processCaptureRequest(
     const hidl_vec<V3_2::CaptureRequest>& requests,
     const hidl_vec<BufferCache>& cachesToRemove,
     processCaptureRequest_cb _hidl_cb) {
-  hidl_vec<CaptureRequest> requests_3_4;
+  hidl_vec<V3_4::CaptureRequest> requests_3_4;
   requests_3_4.resize(requests.size());
   for (uint32_t i = 0; i < requests_3_4.size(); i++) {
     requests_3_4[i].v3_2 = requests[i];
@@ -737,8 +834,21 @@
   return processCaptureRequest_3_4(requests_3_4, cachesToRemove, _hidl_cb);
 }
 
+Return<void> HidlCameraDeviceSession::processCaptureRequest_3_4(
+    const hidl_vec<V3_4::CaptureRequest>& requests,
+    const hidl_vec<BufferCache>& cachesToRemove,
+    processCaptureRequest_cb _hidl_cb) {
+  hidl_vec<V3_7::CaptureRequest> requests_3_7;
+  requests_3_7.resize(requests.size());
+  for (uint32_t i = 0; i < requests_3_7.size(); i++) {
+    requests_3_7[i].v3_4 = requests[i];
+  }
+
+  return processCaptureRequest_3_7(requests_3_7, cachesToRemove, _hidl_cb);
+}
+
 }  // namespace implementation
-}  // namespace V3_5
+}  // namespace V3_7
 }  // namespace device
 }  // namespace camera
 }  // namespace hardware
diff --git a/common/hal/hidl_service/hidl_camera_device_session.h b/common/hal/hidl_service/hidl_camera_device_session.h
index 76835bb..ea2b600 100644
--- a/common/hal/hidl_service/hidl_camera_device_session.h
+++ b/common/hal/hidl_service/hidl_camera_device_session.h
@@ -17,32 +17,33 @@
 #ifndef HARDWARE_GOOGLE_CAMERA_HAL_HIDL_SERVICE_HIDL_CAMERA_DEVICE_SESSION_H_
 #define HARDWARE_GOOGLE_CAMERA_HAL_HIDL_SERVICE_HIDL_CAMERA_DEVICE_SESSION_H_
 
-#include <android/hardware/camera/device/3.5/ICameraDevice.h>
 #include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
-#include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
+#include <android/hardware/camera/device/3.7/ICameraDevice.h>
+#include <android/hardware/camera/device/3.7/ICameraDeviceSession.h>
 #include <android/hardware/thermal/2.0/IThermal.h>
 #include <fmq/MessageQueue.h>
 
 #include <shared_mutex>
 
 #include "camera_device_session.h"
+#include "hidl_profiler.h"
 #include "hidl_thermal_utils.h"
-#include "profiler.h"
 
 namespace android {
 namespace hardware {
 namespace camera {
 namespace device {
-namespace V3_5 {
+namespace V3_7 {
 namespace implementation {
 
 using ::android::hardware::camera::common::V1_0::Status;
 using ::android::hardware::camera::device::V3_2::BufferCache;
 using ::android::hardware::camera::device::V3_2::RequestTemplate;
-using ::android::hardware::camera::device::V3_4::CaptureRequest;
 using ::android::hardware::camera::device::V3_5::ICameraDeviceCallback;
-using ::android::hardware::camera::device::V3_5::ICameraDeviceSession;
-using ::android::hardware::camera::device::V3_5::StreamConfiguration;
+using ::android::hardware::camera::device::V3_7::CaptureRequest;
+using ::android::hardware::camera::device::V3_7::ICameraDeviceSession;
+using ::android::hardware::camera::device::V3_7::StreamConfiguration;
+using ::android::hardware::camera::implementation::HidlProfiler;
 
 using MetadataQueue =
     ::android::hardware::MessageQueue<uint8_t, kSynchronizedReadWrite>;
@@ -59,17 +60,34 @@
   // nullptr.
   static std::unique_ptr<HidlCameraDeviceSession> Create(
       const sp<V3_2::ICameraDeviceCallback>& callback,
-      std::unique_ptr<google_camera_hal::CameraDeviceSession> device_session);
+      std::unique_ptr<google_camera_hal::CameraDeviceSession> device_session,
+      std::shared_ptr<HidlProfiler> hidl_profiler);
 
   virtual ~HidlCameraDeviceSession();
 
   // Override functions in ICameraDeviceSession
+  Return<void> configureStreams_3_7(
+      const StreamConfiguration& requestedConfiguration,
+      configureStreams_3_7_cb _hidl_cb) override;
+
+  Return<void> processCaptureRequest_3_7(
+      const hidl_vec<CaptureRequest>& requests,
+      const hidl_vec<BufferCache>& cachesToRemove,
+      processCaptureRequest_3_7_cb _hidl_cb) override;
+
+  Return<void> configureStreams_3_6(
+      const V3_5::StreamConfiguration& requestedConfiguration,
+      ICameraDeviceSession::configureStreams_3_6_cb _hidl_cb) override;
+
+  Return<void> switchToOffline(const hidl_vec<int32_t>& streamsToKeep,
+                               switchToOffline_cb _hidl_cb) override;
+
   Return<void> constructDefaultRequestSettings(
       RequestTemplate type,
       ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) override;
 
   Return<void> configureStreams_3_5(
-      const StreamConfiguration& requestedConfiguration,
+      const V3_5::StreamConfiguration& requestedConfiguration,
       ICameraDeviceSession::configureStreams_3_5_cb _hidl_cb) override;
 
   Return<void> getCaptureRequestMetadataQueue(
@@ -79,7 +97,7 @@
       ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) override;
 
   Return<void> processCaptureRequest_3_4(
-      const hidl_vec<CaptureRequest>& requests,
+      const hidl_vec<V3_4::CaptureRequest>& requests,
       const hidl_vec<BufferCache>& cachesToRemove,
       processCaptureRequest_3_4_cb _hidl_cb) override;
 
@@ -123,7 +141,8 @@
   // Initialize HidlCameraDeviceSession with a CameraDeviceSession.
   status_t Initialize(
       const sp<V3_2::ICameraDeviceCallback>& callback,
-      std::unique_ptr<google_camera_hal::CameraDeviceSession> device_session);
+      std::unique_ptr<google_camera_hal::CameraDeviceSession> device_session,
+      std::shared_ptr<HidlProfiler> hidl_profiler);
 
   // Create a metadata queue.
   // If override_size_property contains a valid size, it will create a metadata
@@ -200,14 +219,19 @@
   // Flag for profiling first frame processing time.
   bool first_frame_requested_ = false;
 
+  // The frame number of first capture request after configure stream
+  uint32_t first_request_frame_number_ = 0;
+
   std::mutex pending_first_frame_buffers_mutex_;
   // Profiling first frame process time. Stop timer when it become 0.
   // Must be protected by pending_first_frame_buffers_mutex_
   size_t num_pending_first_frame_buffers_ = 0;
+
+  std::shared_ptr<HidlProfiler> hidl_profiler_;
 };
 
 }  // namespace implementation
-}  // namespace V3_5
+}  // namespace V3_7
 }  // namespace device
 }  // namespace camera
 }  // namespace hardware
diff --git a/common/hal/hidl_service/hidl_camera_provider.cc b/common/hal/hidl_service/hidl_camera_provider.cc
index 7193a26..1c572e3 100644
--- a/common/hal/hidl_service/hidl_camera_provider.cc
+++ b/common/hal/hidl_service/hidl_camera_provider.cc
@@ -28,24 +28,24 @@
 namespace hardware {
 namespace camera {
 namespace provider {
-namespace V2_6 {
+namespace V2_7 {
 namespace implementation {
 
 namespace hidl_utils = ::android::hardware::camera::implementation::hidl_utils;
 
 using ::android::google_camera_hal::CameraDevice;
+using ::android::hardware::Void;
+using ::android::hardware::camera::common::V1_0::CameraDeviceStatus;
+using ::android::hardware::camera::common::V1_0::TorchModeStatus;
+using ::android::hardware::camera::common::V1_0::VendorTagSection;
 
 const std::string HidlCameraProvider::kProviderName = "internal";
 // "device@<version>/internal/<id>"
 const std::regex HidlCameraProvider::kDeviceNameRegex(
     "device@([0-9]+\\.[0-9]+)/internal/(.+)");
 
-std::unique_ptr<HidlCameraProvider> HidlCameraProvider::Create() {
-  auto provider = std::unique_ptr<HidlCameraProvider>(new HidlCameraProvider());
-  if (provider == nullptr) {
-    ALOGE("%s: Cannot create a HidlCameraProvider.", __FUNCTION__);
-    return nullptr;
-  }
+android::sp<HidlCameraProvider> HidlCameraProvider::Create() {
+  android::sp<HidlCameraProvider> provider = new HidlCameraProvider();
 
   status_t res = provider->Initialize();
   if (res != OK) {
@@ -85,7 +85,7 @@
             std::unique_lock<std::mutex> lock(callbacks_lock_);
             callbacks_->cameraDeviceStatusChange(
                 "device@" +
-                    device::V3_5::implementation::HidlCameraDevice::kDeviceVersion +
+                    device::V3_7::implementation::HidlCameraDevice::kDeviceVersion +
                     "/" + kProviderName + "/" + camera_id,
                 hidl_camera_device_status);
           }),
@@ -126,7 +126,7 @@
             std::unique_lock<std::mutex> lock(callbacks_lock_);
             callbacks_2_6_->physicalCameraDeviceStatusChange(
                 "device@" +
-                    device::V3_5::implementation::HidlCameraDevice::kDeviceVersion +
+                    device::V3_7::implementation::HidlCameraDevice::kDeviceVersion +
                     "/" + kProviderName + "/" + camera_id,
                 physical_camera_id, hidl_camera_device_status);
           }),
@@ -150,7 +150,7 @@
             std::unique_lock<std::mutex> lock(callbacks_lock_);
             callbacks_->torchModeStatusChange(
                 "device@" +
-                    device::V3_5::implementation::HidlCameraDevice::kDeviceVersion +
+                    device::V3_7::implementation::HidlCameraDevice::kDeviceVersion +
                     "/" + kProviderName + "/" + camera_id,
                 hidl_torch_status);
           }),
@@ -164,13 +164,23 @@
 
 Return<Status> HidlCameraProvider::setCallback(
     const sp<ICameraProviderCallback>& callback) {
+  bool first_time = false;
   {
     std::unique_lock<std::mutex> lock(callbacks_lock_);
+    first_time = callbacks_ == nullptr;
     callbacks_ = callback;
   }
-
   google_camera_provider_->TriggerDeferredCallbacks();
-
+#ifdef __ANDROID_APEX__
+  if (first_time) {
+    std::string ready_property_name = "vendor.camera.hal.ready.count";
+    int ready_count = property_get_int32(ready_property_name.c_str(), 0);
+    property_set(ready_property_name.c_str(),
+                 std::to_string(++ready_count).c_str());
+    ALOGI("HidlCameraProvider::setCallback() first time ready count: %d ",
+          ready_count);
+  }
+#endif
   return Status::OK;
 }
 
@@ -203,7 +213,6 @@
 Return<void> HidlCameraProvider::getCameraIdList(getCameraIdList_cb _hidl_cb) {
   std::vector<uint32_t> camera_ids;
   hidl_vec<hidl_string> hidl_camera_ids;
-
   status_t res = google_camera_provider_->GetCameraIdList(&camera_ids);
   if (res != OK) {
     ALOGE("%s: Getting camera ID list failed: %s(%d)", __FUNCTION__,
@@ -217,7 +226,7 @@
     // camera ID is in the form of "device@<major>.<minor>/<type>/<id>"
     hidl_camera_ids[i] =
         "device@" +
-        device::V3_5::implementation::HidlCameraDevice::kDeviceVersion + "/" +
+        device::V3_7::implementation::HidlCameraDevice::kDeviceVersion + "/" +
         kProviderName + "/" + std::to_string(camera_ids[i]);
   }
 
@@ -256,6 +265,21 @@
 }
 
 Return<void> HidlCameraProvider::isConcurrentStreamCombinationSupported(
+    const hidl_vec<provider::V2_6::CameraIdAndStreamCombination>& configs,
+    isConcurrentStreamCombinationSupported_cb _hidl_cb) {
+  hidl_vec<CameraIdAndStreamCombination> configs2_7;
+  configs2_7.resize(configs.size());
+  for (size_t i = 0; i < configs.size(); i++) {
+    configs2_7[i].cameraId = configs[i].cameraId;
+
+    hidl_utils::ConvertStreamConfigurationV34ToV37(
+        configs[i].streamConfiguration, &configs2_7[i].streamConfiguration);
+  }
+
+  return isConcurrentStreamCombinationSupported_2_7(configs2_7, _hidl_cb);
+}
+
+Return<void> HidlCameraProvider::isConcurrentStreamCombinationSupported_2_7(
     const hidl_vec<CameraIdAndStreamCombination>& configs,
     isConcurrentStreamCombinationSupported_cb _hidl_cb) {
   std::vector<google_camera_hal::CameraIdAndStreamConfiguration>
@@ -345,7 +369,7 @@
   }
 
   auto hidl_camera_device =
-      device::V3_5::implementation::HidlCameraDevice::Create(
+      device::V3_7::implementation::HidlCameraDevice::Create(
           std::move(google_camera_device));
   if (hidl_camera_device == nullptr) {
     ALOGE("%s: Creating HidlCameraDevice failed", __FUNCTION__);
@@ -362,24 +386,8 @@
   return Void();
 }
 
-ICameraProvider* HIDL_FETCH_ICameraProvider(const char* name) {
-  std::string provider_name = HidlCameraProvider::kProviderName + "/0";
-  if (provider_name.compare(name) != 0) {
-    ALOGE("%s: Unknown provider name: %s", __FUNCTION__, name);
-    return nullptr;
-  }
-
-  auto provider = HidlCameraProvider::Create();
-  if (provider == nullptr) {
-    ALOGE("%s: Cannot create a HidlCameraProvider.", __FUNCTION__);
-    return nullptr;
-  }
-
-  return provider.release();
-}
-
 }  // namespace implementation
-}  // namespace V2_6
+}  // namespace V2_7
 }  // namespace provider
 }  // namespace camera
 }  // namespace hardware
diff --git a/common/hal/hidl_service/hidl_camera_provider.h b/common/hal/hidl_service/hidl_camera_provider.h
index 3981602..31efc14 100644
--- a/common/hal/hidl_service/hidl_camera_provider.h
+++ b/common/hal/hidl_service/hidl_camera_provider.h
@@ -17,31 +17,27 @@
 #ifndef HARDWARE_GOOGLE_CAMERA_HAL_HIDL_SERVICE_HIDL_CAMERA_PROVIDER_H_
 #define HARDWARE_GOOGLE_CAMERA_HAL_HIDL_SERVICE_HIDL_CAMERA_PROVIDER_H_
 
-#include <android/hardware/camera/provider/2.6/ICameraProvider.h>
 #include <android/hardware/camera/provider/2.6/ICameraProviderCallback.h>
+#include <android/hardware/camera/provider/2.7/ICameraProvider.h>
+#include <regex>
 #include "camera_provider.h"
 
 namespace android {
 namespace hardware {
 namespace camera {
 namespace provider {
-namespace V2_6 {
+namespace V2_7 {
 namespace implementation {
 
 using ::android::sp;
 using ::android::hardware::hidl_string;
 using ::android::hardware::hidl_vec;
 using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::hardware::camera::common::V1_0::CameraDeviceStatus;
 using ::android::hardware::camera::common::V1_0::Status;
-using ::android::hardware::camera::common::V1_0::TorchModeStatus;
-using ::android::hardware::camera::common::V1_0::VendorTag;
-using ::android::hardware::camera::common::V1_0::VendorTagSection;
 using ::android::hardware::camera::provider::V2_4::ICameraProviderCallback;
 using ::android::hardware::camera::provider::V2_5::DeviceState;
-using ::android::hardware::camera::provider::V2_6::CameraIdAndStreamCombination;
-using ::android::hardware::camera::provider::V2_6::ICameraProvider;
+using ::android::hardware::camera::provider::V2_7::CameraIdAndStreamCombination;
+using ::android::hardware::camera::provider::V2_7::ICameraProvider;
 
 using ::android::google_camera_hal::CameraProvider;
 
@@ -51,7 +47,7 @@
 class HidlCameraProvider : public ICameraProvider {
  public:
   static const std::string kProviderName;
-  static std::unique_ptr<HidlCameraProvider> Create();
+  static android::sp<HidlCameraProvider> Create();
   virtual ~HidlCameraProvider() = default;
 
   // Override functions in ICameraProvider.
@@ -69,6 +65,10 @@
       getConcurrentStreamingCameraIds_cb _hidl_cb) override;
 
   Return<void> isConcurrentStreamCombinationSupported(
+      const hidl_vec<V2_6::CameraIdAndStreamCombination>& configs,
+      isConcurrentStreamCombinationSupported_cb _hidl_cb) override;
+
+  Return<void> isConcurrentStreamCombinationSupported_2_7(
       const hidl_vec<CameraIdAndStreamCombination>& configs,
       isConcurrentStreamCombinationSupported_cb _hidl_cb) override;
 
@@ -103,10 +103,8 @@
   google_camera_hal::CameraProviderCallback camera_provider_callback_;
 };
 
-extern "C" ICameraProvider* HIDL_FETCH_ICameraProvider(const char* name);
-
 }  // namespace implementation
-}  // namespace V2_6
+}  // namespace V2_7
 }  // namespace provider
 }  // namespace camera
 }  // namespace hardware
diff --git a/common/hal/hidl_service/hidl_profiler.cc b/common/hal/hidl_service/hidl_profiler.cc
index e7e9ab9..7bb8027 100644
--- a/common/hal/hidl_service/hidl_profiler.cc
+++ b/common/hal/hidl_service/hidl_profiler.cc
@@ -16,184 +16,307 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "GCH_HidlProfiler"
+
+#include "hidl_profiler.h"
+
 #include <log/log.h>
+
+#include <memory>
 #include <mutex>
 #include <utility>
 
-#include "hidl_profiler.h"
+#include "profiler.h"
 
 namespace android {
 namespace hardware {
 namespace camera {
 namespace implementation {
-namespace hidl_profiler {
 namespace {
 
-struct HidlProfiler {
-  HidlProfiler() {
-    int32_t mode = property_get_int32("persist.camera.profiler.open_close", 0);
-    if (mode) {
-      // Use stop watch mode to print result.
-      mode |= google::camera_common::Profiler::SetPropFlag::kStopWatch;
+using ::google::camera_common::Profiler;
+
+// setprop key for profiling open/close camera
+constexpr char kPropKeyProfileOpenClose[] =
+    "persist.vendor.camera.profiler.open_close";
+// setprop key for profiling camera fps
+constexpr char kPropKeyProfileFps[] = "persist.vendor.camera.profiler.fps";
+
+constexpr char kFirstFrame[] = "First frame";
+constexpr char kHalTotal[] = "HAL Total";
+constexpr char kIdleString[] = "<-- IDLE -->";
+constexpr char kOverall[] = "Overall";
+
+class HidlProfilerImpl : public HidlProfiler {
+ public:
+  HidlProfilerImpl(uint32_t camera_id, int32_t latency_flag, int32_t fps_flag)
+      : camera_id_string_("Cam" + std::to_string(camera_id)),
+        camera_id_(camera_id),
+        latency_flag_(latency_flag),
+        fps_flag_(fps_flag) {
+  }
+
+  std::unique_ptr<HidlScopedProfiler> MakeScopedProfiler(
+      ScopedType type) override {
+    std::lock_guard lock(api_mutex_);
+
+    if (type == ScopedType::kConfigureStream && fps_profiler_ == nullptr) {
+      fps_profiler_ = CreateFpsProfiler();
     }
-    profiler = google::camera_common::Profiler::Create(mode);
-    if (profiler != nullptr) {
-      profiler->SetDumpFilePrefix(
+
+    if (latency_profiler_ == nullptr) {
+      latency_profiler_ = CreateLatencyProfiler();
+      if (latency_profiler_ != nullptr) {
+        has_camera_open_ = false;
+        config_count_ = 0;
+        flush_count_ = 0;
+        idle_count_ = 0;
+      } else {
+        return nullptr;
+      }
+    }
+
+    IdleEndLocked();
+
+    const char* name = nullptr;
+    int32_t id = 0;
+    switch (type) {
+      case ScopedType::kOpen:
+        name = "Open";
+        has_camera_open_ = true;
+        latency_profiler_->SetUseCase(camera_id_string_ + "-Open");
+        break;
+      case ScopedType::kConfigureStream:
+        name = "ConfigureStream";
+        if (!has_camera_open_) {
+          latency_profiler_->SetUseCase(camera_id_string_ + "-Reconfiguration");
+        }
+        id = config_count_++;
+        break;
+      case ScopedType::kFlush:
+        name = "Flush";
+        latency_profiler_->SetUseCase(camera_id_string_ + "-Flush");
+        id = flush_count_++;
+        break;
+      case ScopedType::kClose:
+        name = "Close";
+        latency_profiler_->SetUseCase(camera_id_string_ + "-Close");
+        break;
+      default:
+        ALOGE("%s: Unknown type %d", __FUNCTION__, type);
+        return nullptr;
+    }
+    return std::make_unique<HidlScopedProfiler>(
+        latency_profiler_, name, id, [this, type]() {
+          std::lock_guard lock(api_mutex_);
+          if (type == ScopedType::kClose) {
+            DeleteProfilerLocked();
+          } else {
+            IdleStartLocked();
+          }
+        });
+  }
+
+  void FirstFrameStart() override {
+    std::lock_guard lock(api_mutex_);
+    IdleEndLocked();
+    if (latency_profiler_ != nullptr) {
+      latency_profiler_->Start(kFirstFrame, Profiler::kInvalidRequestId);
+      latency_profiler_->Start(kHalTotal, Profiler::kInvalidRequestId);
+    }
+  }
+
+  void FirstFrameEnd() override {
+    std::lock_guard lock(api_mutex_);
+    if (latency_profiler_ != nullptr) {
+      latency_profiler_->End(kFirstFrame, Profiler::kInvalidRequestId);
+      latency_profiler_->End(kHalTotal, Profiler::kInvalidRequestId);
+      DeleteProfilerLocked();
+    }
+  }
+
+  void ProfileFrameRate(const std::string& name) override {
+    std::lock_guard lock(api_mutex_);
+    if (fps_profiler_ != nullptr) {
+      fps_profiler_->ProfileFrameRate(name);
+    }
+  }
+
+  void SetLatencyProfiler(std::unique_ptr<Profiler> profiler) override {
+    if (profiler == nullptr || latency_profiler_ == nullptr) {
+      return;
+    }
+    latency_profiler_ = std::move(profiler);
+    if (latency_profiler_ != nullptr) {
+      latency_profiler_->SetDumpFilePrefix(
           "/data/vendor/camera/profiler/hidl_open_close_");
-      profiler->Start("Overall", 0);
+      latency_profiler_->Start(kOverall, Profiler::kInvalidRequestId);
+      has_camera_open_ = false;
+      config_count_ = 0;
+      flush_count_ = 0;
+      idle_count_ = 0;
     }
   }
 
-  ~HidlProfiler() {
-    if (profiler != nullptr) {
-      profiler->End("Overall", 0);
+  void SetFpsProfiler(std::unique_ptr<Profiler> profiler) override {
+    if (profiler == nullptr || fps_profiler_ == nullptr) {
+      return;
+    }
+    fps_profiler_ = std::move(profiler);
+    if (fps_profiler_ != nullptr) {
+      fps_profiler_->SetDumpFilePrefix(
+          "/data/vendor/camera/profiler/hidl_fps_");
     }
   }
 
-  std::shared_ptr<google::camera_common::Profiler> profiler = nullptr;
-  bool has_camera_open = false;
-  uint8_t config_counter = 0;
-  uint8_t flush_counter = 0;
-  uint8_t connector_counter = 0;
+ private:
+  std::shared_ptr<Profiler> CreateLatencyProfiler() {
+    if (latency_flag_ == Profiler::SetPropFlag::kDisable) {
+      return nullptr;
+    }
+    std::shared_ptr<Profiler> profiler = Profiler::Create(latency_flag_);
+    if (profiler == nullptr) {
+      ALOGE("%s: Failed to create profiler", __FUNCTION__);
+      return nullptr;
+    }
+    profiler->SetDumpFilePrefix(
+        "/data/vendor/camera/profiler/hidl_open_close_");
+    profiler->Start(kOverall, Profiler::kInvalidRequestId);
+    return profiler;
+  }
+
+  std::shared_ptr<Profiler> CreateFpsProfiler() {
+    if (fps_flag_ == Profiler::SetPropFlag::kDisable) {
+      return nullptr;
+    }
+    std::shared_ptr<Profiler> profiler = Profiler::Create(fps_flag_);
+    if (profiler == nullptr) {
+      ALOGE("%s: Failed to create profiler", __FUNCTION__);
+      return nullptr;
+    }
+    profiler->SetDumpFilePrefix("/data/vendor/camera/profiler/hidl_fps_");
+    return profiler;
+  }
+
+  void DeleteProfilerLocked() {
+    if (latency_profiler_ != nullptr) {
+      latency_profiler_->End(kOverall, Profiler::kInvalidRequestId);
+      latency_profiler_ = nullptr;
+    }
+  }
+
+  void IdleStartLocked() {
+    if (latency_profiler_ != nullptr) {
+      latency_profiler_->Start(kIdleString, idle_count_++);
+    }
+  }
+
+  void IdleEndLocked() {
+    if (latency_profiler_ != nullptr && idle_count_ > 0) {
+      latency_profiler_->End(kIdleString, idle_count_ - 1);
+    }
+  }
+
+  uint32_t GetCameraId() const {
+    return camera_id_;
+  }
+  int32_t GetLatencyFlag() const {
+    return latency_flag_;
+  }
+  int32_t GetFpsFlag() const {
+    return fps_flag_;
+  }
+
+  const std::string camera_id_string_;
+  const uint32_t camera_id_;
+  const int32_t latency_flag_;
+  const int32_t fps_flag_;
+
+  // Protect all API functions mutually exclusive, all member variables should
+  // also be protected by this mutex.
+  std::mutex api_mutex_;
+  std::shared_ptr<Profiler> latency_profiler_;
+  std::shared_ptr<Profiler> fps_profiler_;
+  bool has_camera_open_;
+  uint8_t config_count_;
+  uint8_t flush_count_;
+  uint8_t idle_count_;
 };
 
-std::unique_ptr<HidlProfiler> gHidlProfiler = nullptr;
-// Mutex to make all API functions mutually exclusive.
-std::mutex api_mutex;
-
-void StartNewConnector() {
-  if (gHidlProfiler != nullptr && gHidlProfiler->profiler != nullptr) {
-    gHidlProfiler->profiler->Start("<-- IDLE -->",
-                                   ++gHidlProfiler->connector_counter);
+class HidlProfilerMock : public HidlProfiler {
+  std::unique_ptr<HidlScopedProfiler> MakeScopedProfiler(ScopedType) override {
+    return nullptr;
   }
-}
 
-void EndConnector() {
-  if (gHidlProfiler != nullptr && gHidlProfiler->profiler != nullptr &&
-      gHidlProfiler->connector_counter != 0) {
-    gHidlProfiler->profiler->End("<-- IDLE -->",
-                                 gHidlProfiler->connector_counter);
+  void FirstFrameStart() override {
   }
-}
 
-void EndProfiler() {
-  gHidlProfiler = nullptr;
-}
+  void FirstFrameEnd() override {
+  }
+
+  void ProfileFrameRate(const std::string&) override {
+  }
+
+  void SetLatencyProfiler(
+      std::unique_ptr<google::camera_common::Profiler> /* profiler */) override {
+  }
+
+  void SetFpsProfiler(
+      std::unique_ptr<google::camera_common::Profiler> /* profiler */) override {
+  }
+
+  uint32_t GetCameraId() const override {
+    return 0;
+  }
+  int32_t GetLatencyFlag() const override {
+    return 0;
+  }
+  int32_t GetFpsFlag() const override {
+    return 0;
+  }
+};
 
 }  // anonymous namespace
 
-std::unique_ptr<HidlProfilerItem> OnCameraOpen() {
-  std::lock_guard lock(api_mutex);
-  gHidlProfiler = std::make_unique<HidlProfiler>();
-  if (gHidlProfiler == nullptr || gHidlProfiler->profiler == nullptr) {
-    ALOGE("%s: gHidlProfiler or profiler is nullptr.", __FUNCTION__);
-    return nullptr;
+std::shared_ptr<HidlProfiler> HidlProfiler::Create(uint32_t camera_id) {
+  int32_t latency_flag = property_get_int32(
+      kPropKeyProfileOpenClose, Profiler::SetPropFlag::kCustomProfiler);
+  int32_t fps_flag = property_get_int32(kPropKeyProfileFps,
+                                        Profiler::SetPropFlag::kCustomProfiler);
+  if (latency_flag == Profiler::SetPropFlag::kDisable &&
+      fps_flag == Profiler::SetPropFlag::kDisable) {
+    return std::make_shared<HidlProfilerMock>();
   }
-
-  gHidlProfiler->has_camera_open = true;
-  gHidlProfiler->profiler->SetUseCase("Open Camera");
-
-  return std::make_unique<HidlProfilerItem>(gHidlProfiler->profiler, "Open",
-                                            StartNewConnector);
+  // Use stopwatch flag to print result.
+  if ((latency_flag & Profiler::SetPropFlag::kPrintBit) != 0) {
+    latency_flag |= Profiler::SetPropFlag::kStopWatch;
+  }
+  // Use interval flag to print fps instead of print on end.
+  if ((fps_flag & Profiler::SetPropFlag::kPrintBit) != 0) {
+    fps_flag |= Profiler::SetPropFlag::kPrintFpsPerIntervalBit;
+    fps_flag &= ~Profiler::SetPropFlag::kPrintBit;
+  }
+  return std::make_shared<HidlProfilerImpl>(camera_id, latency_flag, fps_flag);
 }
 
-std::unique_ptr<HidlProfilerItem> OnCameraFlush() {
-  std::lock_guard lock(api_mutex);
-  EndConnector();
-  if (gHidlProfiler == nullptr) {
-    gHidlProfiler = std::make_unique<HidlProfiler>();
-  }
-
-  if (gHidlProfiler == nullptr || gHidlProfiler->profiler == nullptr) {
-    ALOGE("%s: gHidlProfiler or profiler is nullptr.", __FUNCTION__);
-    return nullptr;
-  }
-  gHidlProfiler->profiler->SetUseCase("Flush Camera");
-  return std::make_unique<HidlProfilerItem>(gHidlProfiler->profiler, "Flush",
-                                            StartNewConnector,
-                                            gHidlProfiler->flush_counter++);
+HidlScopedProfiler::HidlScopedProfiler(std::shared_ptr<Profiler> profiler,
+                                       const std::string name, int id,
+                                       std::function<void()> end_callback)
+    : profiler_(profiler),
+      name_(std::move(name)),
+      id_(id),
+      end_callback_(end_callback) {
+  profiler_->Start(name_, id_);
+  profiler_->Start(kHalTotal, Profiler::kInvalidRequestId);
 }
 
-std::unique_ptr<HidlProfilerItem> OnCameraClose() {
-  std::lock_guard lock(api_mutex);
-  EndConnector();
-  if (gHidlProfiler == nullptr) {
-    gHidlProfiler = std::make_unique<HidlProfiler>();
-  }
-
-  if (gHidlProfiler == nullptr || gHidlProfiler->profiler == nullptr) {
-    ALOGE("%s: gHidlProfiler or profiler is nullptr.", __FUNCTION__);
-    return nullptr;
-  }
-
-  gHidlProfiler->profiler->SetUseCase("Close Camera");
-  return std::make_unique<HidlProfilerItem>(gHidlProfiler->profiler, "Close",
-                                            EndProfiler);
-}
-
-std::unique_ptr<HidlProfilerItem> OnCameraStreamConfigure() {
-  std::lock_guard lock(api_mutex);
-  EndConnector();
-  if (gHidlProfiler == nullptr) {
-    gHidlProfiler = std::make_unique<HidlProfiler>();
-  }
-
-  if (gHidlProfiler == nullptr || gHidlProfiler->profiler == nullptr) {
-    ALOGE("%s: gHidlProfiler or profiler is nullptr.", __FUNCTION__);
-    return nullptr;
-  }
-
-  if (!gHidlProfiler->has_camera_open) {
-    gHidlProfiler->profiler->SetUseCase("Reconfigure Stream");
-  }
-
-  return std::make_unique<HidlProfilerItem>(
-      gHidlProfiler->profiler, "configureStreams", StartNewConnector,
-      gHidlProfiler->config_counter++);
-}
-
-void OnFirstFrameRequest() {
-  std::lock_guard lock(api_mutex);
-  EndConnector();
-  if (gHidlProfiler != nullptr && gHidlProfiler->profiler != nullptr) {
-    gHidlProfiler->profiler->Start(
-        "First frame", google::camera_common::Profiler::kInvalidRequestId);
-    gHidlProfiler->profiler->Start(
-        "HAL Total", google::camera_common::Profiler::kInvalidRequestId);
+HidlScopedProfiler::~HidlScopedProfiler() {
+  profiler_->End(kHalTotal, Profiler::kInvalidRequestId);
+  profiler_->End(name_, id_);
+  if (end_callback_) {
+    end_callback_();
   }
 }
 
-void OnFirstFrameResult() {
-  std::lock_guard lock(api_mutex);
-  if (gHidlProfiler != nullptr && gHidlProfiler->profiler != nullptr) {
-    gHidlProfiler->profiler->End(
-        "First frame", google::camera_common::Profiler::kInvalidRequestId);
-    gHidlProfiler->profiler->End(
-        "HAL Total", google::camera_common::Profiler::kInvalidRequestId);
-    EndProfiler();
-  }
-}
-
-HidlProfilerItem::HidlProfilerItem(
-    std::shared_ptr<google::camera_common::Profiler> profiler,
-    const std::string target, std::function<void()> on_end, int request_id)
-    : profiler_(profiler), target_(std::move(target)), request_id_(request_id) {
-  on_end_ = on_end;
-  profiler_->Start(target_, request_id_);
-  profiler_->Start("HAL Total",
-                   google::camera_common::Profiler::kInvalidRequestId);
-}
-
-HidlProfilerItem::~HidlProfilerItem() {
-  profiler_->End("HAL Total",
-                 google::camera_common::Profiler::kInvalidRequestId);
-  profiler_->End(target_, request_id_);
-  if (on_end_) {
-    on_end_();
-  }
-}
-
-}  // namespace hidl_profiler
 }  // namespace implementation
 }  // namespace camera
 }  // namespace hardware
diff --git a/common/hal/hidl_service/hidl_profiler.h b/common/hal/hidl_service/hidl_profiler.h
index 5529d17..131a0c3 100644
--- a/common/hal/hidl_service/hidl_profiler.h
+++ b/common/hal/hidl_service/hidl_profiler.h
@@ -25,47 +25,63 @@
 namespace hardware {
 namespace camera {
 namespace implementation {
-namespace hidl_profiler {
 
-class HidlProfilerItem {
+class HidlScopedProfiler {
  public:
-  HidlProfilerItem(
-      std::shared_ptr<google::camera_common::Profiler> profiler,
-      const std::string target, std::function<void()> on_end,
-      int request_id = google::camera_common::Profiler::kInvalidRequestId);
+  HidlScopedProfiler(std::shared_ptr<google::camera_common::Profiler> profiler,
+                     const std::string name, int id,
+                     std::function<void()> end_callback);
 
-  ~HidlProfilerItem();
+  ~HidlScopedProfiler();
 
  private:
   std::shared_ptr<google::camera_common::Profiler> profiler_;
-  const std::string target_;
-  int request_id_;
-  std::function<void()> on_end_;
+  const std::string name_;
+  int id_;
+  std::function<void()> end_callback_;
 };
 
-// Start timer for open camera. The timer will stop when the returned
-// ProfilerItem is destroyed.
-std::unique_ptr<HidlProfilerItem> OnCameraOpen();
+class HidlProfiler {
+ public:
+  enum class ScopedType {
+    kOpen,
+    kConfigureStream,
+    kFlush,
+    kClose,
+  };
+  virtual ~HidlProfiler() = default;
 
-// Start timer for flush camera. The timer will stop when the returned
-// ProfilerItem is destroyed.
-std::unique_ptr<HidlProfilerItem> OnCameraFlush();
+  static std::shared_ptr<HidlProfiler> Create(uint32_t camera_id);
 
-// Start timer for close camera. The timer will stop when the returned
-// ProfilerItem is destroyed.
-std::unique_ptr<HidlProfilerItem> OnCameraClose();
+  // Make a ScopedProfiler for given type.
+  virtual std::unique_ptr<HidlScopedProfiler> MakeScopedProfiler(
+      ScopedType type) = 0;
 
-// Start timer for configure streams. The timer will stop when the returned
-// ProfilerItem is destroyed.
-std::unique_ptr<HidlProfilerItem> OnCameraStreamConfigure();
+  // Call when first frame is requested.
+  virtual void FirstFrameStart() = 0;
 
-// Call when first frame is requested.
-void OnFirstFrameRequest();
+  // Call when all bufer in first frame is received.
+  virtual void FirstFrameEnd() = 0;
 
-// Call when all bufer in first frame is received.
-void OnFirstFrameResult();
+  // Call to profile frame rate for each stream.
+  virtual void ProfileFrameRate(const std::string& name) = 0;
 
-}  // namespace hidl_profiler
+  // Give a customized latency profiler so that client side can intercept various calls.
+  virtual void SetLatencyProfiler(
+      std::unique_ptr<google::camera_common::Profiler> profiler) = 0;
+
+  // Give a customized fps profiler so that client side can intercept various calls.
+  virtual void SetFpsProfiler(
+      std::unique_ptr<google::camera_common::Profiler> profiler) = 0;
+
+  virtual uint32_t GetCameraId() const = 0;
+  virtual int32_t GetLatencyFlag() const = 0;
+  virtual int32_t GetFpsFlag() const = 0;
+
+ protected:
+  HidlProfiler() = default;
+};
+
 }  // namespace implementation
 }  // namespace camera
 }  // namespace hardware
diff --git a/common/hal/hidl_service/hidl_utils.cc b/common/hal/hidl_service/hidl_utils.cc
index 7967a43..5c7602d 100644
--- a/common/hal/hidl_service/hidl_utils.cc
+++ b/common/hal/hidl_service/hidl_utils.cc
@@ -33,8 +33,10 @@
 using ::android::hardware::camera::device::V3_2::ErrorMsg;
 using ::android::hardware::camera::device::V3_2::MsgType;
 using ::android::hardware::camera::device::V3_2::ShutterMsg;
-using ::android::hardware::camera::device::V3_5::implementation::HidlCameraDevice;
-using ::android::hardware::camera::provider::V2_6::implementation::HidlCameraProvider;
+using ::android::hardware::camera::device::V3_7::implementation::HidlCameraDevice;
+using android::hardware::camera::metadata::V3_6::
+    CameraMetadataEnumAndroidSensorPixelMode;
+using ::android::hardware::camera::provider::V2_7::implementation::HidlCameraProvider;
 
 status_t ConvertToHidlVendorTagType(
     google_camera_hal::CameraMetadataType hal_type,
@@ -190,29 +192,30 @@
   hidl_hal_stream_config->streams.resize(hal_configured_streams.size());
 
   for (uint32_t i = 0; i < hal_configured_streams.size(); i++) {
+    hidl_hal_stream_config->streams[i].supportOffline = false;
     if (hal_configured_streams[i].is_physical_camera_stream) {
-      hidl_hal_stream_config->streams[i].physicalCameraId =
+      hidl_hal_stream_config->streams[i].v3_4.physicalCameraId =
           std::to_string(hal_configured_streams[i].physical_camera_id);
     }
 
-    hidl_hal_stream_config->streams[i].v3_3.overrideDataSpace =
+    hidl_hal_stream_config->streams[i].v3_4.v3_3.overrideDataSpace =
         hal_configured_streams[i].override_data_space;
 
-    hidl_hal_stream_config->streams[i].v3_3.v3_2.id =
+    hidl_hal_stream_config->streams[i].v3_4.v3_3.v3_2.id =
         hal_configured_streams[i].id;
 
-    hidl_hal_stream_config->streams[i].v3_3.v3_2.overrideFormat =
+    hidl_hal_stream_config->streams[i].v3_4.v3_3.v3_2.overrideFormat =
         (::android::hardware::graphics::common::V1_0::PixelFormat)
             hal_configured_streams[i]
                 .override_format;
 
-    hidl_hal_stream_config->streams[i].v3_3.v3_2.producerUsage =
+    hidl_hal_stream_config->streams[i].v3_4.v3_3.v3_2.producerUsage =
         hal_configured_streams[i].producer_usage;
 
-    hidl_hal_stream_config->streams[i].v3_3.v3_2.consumerUsage =
+    hidl_hal_stream_config->streams[i].v3_4.v3_3.v3_2.consumerUsage =
         hal_configured_streams[i].consumer_usage;
 
-    hidl_hal_stream_config->streams[i].v3_3.v3_2.maxBuffers =
+    hidl_hal_stream_config->streams[i].v3_4.v3_3.v3_2.maxBuffers =
         hal_configured_streams[i].max_buffers;
   }
 
@@ -712,11 +715,11 @@
     return BAD_VALUE;
   }
 
-  hal_request->frame_number = hidl_request.v3_2.frameNumber;
+  hal_request->frame_number = hidl_request.v3_4.v3_2.frameNumber;
 
   status_t res = ConvertToHalMetadata(
-      hidl_request.v3_2.fmqSettingsSize, request_metadata_queue,
-      hidl_request.v3_2.settings, &hal_request->settings);
+      hidl_request.v3_4.v3_2.fmqSettingsSize, request_metadata_queue,
+      hidl_request.v3_4.v3_2.settings, &hal_request->settings);
   if (res != OK) {
     ALOGE("%s: Converting metadata failed: %s(%d)", __FUNCTION__,
           strerror(-res), res);
@@ -724,8 +727,9 @@
   }
 
   google_camera_hal::StreamBuffer hal_buffer = {};
-  if (hidl_request.v3_2.inputBuffer.buffer != nullptr) {
-    res = ConvertToHalStreamBuffer(hidl_request.v3_2.inputBuffer, &hal_buffer);
+  if (hidl_request.v3_4.v3_2.inputBuffer.buffer != nullptr) {
+    res = ConvertToHalStreamBuffer(hidl_request.v3_4.v3_2.inputBuffer,
+                                   &hal_buffer);
     if (res != OK) {
       ALOGE("%s: Converting hal stream buffer failed: %s(%d)", __FUNCTION__,
             strerror(-res), res);
@@ -733,9 +737,11 @@
     }
 
     hal_request->input_buffers.push_back(hal_buffer);
+    hal_request->input_width = hidl_request.inputWidth;
+    hal_request->input_height = hidl_request.inputHeight;
   }
 
-  for (auto& buffer : hidl_request.v3_2.outputBuffers) {
+  for (auto& buffer : hidl_request.v3_4.v3_2.outputBuffers) {
     hal_buffer = {};
     status_t res = ConvertToHalStreamBuffer(buffer, &hal_buffer);
     if (res != OK) {
@@ -747,7 +753,7 @@
     hal_request->output_buffers.push_back(hal_buffer);
   }
 
-  for (auto hidl_physical_settings : hidl_request.physicalCameraSettings) {
+  for (auto hidl_physical_settings : hidl_request.v3_4.physicalCameraSettings) {
     std::unique_ptr<google_camera_hal::HalCameraMetadata> hal_physical_settings;
     res = ConvertToHalMetadata(
         hidl_physical_settings.fmqSettingsSize, request_metadata_queue,
@@ -809,6 +815,16 @@
   return OK;
 }
 
+static bool sensorPixelModeContains(const device::V3_7::Stream& hidl_stream,
+                                    uint32_t key) {
+  for (auto& i : hidl_stream.sensorPixelModesUsed) {
+    if (i == static_cast<CameraMetadataEnumAndroidSensorPixelMode>(key)) {
+      return true;
+    }
+  }
+  return false;
+}
+
 status_t ConverToHalStreamConfig(
     const StreamConfiguration& hidl_stream_config,
     google_camera_hal::StreamConfiguration* hal_stream_config) {
@@ -819,27 +835,35 @@
 
   status_t res;
 
-  for (auto hidl_stream : hidl_stream_config.v3_4.streams) {
+  for (auto hidl_stream : hidl_stream_config.streams) {
     google_camera_hal::Stream hal_stream;
-    res = ConvertToHalStream(hidl_stream, &hal_stream);
+    res = ConvertToHalStream(hidl_stream.v3_4, &hal_stream);
     if (res != OK) {
       ALOGE("%s: Converting to HAL stream failed: %s(%d)", __FUNCTION__,
             strerror(-res), res);
       return res;
     }
+    hal_stream.group_id = hidl_stream.groupId;
 
+    hal_stream.used_in_max_resolution_mode = sensorPixelModeContains(
+        hidl_stream, ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+    hal_stream.used_in_default_resolution_mode =
+        hidl_stream.sensorPixelModesUsed.size() > 0
+            ? sensorPixelModeContains(hidl_stream,
+                                      ANDROID_SENSOR_PIXEL_MODE_DEFAULT)
+            : true;
     hal_stream_config->streams.push_back(hal_stream);
   }
 
-  res = ConvertToHalStreamConfigurationMode(
-      hidl_stream_config.v3_4.operationMode, &hal_stream_config->operation_mode);
+  res = ConvertToHalStreamConfigurationMode(hidl_stream_config.operationMode,
+                                            &hal_stream_config->operation_mode);
   if (res != OK) {
     ALOGE("%s: Converting to HAL opeation mode failed: %s(%d)", __FUNCTION__,
           strerror(-res), res);
     return res;
   }
 
-  res = ConvertToHalMetadata(0, nullptr, hidl_stream_config.v3_4.sessionParams,
+  res = ConvertToHalMetadata(0, nullptr, hidl_stream_config.sessionParams,
                              &hal_stream_config->session_params);
   if (res != OK) {
     ALOGE("%s: Converting to HAL metadata failed: %s(%d)", __FUNCTION__,
@@ -849,6 +873,8 @@
 
   hal_stream_config->stream_config_counter =
       hidl_stream_config.streamConfigCounter;
+  hal_stream_config->multi_resolution_input_image =
+      hidl_stream_config.multiResolutionInputImage;
 
   return OK;
 }
@@ -1087,6 +1113,26 @@
 
   return OK;
 }
+
+status_t ConvertStreamConfigurationV34ToV37(
+    const device::V3_4::StreamConfiguration& config_3_4,
+    StreamConfiguration* config_3_7) {
+  if (config_3_7 == nullptr) {
+    ALOGE("%s: config_3_7 is nullptr.", __FUNCTION__);
+    return BAD_VALUE;
+  }
+
+  config_3_7->streams.resize(config_3_4.streams.size());
+  for (size_t i = 0; i < config_3_4.streams.size(); i++) {
+    config_3_7->streams[i].v3_4 = config_3_4.streams[i];
+    config_3_7->streams[i].groupId = -1;
+  }
+  config_3_7->operationMode = config_3_4.operationMode;
+  config_3_7->sessionParams = config_3_4.sessionParams;
+  config_3_7->multiResolutionInputImage = false;
+
+  return OK;
+}
 }  // namespace hidl_utils
 }  // namespace implementation
 }  // namespace camera
diff --git a/common/hal/hidl_service/hidl_utils.h b/common/hal/hidl_service/hidl_utils.h
index 72b0ac0..46c43e1 100644
--- a/common/hal/hidl_service/hidl_utils.h
+++ b/common/hal/hidl_service/hidl_utils.h
@@ -18,7 +18,7 @@
 #define HARDWARE_GOOGLE_CAMERA_HAL_HIDL_SERVICE_HIDL_UTILS_H_
 
 #include <android/hardware/camera/common/1.0/types.h>
-#include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
+#include <android/hardware/camera/device/3.7/ICameraDeviceSession.h>
 #include <fmq/MessageQueue.h>
 #include <hal_types.h>
 #include <hidl/HidlSupport.h>
@@ -47,16 +47,16 @@
 using ::android::hardware::camera::device::V3_2::StreamConfigurationMode;
 using ::android::hardware::camera::device::V3_2::StreamRotation;
 using ::android::hardware::camera::device::V3_2::StreamType;
-using ::android::hardware::camera::device::V3_4::CaptureRequest;
 using ::android::hardware::camera::device::V3_4::CaptureResult;
-using ::android::hardware::camera::device::V3_4::HalStreamConfiguration;
 using ::android::hardware::camera::device::V3_4::Stream;
 using ::android::hardware::camera::device::V3_5::BufferRequest;
 using ::android::hardware::camera::device::V3_5::BufferRequestStatus;
 using ::android::hardware::camera::device::V3_5::StreamBufferRequestError;
 using ::android::hardware::camera::device::V3_5::StreamBufferRet;
 using ::android::hardware::camera::device::V3_5::StreamBuffersVal;
-using ::android::hardware::camera::device::V3_5::StreamConfiguration;
+using ::android::hardware::camera::device::V3_6::HalStreamConfiguration;
+using ::android::hardware::camera::device::V3_7::CaptureRequest;
+using ::android::hardware::camera::device::V3_7::StreamConfiguration;
 
 // Util functions to convert the types between HIDL and Google Camera HAL.
 
@@ -184,10 +184,15 @@
 status_t ConvertToHalBufferReturnStatus(
     const StreamBufferRet& hidl_stream_buffer_return,
     google_camera_hal::BufferReturn* hal_buffer_return);
+
+status_t ConvertStreamConfigurationV34ToV37(
+    const device::V3_4::StreamConfiguration& config_3_4,
+    StreamConfiguration* config_3_7);
+
 }  // namespace hidl_utils
 }  // namespace implementation
 }  // namespace camera
 }  // namespace hardware
 }  // namespace android
 
-#endif  // HARDWARE_GOOGLE_CAMERA_HAL_HIDL_SERVICE_HIDL_UTILS_H_
\ No newline at end of file
+#endif  // HARDWARE_GOOGLE_CAMERA_HAL_HIDL_SERVICE_HIDL_UTILS_H_
diff --git a/common/hal/hidl_service/libc_wrappers.cc b/common/hal/hidl_service/libc_wrappers.cc
new file mode 100644
index 0000000..45b260b
--- /dev/null
+++ b/common/hal/hidl_service/libc_wrappers.cc
@@ -0,0 +1,205 @@
+#include <dirent.h>
+#include <dlfcn.h>
+#include <fcntl.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <string>
+#include <unordered_map>
+
+#define LOG_TAG "GCH_libc_wrappers"
+#include <log/log.h>
+
+//#define MONITOR_FILE_OP
+
+#ifdef __ANDROID_APEX__
+
+#ifdef MONITOR_FILE_OP
+
+using mkfifo_function_type = int (*)(const char*, mode_t);
+using access_function_type = int (*)(const char*, int);
+using mkdir_function_type = int (*)(const char*, mode_t);
+using rmdir_function_type = int (*)(const char*);
+using chdir_function_type = int (*)(const char*);
+using link_function_type = int (*)(const char*, const char*);
+using unlink_function_type = int (*)(const char*);
+using rename_function_type = int (*)(const char*, const char*);
+using stat_function_type = int (*)(const char*, struct stat*);
+using chmod_function_type = int (*)(const char*, mode_t);
+using chown_function_type = int (*)(const char*, uid_t, gid_t);
+using utime_function_type = int (*)(const char*, struct utimbuf*);
+using opendir_function_type = DIR* (*)(const char*);
+using execl_function_type = int (*)(const char*, const char*, ...);
+using execle_function_type = int (*)(const char*, const char*, ...);
+using execlp_function_type = int (*)(const char*, const char*, ...);
+using execv_function_type = int (*)(const char*, char* const[]);
+using execvp_function_type = int (*)(const char*, char* const[]);
+using openat_function_type = int (*)(int, const char*, int, ...);
+
+extern "C" int mkfifo(const char* pathname, mode_t mode) {
+  static auto real_function =
+      reinterpret_cast<mkfifo_function_type>(dlsym(RTLD_NEXT, "mkfifo"));
+  int ret = real_function(pathname, mode);
+  ALOGI("mkfifo (%s,%d) == %d", pathname, mode, ret);
+  return ret;
+}
+extern "C" int access(const char* pathname, int mode) {
+  static auto real_function =
+      reinterpret_cast<access_function_type>(dlsym(RTLD_NEXT, "access"));
+  int ret = real_function(pathname, mode);
+  ALOGI("access (%s,%d) == %d", pathname, mode, ret);
+  return ret;
+}
+extern "C" int mkdir(const char* pathname, mode_t mode) {
+  static auto real_function =
+      reinterpret_cast<mkdir_function_type>(dlsym(RTLD_NEXT, "mkdir"));
+  int ret = real_function(pathname, mode);
+  ALOGI("mkdir (%s,%d) == %d", pathname, mode, ret);
+  return ret;
+}
+extern "C" int rmdir(const char* pathname) {
+  static auto real_function =
+      reinterpret_cast<rmdir_function_type>(dlsym(RTLD_NEXT, "rmdir"));
+  int ret = real_function(pathname);
+  ALOGI("rmdir (%s) == %d", pathname, ret);
+  return ret;
+}
+extern "C" int chdir(const char* path) {
+  static auto real_function =
+      reinterpret_cast<chdir_function_type>(dlsym(RTLD_NEXT, "chdir"));
+  int ret = real_function(path);
+  ALOGI("chdir (%s) == %d", path, ret);
+  return ret;
+}
+extern "C" int link(const char* oldpath, const char* newpath) {
+  static auto real_function =
+      reinterpret_cast<link_function_type>(dlsym(RTLD_NEXT, "link"));
+  int ret = real_function(oldpath, newpath);
+  ALOGI("link (%s,%s) == %d", oldpath, newpath, ret);
+  return ret;
+}
+extern "C" int unlink(const char* pathname) {
+  static auto real_function =
+      reinterpret_cast<unlink_function_type>(dlsym(RTLD_NEXT, "unlink"));
+  int ret = real_function(pathname);
+  ALOGI("unlink (%s) == %d", pathname, ret);
+  return ret;
+}
+extern "C" int rename(const char* oldpath, const char* newpath) {
+  static auto real_function =
+      reinterpret_cast<rename_function_type>(dlsym(RTLD_NEXT, "rename"));
+  int ret = real_function(oldpath, newpath);
+  ALOGI("rename (%s,%s) == %d", oldpath, newpath, ret);
+  return ret;
+}
+extern "C" int stat(const char* file_name, struct stat* buf) {
+  static auto real_function =
+      reinterpret_cast<stat_function_type>(dlsym(RTLD_NEXT, "stat"));
+  int ret = real_function(file_name, buf);
+  ALOGI("stat (%s, %p) == %d", file_name, buf, ret);
+  return ret;
+}
+extern "C" int chmod(const char* path, mode_t mode) {
+  static auto real_function =
+      reinterpret_cast<chmod_function_type>(dlsym(RTLD_NEXT, "chmod"));
+  int ret = real_function(path, mode);
+  ALOGI("chmod (%s) == %d", path, ret);
+  return ret;
+}
+extern "C" int chown(const char* path, uid_t owner, gid_t group) {
+  static auto real_function =
+      reinterpret_cast<chown_function_type>(dlsym(RTLD_NEXT, "chown"));
+  int ret = real_function(path, owner, group);
+  ALOGI("chown (%s,%d,%d) == %d", path, owner, group, ret);
+  return ret;
+}
+extern "C" int utime(const char* filename, struct utimbuf* buf) {
+  static auto real_function =
+      reinterpret_cast<utime_function_type>(dlsym(RTLD_NEXT, "utime"));
+  int ret = real_function(filename, buf);
+  ALOGI("utime (%s, %p) == %d", filename, buf, ret);
+  return ret;
+}
+extern "C" DIR* opendir(const char* name) {
+  static auto real_function =
+      reinterpret_cast<opendir_function_type>(dlsym(RTLD_NEXT, "opendir"));
+  DIR* ret = real_function(name);
+  ALOGI("opendir (%s) == %p", name, ret);
+  return ret;
+}
+extern "C" int execl(const char* path, const char* arg, ...) {
+  static auto real_function =
+      reinterpret_cast<execl_function_type>(dlsym(RTLD_NEXT, "execl"));
+  int ret = real_function(path, arg);
+  ALOGI("execl (%s, %s) == %d", path, arg, ret);
+  return ret;
+}
+extern "C" int execle(const char* path, const char* arg, ...) {
+  static auto real_function =
+      reinterpret_cast<execle_function_type>(dlsym(RTLD_NEXT, "execle"));
+  int ret = real_function(path, arg);
+  ALOGI("execle (%s, %s) == %d", path, arg, ret);
+  return ret;
+}
+extern "C" int execlp(const char* file, const char* arg, ...) {
+  static auto real_function =
+      reinterpret_cast<execlp_function_type>(dlsym(RTLD_NEXT, "execlp"));
+  int ret = real_function(file, arg);
+  ALOGI("execlp (%s, %s) == %d", file, arg, ret);
+  return ret;
+}
+extern "C" int execv(const char* path, char* const argv[]) {
+  static auto real_function =
+      reinterpret_cast<execv_function_type>(dlsym(RTLD_NEXT, "execv"));
+  int ret = real_function(path, argv);
+  ALOGI("execv (%s, %s, ...) == %d", path, argv[0], ret);
+  return ret;
+}
+extern "C" int execvp(const char* file, char* const argv[]) {
+  static auto real_function =
+      reinterpret_cast<execvp_function_type>(dlsym(RTLD_NEXT, "execvp"));
+  int ret = real_function(file, argv);
+  ALOGI("execvp (%s, %s,...) == %d", file, argv[0], ret);
+  return ret;
+}
+
+extern "C" int openat(int dirfd, const char* pathname, int flags, ...) {
+  static auto real_function =
+      reinterpret_cast<openat_function_type>(dlsym(RTLD_NEXT, "openat"));
+  int ret = real_function(dirfd, pathname, flags);
+  ALOGI("openat(%d, %s, 0x%x) == %d", dirfd, pathname, flags, ret);
+  return ret;
+}
+#endif
+
+using dlopen_function_type = void* (*)(const char*, int);
+
+// This is a temporary workaround for prebuilts calling dlopen with absolute
+// paths.
+extern "C" void* dlopen(const char* filename, int flags) {
+  static auto real_dlopen_function =
+      reinterpret_cast<dlopen_function_type>(dlsym(RTLD_NEXT, "dlopen"));
+  if (!real_dlopen_function) {
+    ALOGE("Could not RTLD_NEXT dlopen, something very wrong.");
+    std::abort();
+  }
+  void* ret = real_dlopen_function(filename, flags);
+  if (!ret) {
+    ALOGI("dlopen(%s) failed, seeing if we can fix it", filename);
+    std::string original_filename(filename);
+
+    if (original_filename.find("/vendor/") == 0) {
+      std::string new_filename = "/apex/com.google.pixel.camera.hal/" +
+                                 original_filename.substr(strlen("/vendor/"));
+      ALOGI("Trying %s instead of %s\n", new_filename.c_str(), filename);
+      ret = real_dlopen_function(new_filename.c_str(), flags);
+      if (ret) {
+        ALOGE(
+            "ERROR: Update your code to not use absolute paths. dlopen(%s) "
+            "failed but dlopen(%s) succeeded instead.",
+            filename, new_filename.c_str());
+      }
+    }
+  }
+  return ret;
+}
+#endif
diff --git a/common/hal/hidl_service/service.cc b/common/hal/hidl_service/service.cc
index 2b203d7..5acf397 100644
--- a/common/hal/hidl_service/service.cc
+++ b/common/hal/hidl_service/service.cc
@@ -15,19 +15,25 @@
  */
 
 #ifdef LAZY_SERVICE
-#define LOG_TAG "android.hardware.pixel.camera.provider@2.6-service-lazy"
+#define LOG_TAG "android.hardware.pixel.camera.provider@2.7-service-lazy"
 #else
-#define LOG_TAG "android.hardware.pixel.camera.provider@2.6-service"
+#define LOG_TAG "android.hardware.pixel.camera.provider@2.7-service"
 #endif
 
-#include <android/hardware/camera/provider/2.6/ICameraProvider.h>
+#include <android/hardware/camera/provider/2.7/ICameraProvider.h>
+#include <apex_update_listener.h>
 #include <binder/ProcessState.h>
-#include <hidl/LegacySupport.h>
+#include <cutils/properties.h>
+#include <hidl/HidlLazyUtils.h>
+#include <hidl/HidlTransportSupport.h>
 #include <malloc.h>
+#include <utils/Errors.h>
 
-using android::hardware::defaultLazyPassthroughServiceImplementation;
-using android::hardware::defaultPassthroughServiceImplementation;
-using android::hardware::camera::provider::V2_6::ICameraProvider;
+#include "hidl_camera_build_version.h"
+#include "hidl_camera_provider.h"
+
+using ::android::hardware::camera::provider::V2_7::ICameraProvider;
+using ::android::hardware::camera::provider::V2_7::implementation::HidlCameraProvider;
 
 #ifdef LAZY_SERVICE
 const bool kLazyService = true;
@@ -41,15 +47,50 @@
   // /dev/vndbinder
   mallopt(M_DECAY_TIME, 1);
   android::ProcessState::initWithDriver("/dev/vndbinder");
-  int res;
-  if (kLazyService) {
-    res = defaultLazyPassthroughServiceImplementation<ICameraProvider>(
-        "internal/0", /*maxThreads*/ 6);
-  } else {
-    res = defaultPassthroughServiceImplementation<ICameraProvider>(
-        "internal/0", /*maxThreads*/ 6);
-  }
+  android::hardware::configureRpcThreadpool(/*maxThreads=*/6,
+                                            /*callerWillJoin=*/true);
 
-  ALOGE("Google camera provider service ending with res %d", res);
-  return res;
+#ifdef __ANDROID_APEX__
+  int start_count = property_get_int32("vendor.camera.hal.start.count", 0);
+  property_set("vendor.camera.hal.start.count",
+               std::to_string(++start_count).c_str());
+  property_set("vendor.camera.hal.version",
+               std::to_string(kHalManifestBuildNumber).c_str());
+  property_set("vendor.camera.hal.build_id", kAndroidBuildId);
+  auto start_on_update =
+      ApexUpdateListener::Make("com.google.pixel.camera.hal", [](auto, auto) {
+        ALOGI("APEX version updated. starting.");
+        exit(0);
+      });
+  ALOGI(
+      "Using ApexUpdateListener: %p Start Count: %d Current Version: %s "
+      "(%ld)",
+      start_on_update.get(), start_count, kAndroidBuildId,
+      kHalManifestBuildNumber);
+#else
+  ALOGI("Not using ApexUpdateListener since not running in an apex.");
+#endif
+
+  android::sp<ICameraProvider> camera_provider = HidlCameraProvider::Create();
+  if (camera_provider == nullptr) {
+    return android::NO_INIT;
+  }
+  if (kLazyService) {
+    android::hardware::LazyServiceRegistrar& lazy_registrar =
+        android::hardware::LazyServiceRegistrar::getInstance();
+    if (lazy_registrar.registerService(camera_provider, "internal/0") !=
+        android::OK) {
+      ALOGE("Cannot register Google camera provider lazy service");
+      return android::NO_INIT;
+    }
+  } else {
+    if (camera_provider->registerAsService("internal/0") != android::OK) {
+      ALOGE("Cannot register Google camera provider service");
+      return android::NO_INIT;
+    }
+  }
+  android::hardware::joinRpcThreadpool();
+
+  // In normal operation, the threadpool should never return.
+  return EXIT_FAILURE;
 }
diff --git a/common/hal/hidl_service/version_script.sh b/common/hal/hidl_service/version_script.sh
new file mode 100755
index 0000000..2ab4e2b
--- /dev/null
+++ b/common/hal/hidl_service/version_script.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+#
+# This script generates the apex manifest version number (which is also used for
+# the outer aab/jar object's version available to PackageManager).
+#
+# That version is limited to INT_MAX
+# Strategy:
+#   if(local eng build)
+#      version = 2147480000
+#   else
+#      version = numeric part of build number
+#
+# 2147480000 is chosen as being a value that can install over any expected build
+# server build number that is still a little smaller than INT_MAX to leave room
+# for maneuvering
+
+default_eng_build_number=2147480000
+
+build_number=$(cat $OUT_DIR/soong/build_number.txt)
+if [[ "$build_number" == "eng."* ]]; then
+  numeric_build_number=$default_eng_build_number
+else
+  numeric_build_number=$(cat $OUT_DIR/soong/build_number.txt | tr -d -c 0-9)
+  if [[ -z "$numeric_build_number" ]]; then
+    numeric_build_number=$default_eng_build_number
+  fi
+  if ((numeric_build_number < 1)); then
+    numeric_build_number=1
+  fi
+  if ((numeric_build_number >= default_eng_build_number)); then
+    numeric_build_number=$((default_eng_build_number-1))
+  fi
+fi
+
+cat $1 | sed -E "s/\{BUILD_NUMBER\}/$numeric_build_number/g" | sed -E "s/\{BUILD_ID\}/$build_number/g" > $2
+
diff --git a/common/hal/hwl_interface/camera_device_hwl.h b/common/hal/hwl_interface/camera_device_hwl.h
index 481246b..19cd5fa 100644
--- a/common/hal/hwl_interface/camera_device_hwl.h
+++ b/common/hal/hwl_interface/camera_device_hwl.h
@@ -23,6 +23,7 @@
 #include "camera_device_session_hwl.h"
 #include "hal_camera_metadata.h"
 #include "hal_types.h"
+#include "profiler.h"
 
 namespace android {
 namespace google_camera_hal {
@@ -74,6 +75,12 @@
   // supported. stream_config contains the stream configurations.
   virtual bool IsStreamCombinationSupported(
       const StreamConfiguration& stream_config) = 0;
+
+  // Get customized profiler
+  virtual std::unique_ptr<google::camera_common::Profiler> GetProfiler(
+      uint32_t /* camera_id */, int /* option */) {
+    return nullptr;
+  }
 };
 
 }  // namespace google_camera_hal
diff --git a/common/hal/hwl_interface/camera_device_session_hwl.h b/common/hal/hwl_interface/camera_device_session_hwl.h
index b40bc79..0dcfebd 100644
--- a/common/hal/hwl_interface/camera_device_session_hwl.h
+++ b/common/hal/hwl_interface/camera_device_session_hwl.h
@@ -22,6 +22,7 @@
 #include "hal_camera_metadata.h"
 #include "hwl_types.h"
 #include "multicam_coordinator_hwl.h"
+#include "profiler.h"
 #include "session_data_defs.h"
 #include "zoom_ratio_mapper_hwl.h"
 
@@ -107,9 +108,8 @@
   // more than one request from a certain pipeline, this method will return an
   // error. All requests captured from camera sensors must be captured
   // synchronously.
-  virtual status_t SubmitRequests(
-      uint32_t frame_number,
-      const std::vector<HwlPipelineRequest>& requests) = 0;
+  virtual status_t SubmitRequests(uint32_t frame_number,
+                                  std::vector<HwlPipelineRequest>& requests) = 0;
 
   // Flush all pending requests.
   virtual status_t Flush() = 0;
@@ -164,6 +164,12 @@
 
   // Get zoom ratio mapper from HWL.
   virtual std::unique_ptr<ZoomRatioMapperHwl> GetZoomRatioMapperHwl() = 0;
+
+  // Get customized profiler
+  virtual std::unique_ptr<google::camera_common::Profiler> GetProfiler(
+      uint32_t /* camera_id */, int /* option */) {
+    return nullptr;
+  }
 };
 
 }  // namespace google_camera_hal
diff --git a/common/hal/google_camera_hal/capture_session.h b/common/hal/hwl_interface/capture_session.h
similarity index 91%
rename from common/hal/google_camera_hal/capture_session.h
rename to common/hal/hwl_interface/capture_session.h
index 970ba01..6cfe7a1 100644
--- a/common/hal/google_camera_hal/capture_session.h
+++ b/common/hal/hwl_interface/capture_session.h
@@ -60,8 +60,8 @@
   virtual status_t Flush() = 0;
 };
 
-// ExternalCaptureSessionFactory defines the interface of an external capture session,
-// in addition to `class CaptureSession`.
+// ExternalCaptureSessionFactory defines the interface of an external capture
+// session, in addition to `class CaptureSession`.
 class ExternalCaptureSessionFactory {
  public:
   virtual ~ExternalCaptureSessionFactory() = default;
@@ -78,14 +78,15 @@
       CameraDeviceSessionHwl* device_session_hwl,
       const StreamConfiguration& stream_config,
       ProcessCaptureResultFunc process_capture_result, NotifyFunc notify,
-      HwlRequestBuffersFunc request_stream_buffers,
+      HwlSessionCallback session_callback,
       std::vector<HalStream>* hal_configured_streams,
       CameraBufferAllocatorHwl* camera_allocator_hwl) = 0;
 };
 
-// GetCaptureSessionFactory should be called by the client to discover an external
-// capture session via dlsym.
-extern "C" ExternalCaptureSessionFactory* GetCaptureSessionFactory();
+#if !GCH_HWL_USE_DLOPEN
+extern "C" __attribute__((weak)) ExternalCaptureSessionFactory*
+GetCaptureSessionFactory();
+#endif
 
 }  // namespace google_camera_hal
 }  // namespace android
diff --git a/common/hal/hwl_interface/hwl_types.h b/common/hal/hwl_interface/hwl_types.h
index 84781de..cfe4622 100644
--- a/common/hal/hwl_interface/hwl_types.h
+++ b/common/hal/hwl_interface/hwl_types.h
@@ -45,6 +45,9 @@
   std::vector<std::unique_ptr<HalCameraMetadata>> input_buffer_metadata;
 
   std::vector<StreamBuffer> output_buffers;
+
+  int32_t input_width;
+  int32_t input_height;
 };
 
 // Define a HWL pipeline result.
diff --git a/common/hal/google_camera_hal/multicam_coordinator.h b/common/hal/hwl_interface/multicam_coordinator.h
similarity index 100%
rename from common/hal/google_camera_hal/multicam_coordinator.h
rename to common/hal/hwl_interface/multicam_coordinator.h
diff --git a/common/hal/google_camera_hal/multicam_roi_translator.h b/common/hal/hwl_interface/multicam_roi_translator.h
similarity index 100%
rename from common/hal/google_camera_hal/multicam_roi_translator.h
rename to common/hal/hwl_interface/multicam_roi_translator.h
diff --git a/common/hal/google_camera_hal/process_block.h b/common/hal/hwl_interface/process_block.h
similarity index 86%
rename from common/hal/google_camera_hal/process_block.h
rename to common/hal/hwl_interface/process_block.h
index 3e84c59..8303aba 100644
--- a/common/hal/google_camera_hal/process_block.h
+++ b/common/hal/hwl_interface/process_block.h
@@ -96,6 +96,25 @@
   virtual status_t Flush() = 0;
 };
 
+// ExternalProcessBlockFactory defines the interface of an external process
+// block, in addition to `class ProcessBlock`.
+class ExternalProcessBlockFactory {
+ public:
+  virtual ~ExternalProcessBlockFactory() = default;
+
+  // Create is called by the client to create a process block and get a unique
+  // pointer to the process block.
+  virtual std::unique_ptr<ProcessBlock> CreateProcessBlock(
+      CameraDeviceSessionHwl* device_session_hwl) = 0;
+
+  virtual std::string GetBlockName() const = 0;
+};
+
+#if !GCH_HWL_USE_DLOPEN
+extern "C" __attribute__((weak)) ExternalProcessBlockFactory*
+GetSnapshotProcessBlockFactory();
+#endif
+
 }  // namespace google_camera_hal
 }  // namespace android
 
diff --git a/common/hal/google_camera_hal/request_processor.h b/common/hal/hwl_interface/request_processor.h
similarity index 100%
rename from common/hal/google_camera_hal/request_processor.h
rename to common/hal/hwl_interface/request_processor.h
diff --git a/common/hal/google_camera_hal/result_processor.h b/common/hal/hwl_interface/result_processor.h
similarity index 100%
rename from common/hal/google_camera_hal/result_processor.h
rename to common/hal/hwl_interface/result_processor.h
diff --git a/common/hal/hwl_interface/zoom_ratio_mapper_hwl.h b/common/hal/hwl_interface/zoom_ratio_mapper_hwl.h
index edfc825..56d2ad9 100644
--- a/common/hal/hwl_interface/zoom_ratio_mapper_hwl.h
+++ b/common/hal/hwl_interface/zoom_ratio_mapper_hwl.h
@@ -30,6 +30,16 @@
   // Limit zoom ratio if concurrent mode is on
   virtual void LimitZoomRatioIfConcurrent(float* zoom_ratio) const = 0;
 
+  // Helper method to get the active array size for this particular capture
+  // request / result (which might be useful for device specific features).
+  // Returns a bool specifying whether the 'out' Dimension parameter has a valid
+  // dimension.
+  virtual bool GetActiveArrayDimensionToBeUsed(
+      uint32_t /*camera_id*/,
+      const HalCameraMetadata* /*settings / result_metadata*/,
+      Dimension* /*active_array_dimension*/) const {
+    return false;
+  }
   // Apply zoom ratio to capture request
   virtual void UpdateCaptureRequest(CaptureRequest* request) = 0;
 
diff --git a/common/hal/tests/Android.bp b/common/hal/tests/Android.bp
index f4c06d2..88d7ce0 100644
--- a/common/hal/tests/Android.bp
+++ b/common/hal/tests/Android.bp
@@ -25,7 +25,7 @@
 
 cc_library_headers {
     name: "libgoogle_camera_hal_tests_headers",
-    vendor_available: true,
+    vendor: true,
     export_include_dirs: [
         ".",
     ],
@@ -34,8 +34,9 @@
 cc_library_shared {
     name: "libgoogle_camera_hal_tests",
     defaults: ["google_camera_hal_defaults"],
+    compile_multilib: "first",
     owner: "google",
-    vendor_available: true,
+    vendor: true,
     srcs: [
         "camera_device_session_tests.cc",
         "camera_device_tests.cc",
@@ -61,6 +62,7 @@
         "android.hardware.graphics.mapper@2.0",
         "android.hardware.graphics.mapper@3.0",
         "android.hardware.graphics.mapper@4.0",
+        "lib_profiler",
         "libcamera_metadata",
         "libcutils",
         "libgooglecamerahal",
@@ -77,11 +79,15 @@
     header_libs: [
         "libhardware_headers",
     ],
+    export_shared_lib_headers: [
+        "lib_profiler",
+    ],
     export_include_dirs: ["."],
 }
 
 cc_test {
     name: "google_camera_hal_tests",
+    compile_multilib: "first",
     owner: "google",
     vendor: true,
     gtest: true,
diff --git a/common/hal/tests/camera_device_session_tests.cc b/common/hal/tests/camera_device_session_tests.cc
index 823ece7..75682a2 100644
--- a/common/hal/tests/camera_device_session_tests.cc
+++ b/common/hal/tests/camera_device_session_tests.cc
@@ -20,6 +20,7 @@
 #include <sys/stat.h>
 
 #include <gtest/gtest.h>
+#include "utils.h"
 
 #include <algorithm>
 
@@ -44,33 +45,6 @@
     "/vendor/lib/camera/capture_sessions/";
 #endif
 
-// Returns an array of regular files under dir_path.
-static std::vector<std::string> FindLibraryPaths(const char* dir_path) {
-  std::vector<std::string> libs;
-
-  errno = 0;
-  DIR* dir = opendir(dir_path);
-  if (!dir) {
-    ALOGD("%s: Unable to open directory %s (%s)", __FUNCTION__, dir_path,
-          strerror(errno));
-    return libs;
-  }
-
-  struct dirent* entry = nullptr;
-  while ((entry = readdir(dir)) != nullptr) {
-    std::string lib_path(dir_path);
-    lib_path += entry->d_name;
-    struct stat st;
-    if (stat(lib_path.c_str(), &st) == 0) {
-      if (S_ISREG(st.st_mode)) {
-        libs.push_back(lib_path);
-      }
-    }
-  }
-
-  return libs;
-}
-
 class CameraDeviceSessionTests : public ::testing::Test {
  protected:
   static constexpr uint32_t kCaptureTimeoutMs = 3000;
@@ -94,7 +68,8 @@
       return OK;
     }
 
-    for (const auto& lib_path : FindLibraryPaths(kExternalCaptureSessionDir)) {
+    for (const auto& lib_path :
+         utils::FindLibraryPaths(kExternalCaptureSessionDir)) {
       ALOGI("%s: Loading %s", __FUNCTION__, lib_path.c_str());
       void* lib_handle = nullptr;
       lib_handle = dlopen(lib_path.c_str(), RTLD_NOW);
diff --git a/common/hal/tests/mock_device_hwl.h b/common/hal/tests/mock_device_hwl.h
index 1080705..18b7ef4 100644
--- a/common/hal/tests/mock_device_hwl.h
+++ b/common/hal/tests/mock_device_hwl.h
@@ -17,9 +17,9 @@
 #ifndef HARDWARE_GOOGLE_CAMERA_HAL_TESTS_MOCK_DEVICE_HWL_H_
 #define HARDWARE_GOOGLE_CAMERA_HAL_TESTS_MOCK_DEVICE_HWL_H_
 
-#include <camera_device_hwl.h>
 #include <unordered_map>
 
+#include "camera_device_hwl.h"
 #include "mock_device_session_hwl.h"
 
 namespace android {
@@ -114,6 +114,12 @@
   bool IsStreamCombinationSupported(const StreamConfiguration& /*stream_config*/) {
     return true;
   }
+
+  std::unique_ptr<google::camera_common::Profiler> GetProfiler(
+      uint32_t /* camera_id */, int /* option */) {
+    return nullptr;
+  }
+
   // Override functions in CameraDeviceHwl end.
 
   // The following members are public so the test can change the values easily.
diff --git a/common/hal/tests/mock_device_session_hwl.cc b/common/hal/tests/mock_device_session_hwl.cc
index 452f072..43e914e 100644
--- a/common/hal/tests/mock_device_session_hwl.cc
+++ b/common/hal/tests/mock_device_session_hwl.cc
@@ -15,13 +15,12 @@
  */
 
 #define LOG_TAG "MockDeviceSessionHwl"
-#include <log/log.h>
+#include "mock_device_session_hwl.h"
 
 #include <hardware/gralloc.h>
+#include <log/log.h>
 #include <system/graphics-base.h>
 
-#include "mock_device_session_hwl.h"
-
 namespace android {
 namespace google_camera_hal {
 
@@ -168,7 +167,7 @@
 }
 
 status_t FakeCameraDeviceSessionHwl::SubmitRequests(
-    uint32_t frame_number, const std::vector<HwlPipelineRequest>& requests) {
+    uint32_t frame_number, std::vector<HwlPipelineRequest>& requests) {
   std::lock_guard<std::mutex> lock(hwl_pipeline_lock_);
 
   for (auto& request : requests) {
@@ -283,6 +282,12 @@
   return nullptr;
 }
 
+std::unique_ptr<google::camera_common::Profiler>
+FakeCameraDeviceSessionHwl::GetProfiler(uint32_t /* camera_id */,
+                                        int /* option */) {
+  return nullptr;
+}
+
 std::unique_ptr<IMulticamCoordinatorHwl>
 FakeCameraDeviceSessionHwl::CreateMulticamCoordinatorHwl() {
   // Multicam coordinator not supported in this mock
diff --git a/common/hal/tests/mock_device_session_hwl.h b/common/hal/tests/mock_device_session_hwl.h
index 5e8a427..76b512b 100644
--- a/common/hal/tests/mock_device_session_hwl.h
+++ b/common/hal/tests/mock_device_session_hwl.h
@@ -20,6 +20,7 @@
 #include <camera_device_session.h>
 #include <gmock/gmock.h>
 
+#include "profiler.h"
 #include "session_data_defs.h"
 
 namespace android {
@@ -66,9 +67,8 @@
 
   void DestroyPipelines() override;
 
-  status_t SubmitRequests(
-      uint32_t frame_number,
-      const std::vector<HwlPipelineRequest>& requests) override;
+  status_t SubmitRequests(uint32_t frame_number,
+                          std::vector<HwlPipelineRequest>& requests) override;
 
   status_t Flush() override;
 
@@ -102,6 +102,9 @@
 
   std::unique_ptr<ZoomRatioMapperHwl> GetZoomRatioMapperHwl() override;
 
+  std::unique_ptr<google::camera_common::Profiler> GetProfiler(
+      uint32_t camera_id, int option) override;
+
  private:
   const uint32_t kCameraId;
   const std::vector<uint32_t> kPhysicalCameraIds;
@@ -155,7 +158,7 @@
 
   MOCK_METHOD2(SubmitRequests,
                status_t(uint32_t frame_number,
-                        const std::vector<HwlPipelineRequest>& requests));
+                        std::vector<HwlPipelineRequest>& requests));
 
   MOCK_METHOD0(Flush, status_t());
 
@@ -194,6 +197,9 @@
 
   MOCK_METHOD0(GetZoomRatioMapperHwl, std::unique_ptr<ZoomRatioMapperHwl>());
 
+  MOCK_METHOD2(GetProfiler, std::unique_ptr<google::camera_common::Profiler>(
+                                uint32_t camera_id, int option));
+
   // Delegate all calls to FakeCameraDeviceSessionHwl.
   void DelegateCallsToFakeSession();
 
diff --git a/common/hal/tests/zsl_buffer_manager_tests.cc b/common/hal/tests/zsl_buffer_manager_tests.cc
index c5b83c8..d72b250 100644
--- a/common/hal/tests/zsl_buffer_manager_tests.cc
+++ b/common/hal/tests/zsl_buffer_manager_tests.cc
@@ -97,7 +97,7 @@
     res = manager->ReturnFilledBuffer(i, stream_buffer);
     ASSERT_EQ(res, OK) << "ReturnFilledBuffer failed: " << strerror(res);
     auto metadata = HalCameraMetadata::Create(kNumEntries, kDataBytes);
-    res = manager->ReturnMetadata(i, metadata.get());
+    res = manager->ReturnMetadata(i, metadata.get(), /*partial_result=*/1);
     ASSERT_EQ(res, OK) << "ReturnMetadata failed: " << strerror(res);
   }
 }
@@ -133,7 +133,8 @@
 
       auto metadata = HalCameraMetadata::Create(kNumEntries, kDataBytes);
       SetMetadata(metadata);
-      res = manager->ReturnMetadata(frame_index, metadata.get());
+      res = manager->ReturnMetadata(frame_index, metadata.get(),
+                                    /*partial_result=*/1);
       ASSERT_EQ(res, OK) << "ReturnMetadata failed: " << strerror(res);
       frame_index++;
     }
@@ -169,7 +170,7 @@
   // when zsl buffer manager allocated_metadata_ size < 100
   for (uint32_t i = 0; i < kTestCycle; i++) {
     auto metadata = HalCameraMetadata::Create(kNumEntries, kDataBytes);
-    res = manager->ReturnMetadata(i, metadata.get());
+    res = manager->ReturnMetadata(i, metadata.get(), /*partial_result=*/1);
     ASSERT_EQ(res, OK) << "ReturnMetadata failed: " << strerror(res)
                        << " at:" << i;
   }
@@ -178,7 +179,7 @@
   // when zsl buffer manager allocated_metadata_ size >= 100
   for (uint32_t i = kTestCycle; i < kTestCycle + 20; i++) {
     auto metadata = HalCameraMetadata::Create(kNumEntries, kDataBytes);
-    res = manager->ReturnMetadata(i, metadata.get());
+    res = manager->ReturnMetadata(i, metadata.get(), /*partial_result=*/1);
     ASSERT_EQ(res, OK) << "ReturnMetadata failed: " << strerror(res)
                        << " at:" << i;
   }
diff --git a/common/hal/utils/Android.bp b/common/hal/utils/Android.bp
index fbf5e21..6b2dc58 100644
--- a/common/hal/utils/Android.bp
+++ b/common/hal/utils/Android.bp
@@ -28,11 +28,18 @@
     name: "libgooglecamerahalutils",
     defaults: ["google_camera_hal_defaults"],
     owner: "google",
-    vendor_available: true,
+    vendor: true,
     srcs: [
         "camera_id_manager.cc",
         "gralloc_buffer_allocator.cc",
         "hal_camera_metadata.cc",
+        "hal_utils.cc",
+        "hdrplus_process_block.cc",
+        "hdrplus_request_processor.cc",
+        "hdrplus_result_processor.cc",
+        "hwl_buffer_allocator.cc",
+        "internal_stream_manager.cc",
+        "multicam_realtime_process_block.cc",
         "pipeline_request_id_manager.cc",
         "result_dispatcher.cc",
         "stream_buffer_cache_manager.cc",
@@ -42,13 +49,19 @@
         "zsl_buffer_manager.cc",
     ],
     shared_libs: [
+        "lib_profiler",
         "libcamera_metadata",
         "libcutils",
+        "libgrallocusage",
         "libhardware",
         "liblog",
         "libutils",
+        "libui",
         "libsync",
     ],
+    export_shared_lib_headers: [
+        "lib_profiler",
+    ],
     export_include_dirs: ["."],
     include_dirs: [
         "system/media/private/camera/include"
diff --git a/common/hal/utils/gralloc_buffer_allocator.cc b/common/hal/utils/gralloc_buffer_allocator.cc
index 5131247..88bb7f6 100644
--- a/common/hal/utils/gralloc_buffer_allocator.cc
+++ b/common/hal/utils/gralloc_buffer_allocator.cc
@@ -20,6 +20,9 @@
 #include <log/log.h>
 #include <utils/Trace.h>
 
+#include <grallocusage/GrallocUsageConversion.h>
+#include <ui/GraphicBufferAllocator.h>
+
 #include "gralloc_buffer_allocator.h"
 
 namespace android {
@@ -27,109 +30,19 @@
 
 std::unique_ptr<IHalBufferAllocator> GrallocBufferAllocator::Create() {
   ATRACE_CALL();
-  auto gralloc_buffer =
+  auto gralloc_buffer_allocator =
       std::unique_ptr<GrallocBufferAllocator>(new GrallocBufferAllocator());
-  if (gralloc_buffer == nullptr) {
-    ALOGE("%s: Creating gralloc_buffer failed.", __FUNCTION__);
-    return nullptr;
-  }
-
-  status_t result = gralloc_buffer->Initialize();
-  if (result != OK) {
-    ALOGE("%s: GrallocBuffer Initialize failed.", __FUNCTION__);
+  if (gralloc_buffer_allocator == nullptr) {
+    ALOGE("%s: Creating gralloc_buffer_allocator failed.", __FUNCTION__);
     return nullptr;
   }
 
   std::unique_ptr<IHalBufferAllocator> base_allocator;
-  base_allocator.reset(gralloc_buffer.release());
+  base_allocator.reset(gralloc_buffer_allocator.release());
 
   return base_allocator;
 }
 
-GrallocBufferAllocator::~GrallocBufferAllocator() {
-  if (device_ != nullptr) {
-    gralloc1_close(device_);
-  }
-}
-
-status_t GrallocBufferAllocator::Initialize() {
-  ATRACE_CALL();
-  int32_t error =
-      hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t**)&module_);
-
-  if (error < 0) {
-    ALOGE("%s: Could not load GRALLOC HAL module: %d (%s)", __FUNCTION__, error,
-          strerror(-error));
-    return INVALID_OPERATION;
-  }
-
-  gralloc1_open(module_, &device_);
-  if (device_ == nullptr) {
-    ALOGE("%s: gralloc1 open failed", __FUNCTION__);
-    return INVALID_OPERATION;
-  }
-
-  InitGrallocInterface(GRALLOC1_FUNCTION_CREATE_DESCRIPTOR, &create_descriptor_);
-  InitGrallocInterface(GRALLOC1_FUNCTION_DESTROY_DESCRIPTOR,
-                       &destroy_descriptor_);
-  InitGrallocInterface(GRALLOC1_FUNCTION_SET_DIMENSIONS, &set_dimensions_);
-  InitGrallocInterface(GRALLOC1_FUNCTION_SET_FORMAT, &set_format_);
-  InitGrallocInterface(GRALLOC1_FUNCTION_SET_CONSUMER_USAGE,
-                       &set_consumer_usage_);
-  InitGrallocInterface(GRALLOC1_FUNCTION_SET_PRODUCER_USAGE,
-                       &set_producer_usage_);
-  InitGrallocInterface(GRALLOC1_FUNCTION_GET_STRIDE, &get_stride_);
-  InitGrallocInterface(GRALLOC1_FUNCTION_ALLOCATE, &allocate_);
-  InitGrallocInterface(GRALLOC1_FUNCTION_RELEASE, &release_);
-  return OK;
-}
-
-template <typename T>
-void GrallocBufferAllocator::InitGrallocInterface(
-    gralloc1_function_descriptor_t desc, T* output_function) {
-  ATRACE_CALL();
-  auto function = device_->getFunction(device_, desc);
-  if (!function) {
-    ALOGE("%s: failed to get gralloc1 function %d", __FUNCTION__, desc);
-  }
-  *output_function = reinterpret_cast<T>(function);
-}
-
-status_t GrallocBufferAllocator::SetupDescriptor(
-    const BufferDescriptor& buffer_descriptor,
-    gralloc1_buffer_descriptor_t* output_descriptor) {
-  ATRACE_CALL();
-  int32_t error =
-      set_dimensions_(device_, *output_descriptor, buffer_descriptor.width,
-                      buffer_descriptor.height);
-  if (error != GRALLOC1_ERROR_NONE) {
-    ALOGE("%s: set_dimensions failed", __FUNCTION__);
-    return INVALID_OPERATION;
-  }
-
-  error = set_format_(device_, *output_descriptor, buffer_descriptor.format);
-  if (error != GRALLOC1_ERROR_NONE) {
-    ALOGE("%s: set_format failed", __FUNCTION__);
-    return INVALID_OPERATION;
-  }
-
-  error = set_producer_usage_(device_, *output_descriptor,
-                              buffer_descriptor.producer_flags);
-  if (error != GRALLOC1_ERROR_NONE) {
-    ALOGE("%s: set_producer_usage failed", __FUNCTION__);
-    return INVALID_OPERATION;
-  }
-
-  error = set_consumer_usage_(device_, *output_descriptor,
-                              buffer_descriptor.consumer_flags);
-  if (error != GRALLOC1_ERROR_NONE) {
-    ALOGE("%s: set_consumer_usage_ failed", __FUNCTION__);
-    return INVALID_OPERATION;
-  }
-
-  return OK;
-}
-
 void GrallocBufferAllocator::ConvertHalBufferDescriptor(
     const HalBufferDescriptor& hal_buffer_descriptor,
     BufferDescriptor* gralloc_buffer_descriptor) {
@@ -157,58 +70,34 @@
     const HalBufferDescriptor& buffer_descriptor,
     std::vector<buffer_handle_t>* buffers) {
   ATRACE_CALL();
-  // This function runs four operations.
-  // 1. create descriptor
-  // 2. setup descriptor
-  // 3. allocate buffer via descriptor
-  // 4. destroy descriptor_
-  gralloc1_buffer_descriptor_t descriptor;
-  int32_t error = create_descriptor_(device_, &descriptor);
-  if (error != GRALLOC1_ERROR_NONE) {
-    ALOGE("%s: create descriptor failed", __FUNCTION__);
-    return INVALID_OPERATION;
-  }
 
   BufferDescriptor gralloc_buffer_descriptor{0};
   ConvertHalBufferDescriptor(buffer_descriptor, &gralloc_buffer_descriptor);
-  status_t result = SetupDescriptor(gralloc_buffer_descriptor, &descriptor);
-  if (result != OK) {
-    ALOGE("%s: SetupDescriptor failed", __FUNCTION__);
-    destroy_descriptor_(device_, descriptor);
-    return INVALID_OPERATION;
-  }
 
-  uint32_t stride = 0, temp_stride = 0;
+  status_t err = OK;
+  uint32_t stride = 0;
   for (uint32_t i = 0; i < gralloc_buffer_descriptor.num_buffers; i++) {
     buffer_handle_t buffer;
-    error = allocate_(device_, 1, &descriptor, &buffer);
-    if (error != GRALLOC1_ERROR_NONE) {
-      ALOGE("%s: buffer(%d) allocate failed", __FUNCTION__, i);
+    err = GraphicBufferAllocator::get().allocate(
+        gralloc_buffer_descriptor.width, gralloc_buffer_descriptor.height,
+        gralloc_buffer_descriptor.format, /*layerCount*/ 1u,
+        android_convertGralloc1To0Usage(
+            gralloc_buffer_descriptor.producer_flags,
+            gralloc_buffer_descriptor.consumer_flags),
+        &buffer, &stride, "GCHGrallocBufferAllocator");
+    if (err != OK) {
+      ALOGE("%s: Failed to allocate gralloc buffer: %s(%d)", __FUNCTION__,
+            strerror(-err), err);
       break;
     }
     buffers->push_back(buffer);
-
-    error = get_stride_(device_, buffer, &temp_stride);
-    if (error != GRALLOC1_ERROR_NONE) {
-      ALOGE("%s: buffer(%d) get_stride failed", __FUNCTION__, i);
-      break;
-    }
-    // Check non-uniform strides
-    if (stride == 0) {
-      stride = temp_stride;
-    } else if (stride != temp_stride) {
-      ALOGE("%s: non-uniform strides (%d) != (%d)", __FUNCTION__, stride,
-            temp_stride);
-      error = GRALLOC1_ERROR_UNSUPPORTED;
-      break;
-    }
   }
 
-  destroy_descriptor_(device_, descriptor);
-  if (error != GRALLOC1_ERROR_NONE) {
+  if (err != OK) {
     FreeBuffers(buffers);
     return INVALID_OPERATION;
   }
+
   return OK;
 }
 
@@ -216,7 +105,7 @@
   ATRACE_CALL();
   for (auto buffer : *buffers) {
     if (buffer != nullptr) {
-      release_(device_, buffer);
+      GraphicBufferAllocator::get().free(buffer);
     }
   }
   buffers->clear();
diff --git a/common/hal/utils/gralloc_buffer_allocator.h b/common/hal/utils/gralloc_buffer_allocator.h
index bf4836f..9c4bc15 100644
--- a/common/hal/utils/gralloc_buffer_allocator.h
+++ b/common/hal/utils/gralloc_buffer_allocator.h
@@ -26,7 +26,7 @@
 namespace android {
 namespace google_camera_hal {
 
-// Gralloc1 buffer data structure
+// Gralloc buffer data structure
 struct BufferDescriptor {
   uint32_t width = 0;
   uint32_t height = 0;
@@ -41,7 +41,7 @@
   // Creates GrallocBuffer and allocate buffers
   static std::unique_ptr<IHalBufferAllocator> Create();
 
-  virtual ~GrallocBufferAllocator();
+  virtual ~GrallocBufferAllocator() = default;
 
   // Allocate buffers and return buffer via buffers.
   // The buffers is owned by caller
@@ -54,39 +54,15 @@
   GrallocBufferAllocator() = default;
 
  private:
-  status_t Initialize();
 
   // Do not support the copy constructor or assignment operator
   GrallocBufferAllocator(const GrallocBufferAllocator&) = delete;
   GrallocBufferAllocator& operator=(const GrallocBufferAllocator&) = delete;
 
-  // Init gralloc1 interface function pointer
-  template <typename T>
-  void InitGrallocInterface(gralloc1_function_descriptor_t desc,
-                            T* output_function);
-
-  // Setup descriptor before allocate buffer via gralloc1 interface
-  status_t SetupDescriptor(const BufferDescriptor& buffer_descriptor,
-                           gralloc1_buffer_descriptor_t* output_descriptor);
-
   // Convert HAL buffer descriptor to gralloc descriptor
   void ConvertHalBufferDescriptor(
       const HalBufferDescriptor& hal_buffer_descriptor,
       BufferDescriptor* gralloc_buffer_descriptor);
-
-  hw_module_t* module_ = nullptr;
-  gralloc1_device_t* device_ = nullptr;
-
-  // Gralloc1 Interface
-  GRALLOC1_PFN_CREATE_DESCRIPTOR create_descriptor_ = nullptr;
-  GRALLOC1_PFN_DESTROY_DESCRIPTOR destroy_descriptor_ = nullptr;
-  GRALLOC1_PFN_SET_DIMENSIONS set_dimensions_ = nullptr;
-  GRALLOC1_PFN_SET_FORMAT set_format_ = nullptr;
-  GRALLOC1_PFN_SET_CONSUMER_USAGE set_consumer_usage_ = nullptr;
-  GRALLOC1_PFN_SET_PRODUCER_USAGE set_producer_usage_ = nullptr;
-  GRALLOC1_PFN_GET_STRIDE get_stride_ = nullptr;
-  GRALLOC1_PFN_ALLOCATE allocate_ = nullptr;
-  GRALLOC1_PFN_RELEASE release_ = nullptr;
 };
 
 }  // namespace google_camera_hal
diff --git a/common/hal/utils/hal_buffer_allocator.h b/common/hal/utils/hal_buffer_allocator.h
index bbf0ee4..a321632 100644
--- a/common/hal/utils/hal_buffer_allocator.h
+++ b/common/hal/utils/hal_buffer_allocator.h
@@ -19,7 +19,6 @@
 
 #include "hal_types.h"
 
-#include <hardware/gralloc1.h>
 #include <utils/Errors.h>
 #include <vector>
 
diff --git a/common/hal/utils/hal_camera_metadata.cc b/common/hal/utils/hal_camera_metadata.cc
index 493e3aa..98ca1f6 100644
--- a/common/hal/utils/hal_camera_metadata.cc
+++ b/common/hal/utils/hal_camera_metadata.cc
@@ -622,7 +622,7 @@
   return Append(hal_metadata->ReleaseCameraMetadata());
 }
 
-status_t HalCameraMetadata::Append(camera_metadata_t* metadata) {
+status_t HalCameraMetadata::Append(const camera_metadata_t* metadata) {
   if (metadata == nullptr) {
     ALOGE("%s: metadata is nullptr", __FUNCTION__);
     return BAD_VALUE;
diff --git a/common/hal/utils/hal_camera_metadata.h b/common/hal/utils/hal_camera_metadata.h
index 9bf4cdd..7eefb10 100644
--- a/common/hal/utils/hal_camera_metadata.h
+++ b/common/hal/utils/hal_camera_metadata.h
@@ -121,7 +121,7 @@
   status_t Append(std::unique_ptr<HalCameraMetadata> hal_metadata);
 
   // Append metadata from a raw camera_metadata buffer
-  status_t Append(camera_metadata_t* metadata);
+  status_t Append(const camera_metadata_t* metadata);
 
   // Get metadata entry size
   size_t GetEntryCount() const;
diff --git a/common/hal/google_camera_hal/hal_utils.cc b/common/hal/utils/hal_utils.cc
similarity index 95%
rename from common/hal/google_camera_hal/hal_utils.cc
rename to common/hal/utils/hal_utils.cc
index 22635da..db87e50 100644
--- a/common/hal/google_camera_hal/hal_utils.cc
+++ b/common/hal/utils/hal_utils.cc
@@ -42,6 +42,8 @@
   hwl_request->settings = HalCameraMetadata::Clone(request.settings.get());
   hwl_request->input_buffers = request.input_buffers;
   hwl_request->output_buffers = request.output_buffers;
+  hwl_request->input_width = request.input_width;
+  hwl_request->input_height = request.input_height;
 
   for (auto& metadata : request.input_buffer_metadata) {
     hwl_request->input_buffer_metadata.push_back(
@@ -113,9 +115,13 @@
 }
 
 bool ContainsOutputBuffer(const CaptureRequest& request,
-                          const buffer_handle_t& buffer) {
+                          const StreamBuffer& buffer) {
   for (auto& request_buffer : request.output_buffers) {
-    if (request_buffer.buffer == buffer) {
+    if (request_buffer.buffer == buffer.buffer) {
+      return true;
+    } else if (buffer.buffer == nullptr &&
+               request_buffer.stream_id == buffer.stream_id) {
+      // Framework passed in an empty buffer and HAL allocated the buffer.
       return true;
     }
   }
@@ -130,7 +136,7 @@
     bool found = false;
 
     for (auto& block_request : process_block_requests) {
-      if (ContainsOutputBuffer(block_request.request, buffer.buffer)) {
+      if (ContainsOutputBuffer(block_request.request, buffer)) {
         found = true;
         break;
       }
@@ -336,7 +342,7 @@
     return false;
   }
 
-  if (property_get_bool("persist.camera.hdrplus.disable", false)) {
+  if (property_get_bool("persist.vendor.camera.hdrplus.disable", false)) {
     ALOGI("%s: HDR+ is disabled by property", __FUNCTION__);
     return false;
   }
@@ -356,7 +362,7 @@
     return false;
   }
 
-  if (property_get_bool("persist.camera.fatp.enable", false)) {
+  if (property_get_bool("persist.vendor.camera.fatp.enable", false)) {
     ALOGI("%s: Do not use HDR+ for FATP mode", __FUNCTION__);
     return false;
   }
@@ -448,7 +454,7 @@
   }
 
   // TODO(b/128633958): remove this after depth block is in place
-  if (property_get_bool("persist.camera.rgbird.forceinternal", false)) {
+  if (property_get_bool("persist.vendor.camera.rgbird.forceinternal", false)) {
     return false;
   }
 
@@ -654,10 +660,11 @@
         stream_configuration.operation_mode);
   for (uint32_t i = 0; i < stream_configuration.streams.size(); i++) {
     auto& stream = stream_configuration.streams[i];
-    ALOGI("==== [%u]stream_id %d, format %d, res %ux%u, usage %" PRIu64
-          ", is_phy %d, phy_cam_id %u",
-          i, stream.id, stream.format, stream.width, stream.height, stream.usage,
-          stream.is_physical_camera_stream, stream.physical_camera_id);
+    ALOGI("==== [%u]stream_id %d, type %d, format %d, res %ux%u, usage %" PRIu64
+          ", is_phy %d, phy_cam_id %u, group_id %d",
+          i, stream.id, stream.stream_type, stream.format, stream.width,
+          stream.height, stream.usage, stream.is_physical_camera_stream,
+          stream.physical_camera_id, stream.group_id);
   }
   ALOGI("%s", str.c_str());
 }
diff --git a/common/hal/google_camera_hal/hal_utils.h b/common/hal/utils/hal_utils.h
similarity index 100%
rename from common/hal/google_camera_hal/hal_utils.h
rename to common/hal/utils/hal_utils.h
diff --git a/common/hal/google_camera_hal/hdrplus_process_block.cc b/common/hal/utils/hdrplus_process_block.cc
similarity index 100%
rename from common/hal/google_camera_hal/hdrplus_process_block.cc
rename to common/hal/utils/hdrplus_process_block.cc
diff --git a/common/hal/google_camera_hal/hdrplus_process_block.h b/common/hal/utils/hdrplus_process_block.h
similarity index 100%
rename from common/hal/google_camera_hal/hdrplus_process_block.h
rename to common/hal/utils/hdrplus_process_block.h
diff --git a/common/hal/google_camera_hal/hdrplus_request_processor.cc b/common/hal/utils/hdrplus_request_processor.cc
similarity index 100%
rename from common/hal/google_camera_hal/hdrplus_request_processor.cc
rename to common/hal/utils/hdrplus_request_processor.cc
diff --git a/common/hal/google_camera_hal/hdrplus_request_processor.h b/common/hal/utils/hdrplus_request_processor.h
similarity index 100%
rename from common/hal/google_camera_hal/hdrplus_request_processor.h
rename to common/hal/utils/hdrplus_request_processor.h
diff --git a/common/hal/google_camera_hal/hdrplus_result_processor.cc b/common/hal/utils/hdrplus_result_processor.cc
similarity index 100%
rename from common/hal/google_camera_hal/hdrplus_result_processor.cc
rename to common/hal/utils/hdrplus_result_processor.cc
diff --git a/common/hal/google_camera_hal/hdrplus_result_processor.h b/common/hal/utils/hdrplus_result_processor.h
similarity index 100%
rename from common/hal/google_camera_hal/hdrplus_result_processor.h
rename to common/hal/utils/hdrplus_result_processor.h
diff --git a/common/hal/google_camera_hal/hwl_buffer_allocator.cc b/common/hal/utils/hwl_buffer_allocator.cc
similarity index 100%
rename from common/hal/google_camera_hal/hwl_buffer_allocator.cc
rename to common/hal/utils/hwl_buffer_allocator.cc
diff --git a/common/hal/google_camera_hal/hwl_buffer_allocator.h b/common/hal/utils/hwl_buffer_allocator.h
similarity index 100%
rename from common/hal/google_camera_hal/hwl_buffer_allocator.h
rename to common/hal/utils/hwl_buffer_allocator.h
diff --git a/common/hal/google_camera_hal/internal_stream_manager.cc b/common/hal/utils/internal_stream_manager.cc
similarity index 92%
rename from common/hal/google_camera_hal/internal_stream_manager.cc
rename to common/hal/utils/internal_stream_manager.cc
index 6dc6490..b132b5b 100644
--- a/common/hal/google_camera_hal/internal_stream_manager.cc
+++ b/common/hal/utils/internal_stream_manager.cc
@@ -15,6 +15,7 @@
  */
 
 //#define LOG_NDEBUG 0
+#include <cstdint>
 #define LOG_TAG "GCH_InternalStreamManager"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 #include <log/log.h>
@@ -26,8 +27,21 @@
 namespace android {
 namespace google_camera_hal {
 
+namespace {
+int32_t GetNextAvailableStreamId() {
+  static int32_t next_available_stream_id = kHalInternalStreamStart;
+  static std::mutex next_available_stream_id_mutex;
+  int32_t result;
+  {
+    std::lock_guard<std::mutex> lock(next_available_stream_id_mutex);
+    result = next_available_stream_id++;
+  }
+  return result;
+}
+}  // namespace
+
 std::unique_ptr<InternalStreamManager> InternalStreamManager::Create(
-    IHalBufferAllocator* buffer_allocator) {
+    IHalBufferAllocator* buffer_allocator, int partial_result_count) {
   ATRACE_CALL();
   auto stream_manager =
       std::unique_ptr<InternalStreamManager>(new InternalStreamManager());
@@ -36,13 +50,15 @@
     return nullptr;
   }
 
-  stream_manager->Initialize(buffer_allocator);
+  stream_manager->Initialize(buffer_allocator, partial_result_count);
 
   return stream_manager;
 }
 
-void InternalStreamManager::Initialize(IHalBufferAllocator* buffer_allocator) {
+void InternalStreamManager::Initialize(IHalBufferAllocator* buffer_allocator,
+                                       int partial_result_count) {
   hwl_buffer_allocator_ = buffer_allocator;
+  partial_result_count_ = partial_result_count;
 }
 
 status_t InternalStreamManager::IsStreamRegisteredLocked(int32_t stream_id) const {
@@ -87,7 +103,7 @@
   // implementation defined internal stream format. other wise will use the next
   // available unique id.
   if (stream.id < kStreamIdReserve) {
-    id = next_available_stream_id_++;
+    id = GetNextAvailableStreamId();
     internal_stream.id = id;
   }
   registered_streams_[id] = std::move(internal_stream);
@@ -159,7 +175,8 @@
   }
 
   auto buffer_manager = std::make_unique<ZslBufferManager>(
-      need_vendor_buffer ? hwl_buffer_allocator_ : nullptr);
+      need_vendor_buffer ? hwl_buffer_allocator_ : nullptr,
+      partial_result_count_);
   if (buffer_manager == nullptr) {
     ALOGE("%s: Failed to create a buffer manager for stream %d", __FUNCTION__,
           stream_id);
@@ -415,10 +432,15 @@
 status_t InternalStreamManager::GetMostRecentStreamBuffer(
     int32_t stream_id, std::vector<StreamBuffer>* input_buffers,
     std::vector<std::unique_ptr<HalCameraMetadata>>* input_buffer_metadata,
-    uint32_t payload_frames) {
+    uint32_t payload_frames, int32_t min_filled_buffers) {
   ATRACE_CALL();
   std::lock_guard<std::mutex> lock(stream_mutex_);
 
+  if (static_cast<int32_t>(payload_frames) < min_filled_buffers) {
+    ALOGW("%s: payload frames %d is smaller than min filled buffers %d",
+          __FUNCTION__, payload_frames, min_filled_buffers);
+  }
+
   if (!IsStreamAllocatedLocked(stream_id)) {
     ALOGE("%s: Stream %d was not allocated.", __FUNCTION__, stream_id);
     return BAD_VALUE;
@@ -439,7 +461,7 @@
 
   std::vector<ZslBufferManager::ZslBuffer> filled_buffers;
   buffer_managers_[owner_stream_id]->GetMostRecentZslBuffers(
-      &filled_buffers, payload_frames, kMinFilledBuffers);
+      &filled_buffers, payload_frames, min_filled_buffers);
 
   if (filled_buffers.size() == 0) {
     ALOGE("%s: There is no input buffers.", __FUNCTION__);
@@ -543,9 +565,10 @@
                                                                buffer);
 }
 
-status_t InternalStreamManager::ReturnMetadata(
-    int32_t stream_id, uint32_t frame_number,
-    const HalCameraMetadata* metadata) {
+status_t InternalStreamManager::ReturnMetadata(int32_t stream_id,
+                                               uint32_t frame_number,
+                                               const HalCameraMetadata* metadata,
+                                               int partial_result) {
   ATRACE_CALL();
   std::lock_guard<std::mutex> lock(stream_mutex_);
 
@@ -561,8 +584,8 @@
     return BAD_VALUE;
   }
 
-  return buffer_managers_[owner_stream_id]->ReturnMetadata(frame_number,
-                                                           metadata);
+  return buffer_managers_[owner_stream_id]->ReturnMetadata(
+      frame_number, metadata, partial_result);
 }
 
 }  // namespace google_camera_hal
diff --git a/common/hal/google_camera_hal/internal_stream_manager.h b/common/hal/utils/internal_stream_manager.h
similarity index 93%
rename from common/hal/google_camera_hal/internal_stream_manager.h
rename to common/hal/utils/internal_stream_manager.h
index 93b8512..99bc684 100644
--- a/common/hal/google_camera_hal/internal_stream_manager.h
+++ b/common/hal/utils/internal_stream_manager.h
@@ -19,12 +19,12 @@
 
 #include <hardware/gralloc.h>
 #include <utils/Errors.h>
+
 #include <unordered_map>
 
 #include "camera_buffer_allocator_hwl.h"
 #include "hal_buffer_allocator.h"
 #include "hal_types.h"
-#include "hwl_buffer_allocator.h"
 #include "zsl_buffer_manager.h"
 
 namespace android {
@@ -35,7 +35,8 @@
 class InternalStreamManager {
  public:
   static std::unique_ptr<InternalStreamManager> Create(
-      IHalBufferAllocator* buffer_allocator = nullptr);
+      IHalBufferAllocator* buffer_allocator = nullptr,
+      int partial_result_count = 1);
   virtual ~InternalStreamManager() = default;
 
   // stream contains the stream info to be registered. if stream.id is smaller
@@ -80,13 +81,14 @@
 
   // Return a metadata to internal stream manager.
   status_t ReturnMetadata(int32_t stream_id, uint32_t frame_number,
-                          const HalCameraMetadata* metadata);
+                          const HalCameraMetadata* metadata,
+                          int partial_result = 1);
 
   // Get the most recent buffer and metadata.
   status_t GetMostRecentStreamBuffer(
       int32_t stream_id, std::vector<StreamBuffer>* input_buffers,
       std::vector<std::unique_ptr<HalCameraMetadata>>* input_buffer_metadata,
-      uint32_t payload_frames);
+      uint32_t payload_frames, int32_t min_filled_buffers = kMinFilledBuffers);
 
   // Return the buffer from GetMostRecentStreamBuffer
   status_t ReturnZslStreamBuffers(uint32_t frame_number, int32_t stream_id);
@@ -102,7 +104,8 @@
   static constexpr int32_t kInvalidStreamId = -1;
 
   // Initialize internal stream manager
-  void Initialize(IHalBufferAllocator* buffer_allocator);
+  void Initialize(IHalBufferAllocator* buffer_allocator,
+                  int partial_result_count);
 
   // Return if a stream is registered. Must be called with stream_mutex_ locked.
   status_t IsStreamRegisteredLocked(int32_t stream_id) const;
@@ -142,9 +145,6 @@
 
   std::mutex stream_mutex_;
 
-  // Next available stream ID. Protected by stream_mutex_.
-  int32_t next_available_stream_id_ = kStreamIdStart;
-
   // Map from stream ID to registered stream. Protected by stream_mutex_.
   std::unordered_map<int32_t, Stream> registered_streams_;
 
@@ -160,6 +160,9 @@
 
   // external buffer allocator
   IHalBufferAllocator* hwl_buffer_allocator_ = nullptr;
+
+  // Partial result count reported by camera HAL
+  int partial_result_count_ = 1;
 };
 
 }  // namespace google_camera_hal
diff --git a/common/hal/google_camera_hal/multicam_realtime_process_block.cc b/common/hal/utils/multicam_realtime_process_block.cc
similarity index 98%
rename from common/hal/google_camera_hal/multicam_realtime_process_block.cc
rename to common/hal/utils/multicam_realtime_process_block.cc
index 9c422fb..fea8f7f 100644
--- a/common/hal/google_camera_hal/multicam_realtime_process_block.cc
+++ b/common/hal/utils/multicam_realtime_process_block.cc
@@ -283,8 +283,7 @@
     }
 
     if (block_request.request.frame_number != frame_number) {
-      ALOGE("%s: Not all frame numbers in requests are the same.",
-            __FUNCTION__);
+      ALOGE("%s: Not all frame numbers in requests are the same.", __FUNCTION__);
       return false;
     }
 
@@ -310,8 +309,7 @@
     }
 
     // Check no two requests will be captured from the same physical camera.
-    if (request_camera_ids.find(physical_camera_id) !=
-        request_camera_ids.end()) {
+    if (request_camera_ids.find(physical_camera_id) != request_camera_ids.end()) {
       ALOGE("%s: No two requests can be captured from the same camera ID (%u).",
             __FUNCTION__, physical_camera_id);
       return false;
@@ -447,8 +445,7 @@
           pipeline_id, frame_number);
     return;
   }
-  auto capture_result =
-      hal_utils::ConvertToCaptureResult(std::move(hwl_result));
+  auto capture_result = hal_utils::ConvertToCaptureResult(std::move(hwl_result));
   if (capture_result == nullptr) {
     ALOGE("%s: Converting to capture result failed.", __FUNCTION__);
     return;
diff --git a/common/hal/google_camera_hal/multicam_realtime_process_block.h b/common/hal/utils/multicam_realtime_process_block.h
similarity index 100%
rename from common/hal/google_camera_hal/multicam_realtime_process_block.h
rename to common/hal/utils/multicam_realtime_process_block.h
diff --git a/common/hal/utils/pipeline_request_id_manager.h b/common/hal/utils/pipeline_request_id_manager.h
index 5fb98d4..1c96e6f 100644
--- a/common/hal/utils/pipeline_request_id_manager.h
+++ b/common/hal/utils/pipeline_request_id_manager.h
@@ -54,8 +54,8 @@
   };
 
   // Default max pending request if max_pending_request isn't provided while
-  // creating class. 32 should cover all the case.
-  static const size_t kDefaultMaxPendingRequest = 32;
+  // creating class. 64 should cover all the case.
+  static const size_t kDefaultMaxPendingRequest = 64;
 
   // Max pending request support in pipeline_request_ids_.
   const size_t kMaxPendingRequest = 0;
diff --git a/common/hal/utils/result_dispatcher.cc b/common/hal/utils/result_dispatcher.cc
index 0dd6e33..6443e8f 100644
--- a/common/hal/utils/result_dispatcher.cc
+++ b/common/hal/utils/result_dispatcher.cc
@@ -228,8 +228,11 @@
       failed = true;
     }
   }
-
-  notify_callback_condition_.notify_one();
+  {
+    std::unique_lock<std::mutex> lock(notify_callback_lock);
+    is_result_shutter_updated_ = true;
+    notify_callback_condition_.notify_one();
+  }
   return failed ? UNKNOWN_ERROR : OK;
 }
 
@@ -256,8 +259,11 @@
 
   shutter_it->second.timestamp_ns = timestamp_ns;
   shutter_it->second.ready = true;
-
-  notify_callback_condition_.notify_one();
+  {
+    std::unique_lock<std::mutex> lock(notify_callback_lock);
+    is_result_shutter_updated_ = true;
+    notify_callback_condition_.notify_one();
+  }
   return OK;
 }
 
@@ -266,7 +272,10 @@
   std::lock_guard<std::mutex> lock(result_lock_);
   uint32_t frame_number = error.frame_number;
   // No need to deliver the shutter message on an error
-  pending_shutters_.erase(frame_number);
+  if (error.error_code == ErrorCode::kErrorDevice ||
+      error.error_code == ErrorCode::kErrorResult) {
+    pending_shutters_.erase(frame_number);
+  }
   // No need to deliver the result metadata on a result metadata error
   if (error.error_code == ErrorCode::kErrorResult) {
     pending_final_metadata_.erase(frame_number);
@@ -291,6 +300,7 @@
   result->physical_metadata = std::move(physical_metadata);
   result->partial_result = partial_result;
 
+  std::lock_guard<std::mutex> lock(process_capture_result_lock_);
   process_capture_result_(std::move(result));
 }
 
@@ -389,12 +399,14 @@
       ALOGV("%s: NotifyCallbackThreadLoop exits.", __FUNCTION__);
       return;
     }
-
-    if (notify_callback_condition_.wait_for(
-            lock, std::chrono::milliseconds(kCallbackThreadTimeoutMs)) ==
-        std::cv_status::timeout) {
-      PrintTimeoutMessages();
+    if (!is_result_shutter_updated_) {
+      if (notify_callback_condition_.wait_for(
+              lock, std::chrono::milliseconds(kCallbackThreadTimeoutMs)) ==
+          std::cv_status::timeout) {
+        PrintTimeoutMessages();
+      }
     }
+    is_result_shutter_updated_ = false;
   }
 }
 
@@ -425,8 +437,6 @@
     return BAD_VALUE;
   }
 
-  std::lock_guard<std::mutex> lock(result_lock_);
-
   auto shutter_it = pending_shutters_.begin();
   if (shutter_it == pending_shutters_.end() || !shutter_it->second.ready) {
     // The first pending shutter is not ready.
@@ -444,8 +454,11 @@
 void ResultDispatcher::NotifyShutters() {
   ATRACE_CALL();
   NotifyMessage message = {};
-
-  while (GetReadyShutterMessage(&message) == OK) {
+  while (true) {
+    std::lock_guard<std::mutex> lock(result_lock_);
+    if (GetReadyShutterMessage(&message) != OK) {
+      break;
+    }
     ALOGV("%s: Notify shutter for frame %u timestamp %" PRIu64, __FUNCTION__,
           message.message.shutter.frame_number,
           message.message.shutter.timestamp_ns);
@@ -540,9 +553,10 @@
       ALOGE("%s: result is nullptr", __FUNCTION__);
       return;
     }
+    std::lock_guard<std::mutex> lock(process_capture_result_lock_);
     process_capture_result_(std::move(result));
   }
 }
 
 }  // namespace google_camera_hal
-}  // namespace android
\ No newline at end of file
+}  // namespace android
diff --git a/common/hal/utils/result_dispatcher.h b/common/hal/utils/result_dispatcher.h
index 4a0a899..74f4b4a 100644
--- a/common/hal/utils/result_dispatcher.h
+++ b/common/hal/utils/result_dispatcher.h
@@ -173,6 +173,7 @@
   // Protected by result_lock_.
   std::map<uint32_t, PendingFinalResultMetadata> pending_final_metadata_;
 
+  std::mutex process_capture_result_lock_;
   ProcessCaptureResultFunc process_capture_result_;
   NotifyFunc notify_;
 
@@ -186,6 +187,9 @@
 
   // Protected by notify_callback_lock.
   bool notify_callback_thread_exiting = false;
+
+  // State of callback thread is notified or not.
+  volatile bool is_result_shutter_updated_ = false;
 };
 
 }  // namespace google_camera_hal
diff --git a/common/hal/utils/stream_buffer_cache_manager.cc b/common/hal/utils/stream_buffer_cache_manager.cc
index b801b9b..1c9a97c 100644
--- a/common/hal/utils/stream_buffer_cache_manager.cc
+++ b/common/hal/utils/stream_buffer_cache_manager.cc
@@ -161,7 +161,7 @@
     return res;
   }
 
-  stream_buffer_cache->NotifyProviderReadiness();
+  stream_buffer_cache->SetManagerState(/*active=*/true);
 
   NotifyThreadWorkload();
   return OK;
@@ -180,7 +180,7 @@
   {
     std::unique_lock<std::mutex> flush_lock(flush_mutex_);
     for (auto& stream_buffer_cache : stream_buffer_caches) {
-      stream_buffer_cache->NotifyFlushing();
+      stream_buffer_cache->SetManagerState(/*active=*/false);
     }
   }
 
@@ -298,7 +298,7 @@
     bool forced_flushing) {
   status_t res = OK;
   std::unique_lock<std::mutex> cache_lock(cache_access_mutex_);
-  if (forced_flushing || notified_flushing_) {
+  if (forced_flushing || !is_active_) {
     res = FlushLocked(forced_flushing);
     if (res != OK) {
       ALOGE("%s: Failed to flush stream buffer cache for stream %d",
@@ -321,16 +321,15 @@
     StreamBufferRequestResult* res) {
   std::unique_lock<std::mutex> cache_lock(cache_access_mutex_);
 
-  // 0. the provider of the stream for this cache must be ready
-  if (!notified_provider_readiness_) {
-    ALOGW("%s: The provider of stream %d is not ready.", __FUNCTION__,
+  // 0. the buffer cache must be active
+  if (!is_active_) {
+    ALOGW("%s: The buffer cache for stream %d is not active.", __FUNCTION__,
           cache_info_.stream_id);
     return INVALID_OPERATION;
   }
 
-  // 1. check if the cache is deactived or the stream has been notified for
-  // flushing.
-  if (stream_deactived_ || notified_flushing_) {
+  // 1. check if the cache is deactived
+  if (stream_deactived_) {
     res->is_dummy_buffer = true;
     res->buffer = dummy_buffer_;
     return OK;
@@ -386,24 +385,19 @@
   return stream_deactived_;
 }
 
-void StreamBufferCacheManager::StreamBufferCache::NotifyProviderReadiness() {
+void StreamBufferCacheManager::StreamBufferCache::SetManagerState(bool active) {
   std::unique_lock<std::mutex> lock(cache_access_mutex_);
-  notified_provider_readiness_ = true;
-}
-
-void StreamBufferCacheManager::StreamBufferCache::NotifyFlushing() {
-  std::unique_lock<std::mutex> lock(cache_access_mutex_);
-  notified_flushing_ = true;
+  is_active_ = active;
 }
 
 status_t StreamBufferCacheManager::StreamBufferCache::FlushLocked(
     bool forced_flushing) {
-  if (notified_flushing_ != true && !forced_flushing) {
-    ALOGI("%s: Stream buffer cache is not notified for flushing.", __FUNCTION__);
+  if (is_active_ && !forced_flushing) {
+    ALOGI("%s: Active stream buffer cache is not notified for forced flushing.",
+          __FUNCTION__);
     return INVALID_OPERATION;
   }
 
-  notified_flushing_ = false;
   if (cache_info_.return_func == nullptr) {
     ALOGE("%s: return_func is nullptr.", __FUNCTION__);
     return UNKNOWN_ERROR;
@@ -436,14 +430,13 @@
       return UNKNOWN_ERROR;
     }
 
-    if (!notified_provider_readiness_) {
-      ALOGI("%s: Provider is not ready.", __FUNCTION__);
+    if (!is_active_) {
+      ALOGI("%s: Buffer cache is not active.", __FUNCTION__);
       return UNKNOWN_ERROR;
     }
 
-    if (stream_deactived_ || notified_flushing_) {
-      ALOGI("%s: Already notified for flushing or stream already deactived.",
-            __FUNCTION__);
+    if (stream_deactived_) {
+      ALOGI("%s: Stream already deactived.", __FUNCTION__);
       return OK;
     }
 
@@ -511,13 +504,8 @@
 }
 
 bool StreamBufferCacheManager::StreamBufferCache::RefillableLocked() const {
-  // No need to refill if the provider is not ready
-  if (!notified_provider_readiness_) {
-    return false;
-  }
-
-  // No need to refill if the stream buffer cache is notified for flushing
-  if (notified_flushing_) {
+  // No need to refill if the buffer cache is not active
+  if (!is_active_) {
     return false;
   }
 
diff --git a/common/hal/utils/stream_buffer_cache_manager.h b/common/hal/utils/stream_buffer_cache_manager.h
index c6f127f..89ad95a 100644
--- a/common/hal/utils/stream_buffer_cache_manager.h
+++ b/common/hal/utils/stream_buffer_cache_manager.h
@@ -181,13 +181,15 @@
     // in which case, the is_dummy_buffer field in res will be true.
     status_t GetBuffer(StreamBufferRequestResult* res);
 
-    // Notify provider readiness. Client should call this function before
-    // calling Refill and GetBuffer.
-    void NotifyProviderReadiness();
-
-    // Notify the stream buffer cache to flush all buffers it acquire from the
-    // provider.
-    void NotifyFlushing();
+    // Activate or deactivate the stream buffer cache manager. The stream
+    // buffer cache manager needs to be active before calling Refill and
+    // GetBuffer. If no inflight buffer needs to be maintained, this function is
+    // called with false to flush all buffers acquired from the provider. The
+    // Stream buffer cache manager should only be invoked when the buffer
+    // provider (the camera service/framework) is ready to provide buffers (e.g.
+    // when the first capture request arrives). Similarly, it should be
+    // deactivated in cases like when the framework asks for a flush.
+    void SetManagerState(bool active);
 
     // Return whether the stream that this cache is for has been deactivated
     bool IsStreamDeactivated();
@@ -250,17 +252,12 @@
     // is returned, the is_dummy_buffer_ flag in the BufferRequestResult must be
     // set to true.
     StreamBuffer dummy_buffer_;
-    // Whether this stream has been notified by the client for flushing. This
-    // flag should be cleared after StreamBufferCache is flushed. Once this is
-    // flagged by the client, StreamBufferCacheManager will return all buffers
-    // acquired from provider for this cache the next time the dedicated thread
-    // processes any request/return workload.
-    bool notified_flushing_ = false;
-    // StreamBufferCacheManager does not refill a StreamBufferCache until this is
-    // notified by the client. Client should notify this after the buffer provider
-    // (e.g. framework) is ready to handle buffer requests. Usually, this is set
-    // once in a session and will not be cleared in the same session.
-    bool notified_provider_readiness_ = false;
+    // StreamBufferCacheManager does not refill a StreamBufferCache until this
+    // is set true by the client. Client should set this flag to true after the
+    // buffer provider (e.g. framework) is ready to handle buffer requests, or
+    // when a new request is submitted for an idle camera device (no inflight
+    // requests).
+    bool is_active_ = false;
     // Interface to notify the parent manager for new threadloop workload.
     NotifyManagerThreadWorkloadFunc notify_for_workload_ = nullptr;
     // Allocator of the dummy buffer for this stream. The stream buffer cache
@@ -335,4 +332,4 @@
 }  // namespace google_camera_hal
 }  // namespace android
 
-#endif  // HARDWARE_GOOGLE_CAMERA_HAL_UTILS_STREAM_BUFFER_CACHE_MANAGER_H_
\ No newline at end of file
+#endif  // HARDWARE_GOOGLE_CAMERA_HAL_UTILS_STREAM_BUFFER_CACHE_MANAGER_H_
diff --git a/common/hal/utils/utils.cc b/common/hal/utils/utils.cc
index 24d0063..54b382a 100644
--- a/common/hal/utils/utils.cc
+++ b/common/hal/utils/utils.cc
@@ -20,7 +20,10 @@
 #include "utils.h"
 
 #include <cutils/properties.h>
+#include <dirent.h>
+#include <dlfcn.h>
 #include <hardware/gralloc.h>
+#include <sys/stat.h>
 
 #include "vendor_tag_defs.h"
 
@@ -28,7 +31,8 @@
 namespace google_camera_hal {
 namespace utils {
 
-const std::string kRealtimeThreadSetProp = "persist.camera.realtimethread";
+constexpr char kRealtimeThreadSetProp[] =
+    "persist.vendor.camera.realtimethread";
 
 bool IsDepthStream(const Stream& stream) {
   if (stream.stream_type == StreamType::kOutput &&
@@ -139,21 +143,42 @@
   return OK;
 }
 
+bool HasCapability(const HalCameraMetadata* metadata, uint8_t capability) {
+  if (metadata == nullptr) {
+    return false;
+  }
+
+  camera_metadata_ro_entry_t entry;
+  auto ret = metadata->Get(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
+  if (ret != OK) {
+    return false;
+  }
+  for (size_t i = 0; i < entry.count; i++) {
+    if (entry.data.u8[i] == capability) {
+      return true;
+    }
+  }
+  return false;
+}
+
 status_t GetSensorActiveArraySize(const HalCameraMetadata* characteristics,
-                                  Rect* active_array) {
+                                  Rect* active_array, bool maximum_resolution) {
   if (characteristics == nullptr || active_array == nullptr) {
     ALOGE("%s: characteristics or active_array is nullptr", __FUNCTION__);
     return BAD_VALUE;
   }
-
+  uint32_t active_array_tag =
+      maximum_resolution
+          ? ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+          : ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE;
   camera_metadata_ro_entry entry;
-  status_t res =
-      characteristics->Get(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &entry);
+  status_t res = characteristics->Get(active_array_tag, &entry);
   if (res != OK || entry.count != 4) {
     ALOGE(
         "%s: Getting ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE failed: %s(%d) "
-        "count: %zu",
-        __FUNCTION__, strerror(-res), res, entry.count);
+        "count: %zu max resolution ? %s",
+        __FUNCTION__, strerror(-res), res, entry.count,
+        maximum_resolution ? "true" : "false");
     return res;
   }
 
@@ -388,7 +413,7 @@
   static bool first_time = false;
   if (first_time == false) {
     first_time = true;
-    support_real_time = property_get_bool(kRealtimeThreadSetProp.c_str(), false);
+    support_real_time = property_get_bool(kRealtimeThreadSetProp, false);
   }
 
   return support_real_time;
@@ -423,6 +448,33 @@
   return OK;
 }
 
+// Returns an array of regular files under dir_path.
+std::vector<std::string> FindLibraryPaths(const char* dir_path) {
+  std::vector<std::string> libs;
+
+  errno = 0;
+  DIR* dir = opendir(dir_path);
+  if (!dir) {
+    ALOGD("%s: Unable to open directory %s (%s)", __FUNCTION__, dir_path,
+          strerror(errno));
+    return libs;
+  }
+
+  struct dirent* entry = nullptr;
+  while ((entry = readdir(dir)) != nullptr) {
+    std::string lib_path(dir_path);
+    lib_path += entry->d_name;
+    struct stat st;
+    if (stat(lib_path.c_str(), &st) == 0) {
+      if (S_ISREG(st.st_mode)) {
+        libs.push_back(lib_path);
+      }
+    }
+  }
+
+  return libs;
+}
+
 }  // namespace utils
 }  // namespace google_camera_hal
 }  // namespace android
diff --git a/common/hal/utils/utils.h b/common/hal/utils/utils.h
index f79773f..4412908 100644
--- a/common/hal/utils/utils.h
+++ b/common/hal/utils/utils.h
@@ -18,7 +18,9 @@
 #define HARDWARE_GOOGLE_CAMERA_HAL_UTILS_UTILS_H_
 
 #include <log/log.h>
+
 #include <utility>
+
 #include "hal_types.h"
 
 namespace android {
@@ -35,11 +37,14 @@
 bool IsDepthStream(const Stream& stream);
 bool IsOutputZslStream(const Stream& stream);
 
+bool HasCapability(const HalCameraMetadata* metadata, uint8_t capability);
+
 status_t GetSensorPhysicalSize(const HalCameraMetadata* characteristics,
                                float* width, float* height);
 
 status_t GetSensorActiveArraySize(const HalCameraMetadata* characteristics,
-                                  Rect* active_array);
+                                  Rect* active_array,
+                                  bool maximum_resolution = false);
 
 status_t GetSensorPixelArraySize(const HalCameraMetadata* characteristics,
                                  Dimension* pixel_array);
@@ -131,6 +136,8 @@
   ClampBoundary(active_array_dimension, x, y, width, height);
 }
 
+std::vector<std::string> FindLibraryPaths(const char* dir_path);
+
 }  // namespace utils
 }  // namespace google_camera_hal
 }  // namespace android
diff --git a/common/hal/utils/zoom_ratio_mapper.cc b/common/hal/utils/zoom_ratio_mapper.cc
index a05604e..6e74bc1 100644
--- a/common/hal/utils/zoom_ratio_mapper.cc
+++ b/common/hal/utils/zoom_ratio_mapper.cc
@@ -47,6 +47,7 @@
          sizeof(zoom_ratio_range_));
   zoom_ratio_mapper_hwl_ = std::move(params->zoom_ratio_mapper_hwl);
   is_zoom_ratio_supported_ = true;
+  camera_id_ = params->camera_id;
 }
 
 void ZoomRatioMapper::UpdateCaptureRequest(CaptureRequest* request) {
@@ -61,7 +62,14 @@
   }
 
   if (request->settings != nullptr) {
-    ApplyZoomRatio(active_array_dimension_, true, request->settings.get());
+    Dimension override_dimension;
+    Dimension active_array_dimension_chosen = active_array_dimension_;
+    if (zoom_ratio_mapper_hwl_ &&
+        zoom_ratio_mapper_hwl_->GetActiveArrayDimensionToBeUsed(
+            camera_id_, request->settings.get(), &override_dimension)) {
+      active_array_dimension_chosen = override_dimension;
+    }
+    ApplyZoomRatio(active_array_dimension_chosen, true, request->settings.get());
   }
 
   for (auto& [camera_id, metadata] : request->physical_camera_settings) {
@@ -73,7 +81,13 @@
               camera_id);
         continue;
       }
+      Dimension override_dimension;
       Dimension physical_active_array_dimension = physical_cam_iter->second;
+      if (zoom_ratio_mapper_hwl_ &&
+          zoom_ratio_mapper_hwl_->GetActiveArrayDimensionToBeUsed(
+              camera_id, metadata.get(), &override_dimension)) {
+        physical_active_array_dimension = override_dimension;
+      }
       ApplyZoomRatio(physical_active_array_dimension, true, metadata.get());
     }
   }
@@ -94,7 +108,14 @@
   }
 
   if (result->result_metadata != nullptr) {
-    ApplyZoomRatio(active_array_dimension_, false,
+    Dimension override_dimension;
+    Dimension active_array_dimension_chosen = active_array_dimension_;
+    if (zoom_ratio_mapper_hwl_ &&
+        zoom_ratio_mapper_hwl_->GetActiveArrayDimensionToBeUsed(
+            camera_id_, result->result_metadata.get(), &override_dimension)) {
+      active_array_dimension_chosen = override_dimension;
+    }
+    ApplyZoomRatio(active_array_dimension_chosen, false,
                    result->result_metadata.get());
   }
 
@@ -107,7 +128,13 @@
               camera_id);
         continue;
       }
+      Dimension override_dimension;
       Dimension physical_active_array_dimension = physical_cam_iter->second;
+      if (zoom_ratio_mapper_hwl_ &&
+          zoom_ratio_mapper_hwl_->GetActiveArrayDimensionToBeUsed(
+              camera_id, metadata.get(), &override_dimension)) {
+        physical_active_array_dimension = override_dimension;
+      }
       ApplyZoomRatio(physical_active_array_dimension, false, metadata.get());
     }
   }
@@ -127,7 +154,7 @@
   camera_metadata_ro_entry entry = {};
   status_t res = metadata->Get(ANDROID_CONTROL_ZOOM_RATIO, &entry);
   if (res != OK) {
-    ALOGE("%s: Failed to get the zoom ratio", __FUNCTION__);
+    ALOGV("%s: zoom ratio doesn't exist, cancel the conversion", __FUNCTION__);
     return;
   }
   float zoom_ratio = entry.data.f[0];
diff --git a/common/hal/utils/zoom_ratio_mapper.h b/common/hal/utils/zoom_ratio_mapper.h
index 6bba6e2..4fbee2f 100644
--- a/common/hal/utils/zoom_ratio_mapper.h
+++ b/common/hal/utils/zoom_ratio_mapper.h
@@ -30,6 +30,7 @@
     std::unordered_map<uint32_t, Dimension> physical_cam_active_array_dimension;
     ZoomRatioRange zoom_ratio_range;
     std::unique_ptr<ZoomRatioMapperHwl> zoom_ratio_mapper_hwl;
+    uint32_t camera_id;
   };
 
   void Initialize(InitParams* params);
@@ -76,6 +77,8 @@
   bool is_zoom_ratio_supported_ = false;
 
   std::unique_ptr<ZoomRatioMapperHwl> zoom_ratio_mapper_hwl_;
+
+  uint32_t camera_id_;
 };
 
 }  // namespace google_camera_hal
diff --git a/common/hal/utils/zsl_buffer_manager.cc b/common/hal/utils/zsl_buffer_manager.cc
index 22e1ee8..060e335 100644
--- a/common/hal/utils/zsl_buffer_manager.cc
+++ b/common/hal/utils/zsl_buffer_manager.cc
@@ -28,10 +28,12 @@
 namespace android {
 namespace google_camera_hal {
 
-ZslBufferManager::ZslBufferManager(IHalBufferAllocator* allocator)
+ZslBufferManager::ZslBufferManager(IHalBufferAllocator* allocator,
+                                   int partial_result_count)
     : kMemoryProfilingEnabled(
-          property_get_bool("persist.camera.hal.memoryprofile", false)),
-      buffer_allocator_(allocator) {
+          property_get_bool("persist.vendor.camera.hal.memoryprofile", false)),
+      buffer_allocator_(allocator),
+      partial_result_count_(partial_result_count) {
 }
 
 ZslBufferManager::~ZslBufferManager() {
@@ -159,7 +161,7 @@
   } else if (partially_filled_zsl_buffers_.size() > 0) {
     auto buffer_iter = partially_filled_zsl_buffers_.begin();
     while (buffer_iter != partially_filled_zsl_buffers_.end()) {
-      if (buffer_iter->second.metadata != nullptr) {
+      if (buffer_iter->second.partial_result == partial_result_count_) {
         if (buffer_iter->second.buffer.buffer != kInvalidBufferHandle) {
           ALOGE("%s: Invalid: both are ready in partial zsl queue.",
                 __FUNCTION__);
@@ -170,7 +172,8 @@
             "%s: Remove metadata-only buffer in partially filled zsl "
             "buffer queue. Releasing the metadata resource.",
             __FUNCTION__);
-      } else if (buffer_iter->second.buffer.buffer == kInvalidBufferHandle) {
+      } else if (buffer_iter->second.buffer.buffer == kInvalidBufferHandle &&
+                 buffer_iter->second.partial_result == 0) {
         ALOGE(
             "%s: Invalid: both buffer and metadata are empty in partial "
             "zsl queue.",
@@ -287,17 +290,18 @@
     zsl_buffer.metadata = nullptr;
     partially_filled_zsl_buffers_[frame_number] = std::move(zsl_buffer);
   } else if (partially_filled_zsl_buffers_[frame_number].buffer.buffer ==
-                 kInvalidBufferHandle &&
-             partially_filled_zsl_buffers_[frame_number].metadata != nullptr) {
-    ALOGV(
-        "%s: both buffer and metadata for frame[%u] are ready. Move to "
-        "filled_zsl_buffers_.",
-        __FUNCTION__, frame_number);
-
-    zsl_buffer.metadata =
-        std::move(partially_filled_zsl_buffers_[frame_number].metadata);
-    filled_zsl_buffers_[frame_number] = std::move(zsl_buffer);
-    partially_filled_zsl_buffers_.erase(frame_number);
+             kInvalidBufferHandle) {
+    partially_filled_zsl_buffers_[frame_number].buffer = buffer;
+    if (partially_filled_zsl_buffers_[frame_number].partial_result ==
+        partial_result_count_) {
+      ALOGV(
+          "%s: both buffer and metadata for frame[%u] are ready. Move to "
+          "filled_zsl_buffers_.",
+          __FUNCTION__, frame_number);
+      filled_zsl_buffers_[frame_number] =
+          std::move(partially_filled_zsl_buffers_[frame_number]);
+      partially_filled_zsl_buffers_.erase(frame_number);
+    }
   } else {
     ALOGE(
         "%s: the buffer for frame[%u] already returned or the metadata is "
@@ -310,21 +314,18 @@
 }
 
 status_t ZslBufferManager::ReturnMetadata(uint32_t frame_number,
-                                          const HalCameraMetadata* metadata) {
+                                          const HalCameraMetadata* metadata,
+                                          int partial_result) {
   ATRACE_CALL();
   std::unique_lock<std::mutex> lock(zsl_buffers_lock_);
 
   ZslBuffer zsl_buffer = {};
   zsl_buffer.frame_number = frame_number;
-  zsl_buffer.metadata = HalCameraMetadata::Clone(metadata);
-  if (zsl_buffer.metadata == nullptr) {
-    ALOGE("%s: Failed to Clone camera metadata.", __FUNCTION__);
-    return NO_MEMORY;
-  }
+  zsl_buffer.partial_result = partial_result;
 
-  if (partially_filled_zsl_buffers_.empty() ||
-      partially_filled_zsl_buffers_.find(frame_number) ==
-          partially_filled_zsl_buffers_.end()) {
+  auto partially_filled_buffer_it =
+      partially_filled_zsl_buffers_.find(frame_number);
+  if (partially_filled_buffer_it == partially_filled_zsl_buffers_.end()) {
     // not able to distinguish these two cases through the current status of
     // the partial buffer
     ALOGV(
@@ -333,24 +334,50 @@
         __FUNCTION__, frame_number);
 
     zsl_buffer.buffer = {};
+    zsl_buffer.metadata = HalCameraMetadata::Clone(metadata);
+    if (zsl_buffer.metadata == nullptr) {
+      ALOGE("%s: Failed to Clone camera metadata.", __FUNCTION__);
+      return NO_MEMORY;
+    }
     partially_filled_zsl_buffers_[frame_number] = std::move(zsl_buffer);
-  } else if (partially_filled_zsl_buffers_[frame_number].metadata == nullptr &&
-             partially_filled_zsl_buffers_[frame_number].buffer.buffer !=
-                 kInvalidBufferHandle) {
-    ALOGV(
-        "%s: both buffer and metadata for frame[%u] are ready. Move to "
-        "filled_zsl_buffers_.",
-        __FUNCTION__, frame_number);
-
-    zsl_buffer.buffer = partially_filled_zsl_buffers_[frame_number].buffer;
-    filled_zsl_buffers_[frame_number] = std::move(zsl_buffer);
-    partially_filled_zsl_buffers_.erase(frame_number);
+  } else if (partial_result < partial_result_count_) {
+    partially_filled_buffer_it->second.partial_result = partial_result;
+    // Need to wait for more partial results
+    if (partially_filled_buffer_it->second.metadata == nullptr) {
+      // This is the first partial result, clone to create an entry
+      partially_filled_buffer_it->second.metadata =
+          HalCameraMetadata::Clone(metadata);
+      if (partially_filled_buffer_it->second.metadata == nullptr) {
+        ALOGE("%s: Failed to Clone camera metadata.", __FUNCTION__);
+        return NO_MEMORY;
+      }
+    } else {
+      // Append to previously received partial results
+      partially_filled_buffer_it->second.metadata->Append(
+          metadata->GetRawCameraMetadata());
+    }
   } else {
-    ALOGE(
-        "%s: the metadata for frame[%u] already returned or the buffer is "
-        "missing.",
-        __FUNCTION__, frame_number);
-    return INVALID_OPERATION;
+    zsl_buffer.buffer = partially_filled_buffer_it->second.buffer;
+    partially_filled_buffer_it->second.partial_result = partial_result;
+    if (partially_filled_buffer_it->second.metadata == nullptr) {
+      // This will happen if partial_result_count_ == 1
+      partially_filled_buffer_it->second.metadata =
+          HalCameraMetadata::Clone(metadata);
+    } else {
+      // This is the last partial result, append it to the others
+      partially_filled_buffer_it->second.metadata->Append(
+          metadata->GetRawCameraMetadata());
+    }
+    if (partially_filled_buffer_it->second.buffer.buffer !=
+        kInvalidBufferHandle) {
+      ALOGV(
+          "%s: both buffer and metadata for frame[%u] are ready. Move to "
+          "filled_zsl_buffers_.",
+          __FUNCTION__, frame_number);
+      filled_zsl_buffers_[frame_number] =
+          std::move(partially_filled_buffer_it->second);
+      partially_filled_zsl_buffers_.erase(frame_number);
+    }
   }
 
   if (partially_filled_zsl_buffers_.size() > kMaxPartialZslBuffers) {
@@ -446,9 +473,10 @@
     // Only include recent buffers.
     if (current_timestamp - buffer_timestamp < kMaxBufferTimestampDiff) {
       zsl_buffers->push_back(std::move(zsl_buffer_iter->second));
+      zsl_buffer_iter = filled_zsl_buffers_.erase(zsl_buffer_iter);
+    } else {
+      zsl_buffer_iter++;
     }
-
-    zsl_buffer_iter = filled_zsl_buffers_.erase(zsl_buffer_iter);
   }
 }
 
diff --git a/common/hal/utils/zsl_buffer_manager.h b/common/hal/utils/zsl_buffer_manager.h
index 5deb6eb..6dec8e7 100644
--- a/common/hal/utils/zsl_buffer_manager.h
+++ b/common/hal/utils/zsl_buffer_manager.h
@@ -39,7 +39,8 @@
  public:
   // allocator will be used to allocate buffers. If allocator is nullptr,
   // GrallocBufferAllocator will be used to allocate buffers.
-  ZslBufferManager(IHalBufferAllocator* allocator = nullptr);
+  ZslBufferManager(IHalBufferAllocator* allocator = nullptr,
+                   int partial_result_count = 1);
   virtual ~ZslBufferManager();
 
   // Defines a ZSL buffer.
@@ -50,6 +51,8 @@
     StreamBuffer buffer;
     // Original result metadata of this ZSL buffer captured by HAL.
     std::unique_ptr<HalCameraMetadata> metadata;
+    // Last partial result received
+    int partial_result = 0;
   };
 
   // Allocate buffers. This can only be called once.
@@ -73,7 +76,7 @@
   // ZSL buffer manager will make a copy of metadata.
   // The caller still owns metadata.
   status_t ReturnMetadata(uint32_t frame_number,
-                          const HalCameraMetadata* metadata);
+                          const HalCameraMetadata* metadata, int partial_result);
 
   // Get a number of the most recent ZSL buffers.
   // If numBuffers is larger than available ZSL buffers,
@@ -181,6 +184,9 @@
 
   // Count the number when there are enough unused buffers.
   uint32_t idle_buffer_frame_counter_ = 0;
+
+  // Partial result count reported by camera HAL
+  int partial_result_count_ = 1;
 };
 
 }  // namespace google_camera_hal
diff --git a/common/lib_depth_generator/Android.bp b/common/lib_depth_generator/Android.bp
index d4518cc..8c9ce11 100644
--- a/common/lib_depth_generator/Android.bp
+++ b/common/lib_depth_generator/Android.bp
@@ -25,7 +25,7 @@
 
 cc_library_headers {
     name: "lib_depth_generator_headers",
-    vendor_available: true,
+    vendor: true,
     export_include_dirs: [
         ".",
     ],
diff --git a/common/profiler/Android.bp b/common/profiler/Android.bp
index 93b141a..a8a6f1f 100644
--- a/common/profiler/Android.bp
+++ b/common/profiler/Android.bp
@@ -23,8 +23,26 @@
     default_applicable_licenses: ["hardware_google_camera_license"],
 }
 
+cc_library_static {
+    name: "lib_profiler_proto",
+    srcs: [
+        "profiler.proto",
+    ],
+    rtti: false,
+    proto: {
+        type: "lite",
+        canonical_path_from_root: true,
+        export_proto_headers: true,
+    },
+
+    owner: "google",
+    vendor: true,
+    host_supported: true,
+}
+
 cc_library_shared {
-    name: "lib_profiler",
+    name:
+      "lib_profiler",
 
     srcs: [
         "profiler.cc",
@@ -40,11 +58,20 @@
     shared_libs: [
         "libcutils",
         "liblog",
+        "libprotobuf-cpp-full",
         "libutils",
     ],
 
+    whole_static_libs: [
+        "lib_profiler_proto",
+    ],
+    export_static_lib_headers: [
+        "lib_profiler_proto",
+    ],
     export_include_dirs: ["."],
 
     owner: "google",
-    vendor_available: true,
+    vendor: true,
+    host_supported: true,
+    rtti: true,
 }
diff --git a/common/profiler/OWNERS b/common/profiler/OWNERS
index e2810f4..994df59 100644
--- a/common/profiler/OWNERS
+++ b/common/profiler/OWNERS
@@ -1,3 +1,6 @@
 lunc@google.com
-ckliang@google.com
-zhijunhe@google.com
+cychen@google.com
+pingchienliu@google.com
+ianchien@google.com
+khakisung@google.com
+vincechiu@google.com
diff --git a/common/profiler/profiler.cc b/common/profiler/profiler.cc
index 9a5ac69..31fdaa6 100644
--- a/common/profiler/profiler.cc
+++ b/common/profiler/profiler.cc
@@ -16,6 +16,7 @@
 #include "profiler.h"
 
 #include <cutils/properties.h>
+#include <hardware/google/camera/common/profiler/profiler.pb.h>
 #include <log/log.h>
 #include <sys/stat.h>
 
@@ -32,11 +33,23 @@
 #undef LOG_TAG
 #define LOG_TAG "profiler"
 
+float StandardDeviation(std::vector<float> samples, float mean) {
+  int size = samples.size();
+
+  double sum = 0;
+  for (int i = 0; i < size; i++) {
+    sum += pow((samples[i] - mean), 2);
+  }
+
+  return static_cast<float>(sqrt(sum / (size - 1)));
+}
+
 // Profiler implementatoin.
 class ProfilerImpl : public Profiler {
  public:
-  ProfilerImpl(SetPropFlag setting) : setting_(setting){
-    object_init_time_ = CurrentTime();
+  ProfilerImpl(SetPropFlag setting) : setting_(setting) {
+    object_init_real_time_ = GetRealTimeNs();
+    object_init_boot_time_ = GetBootTimeNs();
   };
   ~ProfilerImpl();
 
@@ -52,7 +65,7 @@
   //  dump_file_prefix: file prefix name. In the current setting,
   //    "/data/vendor/camera/" is a valid folder for camera to dump file.
   //    A valid prefix can be "/data/vendor/camera/test_prefix_".
-  void SetDumpFilePrefix(std::string dump_file_prefix) override final;
+  void SetDumpFilePrefix(const std::string& dump_file_prefix) override final;
 
   // Start to profile.
   // We use start and end to choose which code snippet to be profile.
@@ -61,46 +74,64 @@
   // Arguments:
   //   name: the name of the node to be profiled.
   //   request_id: frame requesd id.
-  void Start(const std::string name,
+  void Start(const std::string& name,
              int request_id = kInvalidRequestId) override final;
 
   // End the profileing.
   // Arguments:
   //   name: the name of the node to be profiled. Should be the same in Start().
   //   request_id: frame requesd id.
-  void End(const std::string name,
+  void End(const std::string& name,
            int request_id = kInvalidRequestId) override final;
 
   // Print out the profiling result in the standard output (ANDROID_LOG_ERROR).
-  virtual void PrintResult() override;
+  void PrintResult() override;
+
+  // Profile the frame rate
+  void ProfileFrameRate(const std::string&) override final;
+
+  // Set the interval of FPS print
+  // The unit is seconds and interval_seconds must be >= 1
+  void SetFpsPrintInterval(int32_t interval_seconds) override final;
+
+  // Get the latency associated with the name
+  int64_t GetLatencyInNanoseconds(const std::string& name,
+                                  int request_id) override final;
 
  protected:
   // A structure to hold start time, end time, and count of profiling code
   // snippet.
   struct TimeSlot {
-    int64_t start;
-    int64_t end;
-    int32_t count;
-    TimeSlot() : start(0), end(0), count(0) {
-    }
+    int64_t start = 0;
+    int64_t end = 0;
+    int32_t count = 0;
+    int32_t request_id = 0;
   };
 
   // A structure to store node's profiling result.
   struct TimeResult {
     std::string node_name;
+    float min_dt;
     float max_dt;
     float avg_dt;
     float avg_count;
-    TimeResult(std::string node_name, float max_dt, float avg_dt, float count)
+    float fps;
+    float mean_max_stddevs;
+    TimeResult(std::string node_name, float min_dt, float max_dt, float avg_dt,
+               float count, float fps, float mean_max_stddevs)
         : node_name(node_name),
+          min_dt(min_dt),
           max_dt(max_dt),
           avg_dt(avg_dt),
-          avg_count(count) {
+          avg_count(count),
+          fps(fps),
+          mean_max_stddevs(mean_max_stddevs) {
     }
   };
 
   using TimeSeries = std::vector<TimeSlot>;
   using NodeTimingMap = std::unordered_map<std::string, TimeSeries>;
+  using NodeFrameRateMap = std::unordered_map<std::string, TimeSlot>;
 
   static constexpr int64_t kNsPerSec = 1000000000;
   static constexpr float kNanoToMilli = 0.000001f;
@@ -109,6 +140,10 @@
   SetPropFlag setting_;
   // The map to record the timing of all nodes.
   NodeTimingMap timing_map_;
+  // The map to record the timing used to print fps when closing.
+  NodeFrameRateMap frame_rate_map_;
+  // The map to record the timing to print fps per second.
+  NodeFrameRateMap realtime_frame_rate_map_;
   // Use case name.
   std::string use_case_;
   // The prefix for the dump filename.
@@ -116,8 +151,8 @@
   // Mutex lock.
   std::mutex lock_;
 
-  // Get boot time.
-  int64_t CurrentTime() const {
+  // Get clock boot time.
+  int64_t GetBootTimeNs() const {
     if (timespec now; clock_gettime(CLOCK_BOOTTIME, &now) == 0) {
       return now.tv_sec * kNsPerSec + now.tv_nsec;
     } else {
@@ -125,17 +160,40 @@
       return -1;
     }
   }
+  // Get clock real time.
+  int64_t GetRealTimeNs() const {
+    if (timespec now; clock_gettime(CLOCK_REALTIME, &now) == 0) {
+      return now.tv_sec * kNsPerSec + now.tv_nsec;
+    } else {
+      ALOGE("clock_gettime failed");
+      return -1;
+    }
+  }
 
-  // Timestamp of the class object initialized.
-  int64_t object_init_time_;
+  // Timestamp of the class object initialized using CLOCK_BOOTTIME.
+  int64_t object_init_boot_time_;
+  // Timestamp of the class object initialized using CLOCK_REALTIME.
+  int64_t object_init_real_time_;
 
   // Create folder if not exist.
-  void CreateFolder(std::string folder_path);
+  void CreateFolder(const std::string& folder_path);
 
   // Dump the result to the disk.
   // Argument:
   //   filepath: file path to dump file.
-  void DumpResult(std::string filepath);
+  virtual void DumpResult(const std::string& filepath);
+
+  // Dump result in text format.
+  void DumpTxt(std::string_view filepath);
+
+  // Dump result in proto binary format.
+  void DumpPb(std::string_view filepath);
+
+  // Dump result format extension: proto or text.
+  constexpr static char kStrPb[] = ".pb";
+  constexpr static char kStrTxt[] = ".txt";
+
+  int32_t fps_print_interval_seconds_ = 1;
 };
 
 ProfilerImpl::~ProfilerImpl() {
@@ -146,12 +204,20 @@
     PrintResult();
   }
   if (setting_ & SetPropFlag::kDumpBit) {
-    DumpResult(dump_file_prefix_ + use_case_ + "-TS" +
-        std::to_string(object_init_time_) + ".txt");
+    std::string filename = std::to_string(object_init_real_time_);
+    DumpResult(dump_file_prefix_ + use_case_ + "-TS" + filename);
   }
 }
 
-void ProfilerImpl::CreateFolder(std::string folder_path) {
+void ProfilerImpl::DumpResult(const std::string& filepath) {
+  if (setting_ & SetPropFlag::kProto) {
+    DumpPb(filepath + kStrPb);
+  } else {
+    DumpTxt(filepath + kStrTxt);
+  }
+}
+
+void ProfilerImpl::CreateFolder(const std::string& folder_path) {
   struct stat folder_stat;
   memset(&folder_stat, 0, sizeof(folder_stat));
   if (stat(folder_path.c_str(), &folder_stat) != 0) {
@@ -162,7 +228,7 @@
   }
 }
 
-void ProfilerImpl::SetDumpFilePrefix(std::string dump_file_prefix) {
+void ProfilerImpl::SetDumpFilePrefix(const std::string& dump_file_prefix) {
   dump_file_prefix_ = dump_file_prefix;
   if (setting_ & SetPropFlag::kDumpBit) {
     if (auto index = dump_file_prefix_.rfind('/'); index != std::string::npos) {
@@ -171,57 +237,128 @@
   }
 }
 
-void ProfilerImpl::Start(const std::string name, int request_id) {
-  if (setting_ == SetPropFlag::kDisable) {
+void ProfilerImpl::SetFpsPrintInterval(int32_t interval_seconds) {
+  if (interval_seconds < 1) {
+    ALOGE("Wrong interval: %d, must >= 1", interval_seconds);
     return;
   }
-  int index = (request_id == kInvalidRequestId ? 0 : request_id);
-
-  std::lock_guard<std::mutex> lk(lock_);
-  TimeSeries& time_series = timing_map_[name];
-  for (int i = time_series.size(); i <= index; i++) {
-    time_series.push_back(TimeSlot());
-  }
-  TimeSlot& slot = time_series[index];
-  slot.start += CurrentTime();
+  fps_print_interval_seconds_ = interval_seconds;
 }
 
-void ProfilerImpl::End(const std::string name, int request_id) {
+void ProfilerImpl::ProfileFrameRate(const std::string& name) {
+  std::lock_guard<std::mutex> lk(lock_);
+  // Save the timing for each whole process
+  TimeSlot& frame_rate = frame_rate_map_[name];
+  if (frame_rate.start == 0) {
+    frame_rate.start = GetBootTimeNs();
+    frame_rate.count = 0;
+    frame_rate.end = 0;
+  } else {
+    ++frame_rate.count;
+    frame_rate.end = GetBootTimeNs();
+  }
+
+  if ((setting_ & SetPropFlag::kPrintFpsPerIntervalBit) == 0) {
+    return;
+  }
+  // Print FPS once per configured interval (fps_print_interval_seconds_)
+  TimeSlot& realtime_frame_rate = realtime_frame_rate_map_[name];
+  if (realtime_frame_rate.start == 0) {
+    realtime_frame_rate.start = GetBootTimeNs();
+    realtime_frame_rate.count = 0;
+  } else {
+    ++realtime_frame_rate.count;
+    int64_t current = GetBootTimeNs();
+    int64_t elapsed = current - realtime_frame_rate.start;
+    if (elapsed > kNsPerSec * fps_print_interval_seconds_) {
+      float fps =
+          realtime_frame_rate.count * kNsPerSec / static_cast<float>(elapsed);
+      float avg_fps = frame_rate.count * kNsPerSec /
+                      static_cast<float>(frame_rate.end - frame_rate.start);
+      ALOGI("%s: current FPS %3.2f, avg %3.2f", name.c_str(), fps, avg_fps);
+      realtime_frame_rate.count = 0;
+      realtime_frame_rate.start = current;
+    }
+  }
+}
+
+void ProfilerImpl::Start(const std::string& name, int request_id) {
   if (setting_ == SetPropFlag::kDisable) {
     return;
   }
-  int index = (request_id == kInvalidRequestId ? 0 : request_id);
 
-  std::lock_guard<std::mutex> lk(lock_);
+  // When the request_id == kInvalidRequestId, it is served as a different
+  // purpose, e.g. profiling first frame latency, or HAL total runtime. The valid
+  // request id is shifted by 1 to avoid the conflict.
+  int valid_request_id = (request_id == kInvalidRequestId) ? 0 : request_id + 1;
 
-  if (static_cast<std::size_t>(index) < timing_map_[name].size()) {
-    TimeSlot& slot = timing_map_[name][index];
-    slot.end += CurrentTime();
-    slot.count++;
+  {
+    std::lock_guard<std::mutex> lk(lock_);
+    TimeSeries& time_series = timing_map_[name];
+    for (int i = time_series.size(); i <= valid_request_id; ++i) {
+      time_series.push_back(TimeSlot());
+    }
+    TimeSlot& slot = time_series[valid_request_id];
+    slot.request_id = valid_request_id;
+    slot.start += GetBootTimeNs();
+  }
+
+  if ((setting_ & SetPropFlag::kCalculateFpsOnEndBit) == 0) {
+    ProfileFrameRate(name);
+  }
+}
+
+void ProfilerImpl::End(const std::string& name, int request_id) {
+  if (setting_ == SetPropFlag::kDisable) {
+    return;
+  }
+
+  // When the request_id == kInvalidRequestId, it is served as a different
+  // purpose, e.g. profiling first frame latency, or HAL total runtime. The valid
+  // request id is shifted by 1 to avoid the conflict.
+  int valid_request_id = (request_id == kInvalidRequestId) ? 0 : request_id + 1;
+
+  {
+    std::lock_guard<std::mutex> lk(lock_);
+    if (static_cast<std::size_t>(valid_request_id) < timing_map_[name].size()) {
+      TimeSlot& slot = timing_map_[name][valid_request_id];
+      slot.end += GetBootTimeNs();
+      ++slot.count;
+    }
+  }
+
+  if ((setting_ & SetPropFlag::kCalculateFpsOnEndBit) != 0) {
+    ProfileFrameRate(name);
   }
 }
 
 void ProfilerImpl::PrintResult() {
-  ALOGE("UseCase: %s. Profiled Frames: %d.", use_case_.c_str(),
+  ALOGI("UseCase: %s. Profiled Frames: %d.", use_case_.c_str(),
         static_cast<int>(timing_map_.begin()->second.size()));
 
   std::vector<TimeResult> time_results;
 
   float sum_avg = 0.f;
   float max_max = 0.f;
+  float sum_min = 0.f;
   float sum_max = 0.f;
   for (const auto& [node_name, time_series] : timing_map_) {
     int num_frames = 0;
     int num_samples = 0;
-    float sum_dt = 0;
-    float max_dt = 0;
+    float sum_dt = 0.f;
+    float min_dt = std::numeric_limits<float>::max();
+    float max_dt = 0.f;
+    float mean_dt = 0.f;
+    std::vector<float> elapses;
     for (const auto& slot : time_series) {
       if (slot.count > 0) {
         float elapsed = (slot.end - slot.start) * kNanoToMilli;
         sum_dt += elapsed;
         num_samples += slot.count;
+        min_dt = std::min(min_dt, elapsed);
         max_dt = std::max(max_dt, elapsed);
         num_frames++;
+        elapses.push_back(elapsed);
       }
     }
     if (num_samples == 0) {
@@ -230,28 +367,60 @@
     float avg = sum_dt / std::max(1, num_samples);
     float avg_count = static_cast<float>(num_samples) /
                       static_cast<float>(std::max(1, num_frames));
-    sum_avg += avg * avg_count;
+    mean_dt = avg * avg_count;
+    sum_avg += mean_dt;
+    sum_min += min_dt;
     sum_max += max_dt;
     max_max = std::max(max_max, max_dt);
 
-    time_results.push_back({node_name, max_dt, avg * avg_count, avg_count});
+    // calculate StandardDeviation
+    float mean_max_stddevs = 0.f;
+    if (elapses.size() > 1) {
+      float dev_dt = StandardDeviation(elapses, mean_dt);
+      mean_max_stddevs = (max_dt - mean_dt) / dev_dt;
+    }
+
+    TimeSlot& frame_rate = frame_rate_map_[node_name];
+    int64_t duration = frame_rate.end - frame_rate.start;
+    float fps = 0;
+    if (duration > kNsPerSec) {
+      fps = frame_rate.count * kNsPerSec / static_cast<float>(duration);
+    }
+    time_results.push_back(
+        {node_name, min_dt, max_dt, mean_dt, avg_count, fps, mean_max_stddevs});
   }
 
   std::sort(time_results.begin(), time_results.end(),
             [](auto a, auto b) { return a.avg_dt > b.avg_dt; });
 
-  for (const auto it : time_results) {
-    ALOGE("%51.51s Max: %8.3f ms       Avg: %7.3f ms (Count = %3.1f)",
-          it.node_name.c_str(), it.max_dt, it.avg_dt, it.avg_count);
+  for (const auto& result : time_results) {
+    if (result.fps == 0) {
+      ALOGI(
+          "%51.51s Min: %8.3f ms,  Max: %8.3f ms,  Avg: %7.3f ms "
+          "(Count = %3.1f),  mean_max_stddevs: %6.2f,  fps:    NA",
+          result.node_name.c_str(), result.min_dt, result.max_dt, result.avg_dt,
+          result.avg_count, result.mean_max_stddevs);
+    } else {
+      ALOGI(
+          "%51.51s Min: %8.3f ms,  Max: %8.3f ms,  Avg: %7.3f ms "
+          "(Count = %3.1f),  mean_max_stddevs: %6.2f,  fps: %8.2f",
+          result.node_name.c_str(), result.min_dt, result.max_dt, result.avg_dt,
+          result.avg_count, result.mean_max_stddevs, result.fps);
+    }
   }
 
-  ALOGE("%43.43s     MAX SUM: %8.3f ms,  AVG SUM: %7.3f ms", "", sum_max,
-        sum_avg);
-  ALOGE("");
+  ALOGI("%43.43s     MIN SUM: %8.3f ms,  MAX SUM: %8.3f ms,  AVG SUM: %7.3f ms",
+        "", sum_min, sum_max, sum_avg);
+  ALOGI("");
 }
 
-void ProfilerImpl::DumpResult(std::string filepath) {
+void ProfilerImpl::DumpTxt(std::string_view filepath) {
+  // The dump result data is organized as 3 sections:
+  //  1. delta time and fps of each frame.
+  //  2. start time of each frame.
+  //  3. end time of each frame.
   if (std::ofstream fout(filepath, std::ios::out); fout.is_open()) {
+    fout << "// PROFILER_DELTA_TIME_AND_FPS, UNIT:MILLISECOND //\n";
     for (const auto& [node_name, time_series] : timing_map_) {
       fout << node_name << " ";
       for (const auto& time_slot : time_series) {
@@ -260,11 +429,88 @@
         fout << elapsed * kNanoToMilli << " ";
       }
       fout << "\n";
+      TimeSlot& frame_rate = frame_rate_map_[node_name];
+      int64_t duration = frame_rate.end - frame_rate.start;
+      float fps = 0;
+      if (duration > kNsPerSec) {
+        fps = frame_rate.count * kNsPerSec / static_cast<float>(duration);
+      }
+      if (fps > 0) {
+        fout << node_name << " fps:" << fps;
+      } else {
+        fout << node_name << " fps: NA";
+      }
+      fout << "\n";
+    }
+
+    fout << "\n// PROFILER_START_TIME, AVG TIMESTAMP, UNIT:NANOSECOND //\n";
+    for (const auto& [node_name, time_series] : timing_map_) {
+      fout << node_name << " ";
+      for (const auto& time_slot : time_series) {
+        int64_t avg_time_stamp = time_slot.start / std::max(1, time_slot.count);
+        fout << avg_time_stamp << " ";
+      }
+      fout << "\n";
+    }
+
+    fout << "\n// PROFILER_END_TIME, AVG TIMESTAMP,  UNIT:NANOSECOND //\n";
+    for (const auto& [node_name, time_series] : timing_map_) {
+      fout << node_name << " ";
+      for (const auto& time_slot : time_series) {
+        int64_t avg_time_stamp = time_slot.end / std::max(1, time_slot.count);
+        fout << avg_time_stamp << " ";
+      }
+      fout << "\n";
     }
     fout.close();
   }
 }
 
+void ProfilerImpl::DumpPb(std::string_view filepath) {
+  if (std::ofstream fout(filepath, std::ios::out); fout.is_open()) {
+    profiler::ProfilingResult profiling_result;
+    profiling_result.set_usecase(use_case_);
+    profiling_result.set_profile_start_time_nanos(object_init_real_time_);
+    profiling_result.set_profile_start_boottime_nanos(object_init_boot_time_);
+    profiling_result.set_profile_end_time_nanos(GetRealTimeNs());
+
+    for (const auto& [node_name, time_series] : timing_map_) {
+      profiler::TimeSeries& target = *profiling_result.add_target();
+      target.set_name(node_name);
+      for (const auto& time_slot : time_series) {
+        profiler::TimeStamp& time_stamp = *target.add_runtime();
+        // A single node can be called multiple times in a frame. Every time the
+        // node is called in the same frame, the profiler accumulates the
+        // timestamp value in time_slot.start/end, and increments the count.
+        // Therefore the result timestamp we stored is the `average` timestamp.
+        // Note: consider using minimum-start, and maximum-end.
+        time_stamp.set_start(time_slot.start / std::max(1, time_slot.count));
+        time_stamp.set_end(time_slot.end / std::max(1, time_slot.count));
+        time_stamp.set_count(time_slot.count);
+        time_stamp.set_request_id(time_slot.request_id);
+      }
+    }
+    profiling_result.SerializeToOstream(&fout);
+    fout.close();
+  }
+}
+
+// Get the latency associated with the name
+int64_t ProfilerImpl::GetLatencyInNanoseconds(const std::string& name,
+                                              int request_id) {
+  // Will use name to add various TraceInt64 here
+  int valid_request_id = (request_id == kInvalidRequestId) ? 0 : request_id + 1;
+  int64_t latency_ns = 0;
+  {
+    std::lock_guard<std::mutex> lk(lock_);
+    if (static_cast<std::size_t>(valid_request_id) < timing_map_[name].size()) {
+      TimeSlot& slot = timing_map_[name][valid_request_id];
+      latency_ns = slot.end - slot.start;
+    }
+  }
+  return latency_ns;
+}
+
 class ProfilerStopwatchImpl : public ProfilerImpl {
  public:
   ProfilerStopwatchImpl(SetPropFlag setting) : ProfilerImpl(setting){};
@@ -274,18 +520,23 @@
       return;
     }
     if (setting_ & SetPropFlag::kPrintBit) {
-      // Virtual function won't work in parent class's destructor. need to call
-      // it by ourself.
+      // Virtual function won't work in parent class's destructor. need to
+      // call it ourselves.
       PrintResult();
       // Erase the print bit to prevent parent class print again.
-      setting_ = static_cast<SetPropFlag>(setting_ & (!SetPropFlag::kPrintBit));
+      setting_ = static_cast<SetPropFlag>(setting_ & (~SetPropFlag::kPrintBit));
+    }
+    if (setting_ & SetPropFlag::kDumpBit) {
+      DumpResult(dump_file_prefix_ + use_case_ + "-TS" +
+                 std::to_string(object_init_real_time_) + ".txt");
+      setting_ = static_cast<SetPropFlag>(setting_ & (~SetPropFlag::kDumpBit));
     }
   }
 
   // Print out the profiling result in the standard output (ANDROID_LOG_ERROR)
   // with stopwatch mode.
   void PrintResult() override {
-    ALOGE("Profiling Case: %s", use_case_.c_str());
+    ALOGI("Profiling Case: %s", use_case_.c_str());
 
     // Sort by end time.
     std::list<std::pair<std::string, TimeSlot>> time_results;
@@ -303,11 +554,24 @@
     for (const auto& [node_name, slot] : time_results) {
       if (slot.count > 0) {
         float elapsed = (slot.end - slot.start) * kNanoToMilli;
-        ALOGE("%51.51s: %8.3f ms", node_name.c_str(), elapsed);
+        ALOGI("%51.51s: %8.3f ms", node_name.c_str(), elapsed);
       }
     }
 
-    ALOGE("");
+    ALOGI("");
+  }
+
+  void DumpResult(const std::string& filepath) override {
+    if (std::ofstream fout(filepath, std::ios::out); fout.is_open()) {
+      for (const auto& [node_name, time_series] : timing_map_) {
+        fout << node_name << " ";
+        for (const auto& slot : time_series) {
+          fout << (slot.end - slot.start) * kNanoToMilli << " ";
+        }
+        fout << "\n";
+      }
+      fout.close();
+    }
   }
 };
 
@@ -317,19 +581,15 @@
   ProfilerDummy(){};
   ~ProfilerDummy(){};
 
-  void SetUseCase(std::string) override final {
-  }
-
-  void SetDumpFilePrefix(std::string) override final {
-  }
-
-  void Start(const std::string, int) override final {
-  }
-
-  void End(const std::string, int) override final {
-  }
-
-  void PrintResult() override final {
+  void SetUseCase(std::string) override final{};
+  void SetDumpFilePrefix(const std::string&) override final{};
+  void Start(const std::string&, int) override final{};
+  void End(const std::string&, int) override final{};
+  void PrintResult() override final{};
+  void ProfileFrameRate(const std::string&) override final{};
+  void SetFpsPrintInterval(int32_t) override final{};
+  int64_t GetLatencyInNanoseconds(const std::string&, int) override final {
+    return 0;
   }
 };
 
@@ -339,11 +599,11 @@
   SetPropFlag flag = static_cast<SetPropFlag>(option);
 
   if (flag == SetPropFlag::kDisable) {
-    return std::make_unique<ProfilerDummy>();
+    return std::make_shared<ProfilerDummy>();
   } else if (flag & SetPropFlag::kStopWatch) {
-    return std::make_unique<ProfilerStopwatchImpl>(flag);
+    return std::make_shared<ProfilerStopwatchImpl>(flag);
   } else {
-    return std::make_unique<ProfilerImpl>(flag);
+    return std::make_shared<ProfilerImpl>(flag);
   }
 }
 
diff --git a/common/profiler/profiler.h b/common/profiler/profiler.h
index debfe6f..9e8f16f 100644
--- a/common/profiler/profiler.h
+++ b/common/profiler/profiler.h
@@ -39,6 +39,67 @@
 //    $ adb shell setprop persist.vendor.camera.profiler 2
 //  - To print and dump the profiling result to "/data/vendor/camera/profiler":
 //    $ adb shell setprop persist.vendor.camera.profiler 3
+//  After add FPS option, here are the combination results.
+//  Option 0 (kDisable): Disable Profiler
+//  Option 1 (kPrintBit):
+//    When close, Print the result
+//    - Processing time
+//    - FPS with total frames on process start function
+//  Option 2 (kDumpBit):
+//    When close, dump the result to file(dump_file_prefix)
+//    - Processing time
+//    - FPS with total frames on process start function
+//  Option 3 (kPrintBit|kDumpBit):
+//    When close, print and dump the result
+//    - Processing time
+//    - FPS with total frames on process start function
+//  Option 8 (kPrintFpsPerIntervalBit):
+//    Print FPS per interval on ProfileFrameRate function
+//    The frequency is based on the value of SetFpsPrintInterval()
+//  Option 9 (kPrintFpsPerIntervalBit|kPrintBit):
+//    Print FPS per interval on process start and ProfileFrameRate function
+//    When close, print the result
+//    - Processing time
+//    - FPS with total frames on process start function
+//  Option 10 (kPrintFpsPerIntervalBit|kDumpBit):
+//    Print FPS per interval on process start and ProfileFrameRate function
+//    When close, dump the result
+//    - Processing time
+//    - FPS with total frames on process start function
+//  Option 11 (kPrintFpsPerIntervalBit|kPrintBit|kDumpBit):
+//    Print FPS per interval on process start and ProfileFrameRate function
+//    When close, print and dump the result
+//    - Processing time
+//    - FPS with total frames on process start function
+//  Option 16 (kCalculateFpsOnEndBit):
+//    Calculate FPS on process end function instead of process start function
+//  Option 17 (kCalculateFpsOnEndBit|kPrintBit):
+//    When close, print the result
+//    - Processing time
+//    - FPS with total frames on process "end" function
+//  Option 18 (kCalculateFpsOnEndBit|kDumpBit):
+//    When close, dump the result
+//    - Processing time
+//    - FPS with total frames on process "end" function
+//  Option 19 (kCalculateFpsOnEndBit|kPrintBit|kDumpBit):
+//    When close, print and dump the result
+//    - Processing time
+//    - FPS with total frames on process "end" function
+//  Option 25 (kCalculateFpsOnEndBit|kPrintFpsPerIntervalBit|kPrintBit):
+//    Print FPS per interval on process start and ProfileFrameRate function
+//    When close, print the result
+//    - Processing time
+//    - FPS with total frames on process "end" function
+//  Option 26 (kCalculateFpsOnEndBit|kPrintFpsPerIntervalBit|kDumpBit):
+//    Print FPS per interval on process start and ProfileFrameRate function
+//    When close, dump the result
+//    - Processing time
+//    - FPS with total frames on process "end" function
+//  Option 27 (kCalculateFpsOnEndBit|kPrintFpsPerIntervalBit|kPrintBit|kDumpBit):
+//    Print FPS per interval on process start and ProfileFrameRate function
+//    When close, print and dump the result
+//    - Processing time
+//    - FPS with total frames on process "end" function
 //
 //  By default the profiler is disabled.
 //
@@ -86,14 +147,24 @@
   // Create profiler.
   static std::shared_ptr<Profiler> Create(int option);
 
-  virtual ~Profiler(){};
+  virtual ~Profiler() = default;
 
   // adb setprop options.
   enum SetPropFlag {
     kDisable = 0,
     kPrintBit = 1 << 0,
     kDumpBit = 1 << 1,
-    kStopWatch = 1 << 2
+    kStopWatch = 1 << 2,
+    // Print FPS per interval time based on the value of SetFpsPrintInterval()
+    kPrintFpsPerIntervalBit = 1 << 3,
+    // Calculate FPS on process end function instead of process start function
+    kCalculateFpsOnEndBit = 1 << 4,
+    // Dynamic start profiling.
+    kDynamicStartBit = 1 << 5,
+    // Dumps result using proto format.
+    kProto = 1 << 6,
+    // Customized profiler derived from Profiler
+    kCustomProfiler = 1 << 7,
   };
 
   // Setup the name of use case the profiler is running.
@@ -106,7 +177,7 @@
   //  dump_file_prefix: file prefix name. In the current setting,
   //    "/data/vendor/camera/" is a valid folder for camera to dump file.
   //    A valid prefix can be "/data/vendor/camera/test_prefix_".
-  virtual void SetDumpFilePrefix(std::string dump_file_prefix) = 0;
+  virtual void SetDumpFilePrefix(const std::string& dump_file_prefix) = 0;
 
   // Start to profile.
   // We use start and end to choose which code snippet to be profile.
@@ -115,19 +186,32 @@
   // Arguments:
   //   name: the name of the node to be profiled.
   //   request_id: frame requesd id.
-  virtual void Start(const std::string name, int request_id) = 0;
+  virtual void Start(const std::string& name, int request_id) = 0;
 
   // End the profileing.
   // Arguments:
   //   name: the name of the node to be profiled. Should be the same in Start().
   //   request_id: frame requesd id.
-  virtual void End(const std::string name, int request_id) = 0;
+  virtual void End(const std::string& name, int request_id) = 0;
 
   // Print out the profiling result in the standard output (ANDROID_LOG_ERROR).
   virtual void PrintResult() = 0;
 
+  // Profile the frame rate
+  // If only this function is called, without Start() and End(), the profiler
+  // must be created with the kPrintFpsPerIntervalBit option set.
+  // It then prints FPS once per configured interval.
+  virtual void ProfileFrameRate(const std::string& name) = 0;
+
+  // Sets the FPS print interval.
+  // The unit is seconds, and interval_seconds must be >= 1.
+  virtual void SetFpsPrintInterval(int32_t interval_seconds) = 0;
+
+  virtual int64_t GetLatencyInNanoseconds(const std::string& name,
+                                          int request_id) = 0;
+
  protected:
-  Profiler(){};
+  Profiler() = default;
 };
 
 // A scoped utility class to facilitate profiling.
diff --git a/common/profiler/profiler.proto b/common/profiler/profiler.proto
new file mode 100644
index 0000000..d18ecc8
--- /dev/null
+++ b/common/profiler/profiler.proto
@@ -0,0 +1,37 @@
+// This is the definition for camera profiler message.
+syntax = "proto2";
+
+package profiler;
+
+// Time stamp of start and end processing time.
+message TimeStamp {
+  optional uint64 start = 1;
+  optional uint64 end = 2;
+  optional uint32 count = 3;
+  optional int32 request_id = 4;
+}
+
+// TimeSeries stores the target name and a series of time stamps and fps.
+message TimeSeries {
+  optional string name = 1;
+  repeated TimeStamp runtime = 2;
+}
+
+// Profiling result stores the usecase name, and the targets' runtime it
+// profiled.
+message ProfilingResult {
+  // The usecase indicates which mode/session it is profiling. For example,
+  // camera preview, video preview, etc.
+  optional string usecase = 1;
+  repeated TimeSeries target = 2;
+  optional string build_id = 3;
+  optional string device_type = 4;
+  optional string device_serial = 5;
+  // Unix epoch timestamp when profile started.
+  optional int64 profile_start_time_nanos = 6;
+  // CLOCK_BOOTTIME value as near as possible to the same moment
+// as profile_start_time_nanos.
+  optional uint64 profile_start_boottime_nanos = 7;
+  // Unix epoch timestamp when profile ended.
+  optional int64 profile_end_time_nanos = 8;
+}
diff --git a/common/sensor_listener/Android.bp b/common/sensor_listener/Android.bp
new file mode 100644
index 0000000..56e6d7b
--- /dev/null
+++ b/common/sensor_listener/Android.bp
@@ -0,0 +1,72 @@
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library {
+    name: "lib_sensor_listener",
+    vendor: true,
+    owner: "google",
+    host_supported: true,
+
+    srcs: [
+        "goog_gyro_direct.cc",
+        "goog_gralloc_wrapper.cc",
+        "goog_sensor_environment.cc",
+        "goog_sensor_motion.cc",
+        "goog_sensor_sync.cc",
+        "goog_sensor_wrapper.cc",
+    ],
+
+    include_dirs: ["."],
+    export_include_dirs: ["."],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-Wthread-safety",
+        "-Wno-unused-parameter",
+        "-Wno-unused-function",
+        "-Wno-unused-variable",
+    ],
+    header_libs: [
+        "libhardware_headers",
+    ],
+    shared_libs: [
+        "android.frameworks.sensorservice@1.0",
+        "android.hardware.graphics.allocator@3.0",
+        "android.hardware.graphics.mapper@3.0",
+        "android.hardware.sensors@1.0",
+        "android.hidl.allocator@1.0",
+        "libbase",
+        "libcutils",
+        "libhidlbase",
+        "liblog",
+        "libutils",
+    ],
+    export_shared_lib_headers: [
+        "android.frameworks.sensorservice@1.0",
+        "android.hardware.graphics.allocator@3.0",
+    ],
+}
+
+cc_test {
+    name: "lib_sensor_listener_test",
+    gtest: true,
+    vendor: true,
+    owner: "google",
+
+    local_include_dirs: ["."],
+
+    srcs: [
+        "tests/goog_gyro_test.cc",
+        "tests/goog_sensor_environment_test.cc",
+        "tests/goog_sensor_motion_test.cc",
+        "tests/goog_sensor_sync_test.cc",
+    ],
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libutils",
+        "lib_sensor_listener",
+    ],
+}
diff --git a/common/sensor_listener/OWNERS b/common/sensor_listener/OWNERS
new file mode 100644
index 0000000..b18a027
--- /dev/null
+++ b/common/sensor_listener/OWNERS
@@ -0,0 +1,3 @@
+krzysio@google.com
+xuetu@google.com
+zhijunhe@google.com
diff --git a/common/sensor_listener/goog_gralloc_wrapper.cc b/common/sensor_listener/goog_gralloc_wrapper.cc
new file mode 100644
index 0000000..df30bf7
--- /dev/null
+++ b/common/sensor_listener/goog_gralloc_wrapper.cc
@@ -0,0 +1,254 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "goog_gralloc_wrapper"
+
+#include "goog_gralloc_wrapper.h"
+
+#include <utils/Log.h>
+
+namespace android {
+namespace camera_sensor_listener {
+
+using ::android::hardware::hidl_handle;
+using ::android::hardware::graphics::allocator::V3_0::IAllocator;
+using ::android::hardware::graphics::mapper::V3_0::BufferDescriptor;
+using ::android::hardware::graphics::mapper::V3_0::Error;
+using ::android::hardware::graphics::mapper::V3_0::IMapper;
+
+GoogGrallocWrapper::GoogGrallocWrapper() {
+  allocator_ = IAllocator::getService();
+  if (allocator_ == nullptr) {
+    ALOGE("%s %d Failed to get allocator service", __func__, __LINE__);
+  }
+
+  mapper_ = IMapper::getService();
+  if (mapper_ == nullptr) {
+    ALOGE("%s %d Failed to get mapper service", __func__, __LINE__);
+  }
+  if (mapper_->isRemote()) {
+    ALOGE("%s %d Mapper is not in passthrough mode", __func__, __LINE__);
+  }
+}
+
+GoogGrallocWrapper::~GoogGrallocWrapper() {
+  for (auto buffer_handle : cloned_buffers_) {
+    auto buffer = const_cast<native_handle_t*>(buffer_handle);
+    native_handle_close(buffer);
+    native_handle_delete(buffer);
+  }
+  cloned_buffers_.clear();
+
+  for (auto buffer_handle : imported_buffers_) {
+    auto buffer = const_cast<native_handle_t*>(buffer_handle);
+    if (mapper_->freeBuffer(buffer) != Error::NONE) {
+      ALOGE("%s %d Failed to free buffer %p", __func__, __LINE__, buffer);
+    }
+  }
+  imported_buffers_.clear();
+}
+
+sp<IAllocator> GoogGrallocWrapper::GetAllocator() const {
+  return allocator_;
+}
+
+std::string GoogGrallocWrapper::DumpDebugInfo() const {
+  std::string debug_info;
+  allocator_->dumpDebugInfo([&debug_info](const auto& tmp_debug_info) {
+    debug_info = tmp_debug_info.c_str();
+  });
+
+  return debug_info;
+}
+
+const native_handle_t* GoogGrallocWrapper::CloneBuffer(
+    const hidl_handle& raw_handle) {
+  const native_handle_t* buffer_handle =
+      native_handle_clone(raw_handle.getNativeHandle());
+
+  if (buffer_handle) {
+    cloned_buffers_.insert(buffer_handle);
+  }
+  return buffer_handle;
+}
+
+// When import is false, this simply calls IAllocator::allocate. When import
+// is true, the returned buffers are also imported into the mapper.
+// Either case, the returned buffers must be freed with freeBuffer.
+std::vector<const native_handle_t*> GoogGrallocWrapper::Allocate(
+    const BufferDescriptor& descriptor, uint32_t count, bool import,
+    uint32_t* out_stride) {
+  std::vector<const native_handle_t*> buffer_handles;
+  buffer_handles.reserve(count);
+  allocator_->allocate(
+      descriptor, count,
+      [&](const auto& tmp_error, const auto& tmp_stride,
+          const auto& tmp_buffers) {
+        if (tmp_error != Error::NONE) {
+          ALOGE("%s %d Failed to allocate buffers", __func__, __LINE__);
+        }
+        if (count != tmp_buffers.size()) {
+          ALOGE("%s %d Invalid buffer array", __func__, __LINE__);
+        }
+
+        for (uint32_t i = 0; i < count; i++) {
+          if (import) {
+            buffer_handles.push_back(ImportBuffer(tmp_buffers[i]));
+          } else {
+            buffer_handles.push_back(CloneBuffer(tmp_buffers[i]));
+          }
+        }
+
+        if (out_stride) {
+          *out_stride = tmp_stride;
+        }
+      });
+
+  return buffer_handles;
+}
+
+const native_handle_t* GoogGrallocWrapper::AllocateOneBuffer(
+    const IMapper::BufferDescriptorInfo& descriptor_info, bool import,
+    uint32_t* out_stride) {
+  BufferDescriptor descriptor = CreateDescriptor(descriptor_info);
+  auto buffers = Allocate(descriptor, 1, import, out_stride);
+  return buffers[0];
+}
+
+sp<IMapper> GoogGrallocWrapper::GetMapper() const {
+  return mapper_;
+}
+
+BufferDescriptor GoogGrallocWrapper::CreateDescriptor(
+    const IMapper::BufferDescriptorInfo& descriptor_info) {
+  BufferDescriptor descriptor;
+  mapper_->createDescriptor(
+      descriptor_info,
+      [&descriptor](const auto& tmp_error, const auto& tmp_descriptor) {
+        if (tmp_error != Error::NONE) {
+          ALOGE("Failed to create descriptor");
+        }
+        descriptor = tmp_descriptor;
+      });
+
+  return descriptor;
+}
+
+const native_handle_t* GoogGrallocWrapper::ImportBuffer(
+    const hidl_handle& raw_handle) {
+  const native_handle_t* buffer_handle = nullptr;
+  mapper_->importBuffer(
+      raw_handle, [&buffer_handle, &raw_handle](const auto& tmp_error,
+                                                const auto& tmp_buffer) {
+        if (tmp_error != Error::NONE) {
+          ALOGE("%s %d Failed to import buffer %p", __func__, __LINE__,
+                raw_handle.getNativeHandle());
+        }
+        buffer_handle = static_cast<const native_handle_t*>(tmp_buffer);
+      });
+
+  if (buffer_handle) {
+    imported_buffers_.insert(buffer_handle);
+  }
+
+  return buffer_handle;
+}
+
+void GoogGrallocWrapper::FreeBuffer(const native_handle_t* buffer_handle) {
+  auto buffer = const_cast<native_handle_t*>(buffer_handle);
+
+  if (imported_buffers_.erase(buffer_handle)) {
+    Error error = mapper_->freeBuffer(buffer);
+    if (error != Error::NONE) {
+      ALOGE("%s %d Failed to free %p", __func__, __LINE__, buffer);
+    }
+  } else {
+    cloned_buffers_.erase(buffer_handle);
+    native_handle_close(buffer);
+    native_handle_delete(buffer);
+  }
+}
+
+// We use fd instead of ::android::hardware::hidl_handle in these functions to
+// pass fences in and out of the mapper.  The ownership of the fd is always
+// transferred with each of these functions.
+void* GoogGrallocWrapper::Lock(const native_handle_t* buffer_handle,
+                               uint64_t cpu_usage,
+                               const IMapper::Rect& access_region,
+                               int acquire_fence) {
+  auto buffer = const_cast<native_handle_t*>(buffer_handle);
+
+  NATIVE_HANDLE_DECLARE_STORAGE(acquire_fence_storage, 1, 0);
+  hidl_handle acquire_fence_handle;
+  if (acquire_fence >= 0) {
+    auto h = native_handle_init(acquire_fence_storage, 1, 0);
+    h->data[0] = acquire_fence;
+    acquire_fence_handle = h;
+  }
+
+  void* data = nullptr;
+  mapper_->lock(buffer, cpu_usage, access_region, acquire_fence_handle,
+                [&buffer, &data](const auto& tmp_error, const auto& tmp_data,
+                                 const auto& /*bytesPerPixel*/,
+                                 const auto& /*bytesPerStride*/) {
+                  if (tmp_error != Error::NONE) {
+                    ALOGE("Failed to lock buffer %p", buffer);
+                  } else {
+                    data = tmp_data;
+                  }
+                });
+
+  if (acquire_fence >= 0) {
+    close(acquire_fence);
+  }
+
+  return data;
+}
+
+int GoogGrallocWrapper::Unlock(const native_handle_t* buffer_handle) {
+  auto buffer = const_cast<native_handle_t*>(buffer_handle);
+
+  int release_fence = -1;
+  mapper_->unlock(buffer,
+                  [&buffer, &release_fence](const auto& tmp_error,
+                                            const auto& tmp_release_fence) {
+                    if (tmp_error != Error::NONE) {
+                      ALOGE("Failed to unlock buffer %p", buffer);
+                    }
+
+                    auto fence_handle = tmp_release_fence.getNativeHandle();
+                    if (fence_handle) {
+                      if (fence_handle->numInts != 0) {
+                        ALOGE("Invalid fence handle %p", fence_handle);
+                      }
+                      if (fence_handle->numFds == 1) {
+                        release_fence = dup(fence_handle->data[0]);
+                        if (release_fence < 0) {
+                          ALOGE("Failed to dup fence fd");
+                        }
+                      } else {
+                        if (fence_handle->numFds != 0) {
+                          ALOGE("Invalid fence handle %p", fence_handle);
+                        }
+                      }
+                    }
+                  });
+
+  return release_fence;
+}
+
+}  // namespace camera_sensor_listener
+}  // namespace android
diff --git a/common/sensor_listener/goog_gralloc_wrapper.h b/common/sensor_listener/goog_gralloc_wrapper.h
new file mode 100644
index 0000000..02755e4
--- /dev/null
+++ b/common/sensor_listener/goog_gralloc_wrapper.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_GRALLOC_WRAPPER_H_
+#define VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_GRALLOC_WRAPPER_H_
+
+#include <unordered_set>
+
+#include <android/frameworks/sensorservice/1.0/ISensorManager.h>
+#include <android/hardware/graphics/allocator/3.0/IAllocator.h>
+#include <android/hardware/graphics/mapper/3.0/IMapper.h>
+#include <sys/mman.h>
+#include <utils/StrongPointer.h>
+
+namespace android {
+namespace camera_sensor_listener {
+
+// GoogGrallocWrapper is a wrapper class to
+// ::android::hardware::graphics::allocator::V3_0::IAllocator and
+// ::android::hardware::graphics::mapper::V3_0::IMapper.
+// It can be used by direct channel based sensor listener class.
+// Modified from //hardware/interfaces/graphics/mapper/2.0/utils/vts
+// /include/mapper-vts/2.0/MapperVts.h.
+class GoogGrallocWrapper {
+ public:
+  // Constructor.
+  GoogGrallocWrapper();
+
+  // Destructor.
+  ~GoogGrallocWrapper();
+
+  // Get StrongPointer to IAllocator.
+  sp<::android::hardware::graphics::allocator::V3_0::IAllocator> GetAllocator()
+      const;
+
+  // Wrapper of IAllocator dumpDebugInfo method.
+  std::string DumpDebugInfo() const;
+
+  // Wrapper of IAllocator allocate method.
+  std::vector<const native_handle_t*> Allocate(
+      const ::android::hardware::graphics::mapper::V3_0::BufferDescriptor&
+          descriptor,
+      uint32_t count, bool import = true, uint32_t* out_stride = nullptr);
+
+  // Special case of Allocate, where allocated buffer count is 1.
+  const native_handle_t* AllocateOneBuffer(
+      const ::android::hardware::graphics::mapper::V3_0::IMapper::
+          BufferDescriptorInfo& descriptor_info,
+      bool import = true, uint32_t* out_stride = nullptr);
+
+  // Get StrongPointer to IMapper.
+  sp<::android::hardware::graphics::mapper::V3_0::IMapper> GetMapper() const;
+
+  // Wrapper of IMapper createDescriptor method.
+  ::android::hardware::graphics::mapper::V3_0::BufferDescriptor CreateDescriptor(
+      const ::android::hardware::graphics::mapper::V3_0::IMapper::
+          BufferDescriptorInfo& descriptor_info);
+
+  // Wrapper of IMapper importBuffer method.
+  const native_handle_t* ImportBuffer(
+      const ::android::hardware::hidl_handle& raw_handle);
+
+  // Wrapper of IMapper freeBuffer method.
+  void FreeBuffer(const native_handle_t* buffer_handle);
+
+  // Wrapper of IMapper lock method.
+  // We use fd instead of hardware::hidl_handle in these functions to pass
+  // fences in and out of the mapper.  The ownership of the fd is always
+  // transferred with each of these functions.
+  void* Lock(const native_handle_t* buffer_handle, uint64_t cpu_usage,
+             const ::android::hardware::graphics::mapper::V3_0::IMapper::Rect&
+                 access_region,
+             int acquire_fence);
+
+  // Wrapper of IMapper unlock method.
+  // Returns the release fence fd, or -1 if there is none; ownership of the
+  // fd is transferred to the caller.
+  int Unlock(const native_handle_t* buffer_handle);
+
+ private:
+  const native_handle_t* CloneBuffer(const hardware::hidl_handle& raw_handle);
+
+  // StrongPointer to IAllocator.
+  sp<::android::hardware::graphics::allocator::V3_0::IAllocator> allocator_;
+
+  // StrongPointer to IMapper.
+  sp<::android::hardware::graphics::mapper::V3_0::IMapper> mapper_;
+
+  // Set of cloned buffer handles.
+  // Tracks every cloned handle so the destructor can free any that are
+  // still outstanding.
+  std::unordered_set<const native_handle_t*> cloned_buffers_;
+
+  // Set of imported buffer handles, tracked for the same reason.
+  std::unordered_set<const native_handle_t*> imported_buffers_;
+};
+
+}  // namespace camera_sensor_listener
+}  // namespace android
+
+#endif  // VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_GRALLOC_WRAPPER_H_
diff --git a/common/sensor_listener/goog_gyro_direct.cc b/common/sensor_listener/goog_gyro_direct.cc
new file mode 100644
index 0000000..f19290f
--- /dev/null
+++ b/common/sensor_listener/goog_gyro_direct.cc
@@ -0,0 +1,259 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "goog_gyro_direct"
+
+#include <android/frameworks/sensorservice/1.0/ISensorManager.h>
+#include <android/frameworks/sensorservice/1.0/types.h>
+#include <android/hardware/sensors/1.0/types.h>
+#include <hardware/sensors.h>
+#include <utils/Log.h>
+#include <utils/SystemClock.h>
+
+#include "goog_gralloc_wrapper.h"
+
+#include "goog_gyro_direct.h"
+
+namespace android {
+namespace camera_sensor_listener {
+
+using ::android::frameworks::sensorservice::V1_0::ISensorManager;
+using ::android::frameworks::sensorservice::V1_0::Result;
+using ::android::hardware::graphics::mapper::V3_0::IMapper;
+using ::android::hardware::sensors::V1_0::RateLevel;
+using ::android::hardware::sensors::V1_0::SensorFlagBits;
+using ::android::hardware::sensors::V1_0::SensorsEventFormatOffset;
+using ::android::hardware::sensors::V1_0::SensorType;
+
+// Private constructor: records the requested rate level and buffer length
+// and eagerly creates the gralloc wrapper. The direct channel itself is
+// set up later in EnableDirectChannel().
+GoogGyroDirect::GoogGyroDirect(RateLevel rate_level,
+                               size_t gyro_direct_buf_length)
+    : gyro_direct_initialized_(false),
+      gyro_direct_enabled_(false),
+      gyro_direct_rate_level_(rate_level),
+      gyro_direct_buf_length_(gyro_direct_buf_length),
+      goog_gralloc_wrapper_ptr_(nullptr),
+      gyro_direct_channel_native_buf_handle_(nullptr),
+      gyro_direct_channel_addr_(nullptr) {
+  goog_gralloc_wrapper_ptr_ = std::make_unique<GoogGrallocWrapper>();
+}
+
+GoogGyroDirect::~GoogGyroDirect() {
+  // Teardown order matters: stop reporting first, then unmap the shared
+  // buffer, and finally free the underlying gralloc buffer handle.
+  if (gyro_direct_enabled_) {
+    DisableDirectChannel();
+  }
+  if (gyro_direct_channel_addr_) {
+    goog_gralloc_wrapper_ptr_->Unlock(gyro_direct_channel_native_buf_handle_);
+  }
+  if (gyro_direct_channel_native_buf_handle_) {
+    goog_gralloc_wrapper_ptr_->FreeBuffer(
+        gyro_direct_channel_native_buf_handle_);
+  }
+}
+
+// Factory: converts the requested sampling period to the closest direct
+// channel RateLevel, constructs the listener, and enables the channel.
+// The instance is returned even if enabling fails; callers should check
+// GetSensorEnablingStatus().
+std::unique_ptr<GoogGyroDirect> GoogGyroDirect::Create(
+    int64_t gyro_sampling_period_us, size_t event_queue_size) {
+  // convert sampling period us to RateLevel.
+  // < 110Hz -> NORMAL (~50Hz), [110Hz, 440Hz) -> FAST (~200Hz),
+  // >= 440Hz -> VERY_FAST (~800Hz).
+  float sampling_rate =
+      static_cast<float>(1e6) / static_cast<float>(gyro_sampling_period_us);
+  RateLevel rate_level;
+  if (sampling_rate < 110.f) {
+    rate_level = RateLevel::NORMAL;
+  } else if (sampling_rate >= 110.f && sampling_rate < 440.f) {
+    rate_level = RateLevel::FAST;
+  } else {
+    rate_level = RateLevel::VERY_FAST;
+  }
+
+  std::unique_ptr<GoogGyroDirect> gyro_direct_ptr =
+      std::unique_ptr<GoogGyroDirect>(
+          new GoogGyroDirect(rate_level, event_queue_size));
+  // Defensive check: a non-nothrow new does not actually return nullptr.
+  if (gyro_direct_ptr == nullptr) {
+    ALOGE("%s %d failed to create GoogGyroDirect", __func__, __LINE__);
+  } else {
+    status_t result = gyro_direct_ptr->EnableDirectChannel();
+    if (result != 0) {
+      ALOGE("%s %d failed to enable GoogGyroDirect", __func__, __LINE__);
+    } else {
+      ALOGI("%s %d successfully enabled GoogGyroDirect", __func__, __LINE__);
+    }
+  }
+  return gyro_direct_ptr;
+}
+
+// Stops event reporting (RateLevel::STOP) but keeps the channel and shared
+// buffer alive so EnableDirectChannel() can re-enable without reallocating.
+status_t GoogGyroDirect::DisableDirectChannel() {
+  if (gyro_direct_initialized_ && gyro_direct_enabled_) {
+    gyro_direct_channel_->configure(sensor_info_.sensorHandle, RateLevel::STOP,
+                                    [](const auto&, auto) {});
+    gyro_direct_enabled_ = false;
+  }
+  return OK;
+}
+
+// Enables the gyro direct report channel.
+// On the first call this allocates and CPU-locks a BLOB gralloc buffer,
+// creates a gralloc direct channel from it, and configures the requested
+// rate level; later calls reuse the existing channel. Returns OK on
+// success, NO_MEMORY/UNKNOWN_ERROR on failure.
+status_t GoogGyroDirect::EnableDirectChannel() {
+  sp<ISensorManager> manager = ISensorManager::getService();
+  if (manager == nullptr) {
+    ALOGE("Cannot get ISensorManager");
+    return UNKNOWN_ERROR;
+  }
+  // Look up the default gyroscope; sensor_info_ is only valid when the
+  // callback sets `find`.
+  bool find = false;
+  manager->getDefaultSensor(SensorType::GYROSCOPE,
+                            [&](const auto& info, auto result) {
+                              if (result != Result::OK) {
+                                ALOGE("Cannot find default gyroscope");
+                                return;
+                              }
+                              sensor_info_ = info;
+                              find = true;
+                            });
+  ALOGV("%s %d gyro sensor handle find %d", __func__, __LINE__, find);
+  if (find == false) {
+    gyro_direct_enabled_ = false;
+    ALOGE("%s %d unable to find gyro sensor", __func__, __LINE__);
+    return UNKNOWN_ERROR;
+  }
+
+  // Initialize gyro direct channel buffer (first call only).
+  if (!gyro_direct_initialized_) {
+    bool channel_supported =
+        sensor_info_.flags & SensorFlagBits::DIRECT_CHANNEL_GRALLOC;
+    if (!channel_supported) {
+      ALOGE("%s %d Direct sensor is not supported", __func__, __LINE__);
+      return UNKNOWN_ERROR;
+    }
+
+    // Each slot in the ring buffer is one fixed-size sensor event record.
+    size_t buffer_size =
+        static_cast<size_t>(SensorsEventFormatOffset::TOTAL_LENGTH) *
+        gyro_direct_buf_length_;
+
+    using android::hardware::graphics::common::V1_0::BufferUsage;
+    using android::hardware::graphics::common::V1_2::PixelFormat;
+    IMapper::BufferDescriptorInfo buf_desc_info = {
+        .width = static_cast<uint32_t>(buffer_size),
+        .height = 1,
+        .layerCount = 1,
+        .format = PixelFormat::BLOB,
+        .usage = static_cast<uint64_t>(BufferUsage::SENSOR_DIRECT_DATA |
+                                       BufferUsage::CPU_READ_OFTEN),
+    };
+
+    gyro_direct_channel_native_buf_handle_ =
+        goog_gralloc_wrapper_ptr_->AllocateOneBuffer(buf_desc_info);
+    if (gyro_direct_channel_native_buf_handle_ == nullptr) {
+      ALOGE("%s %d Failed at allocating the channel native buffer", __func__,
+            __LINE__);
+      return NO_MEMORY;
+    }
+
+    // Keep the buffer CPU-locked for the lifetime of the channel; the
+    // destructor unlocks and frees it.
+    IMapper::Rect region{0, 0, static_cast<int32_t>(buf_desc_info.width),
+                         static_cast<int32_t>(buf_desc_info.height)};
+    gyro_direct_channel_addr_ = goog_gralloc_wrapper_ptr_->Lock(
+        gyro_direct_channel_native_buf_handle_, buf_desc_info.usage, region,
+        /*fence=*/-1);
+    if (gyro_direct_channel_addr_ == nullptr) {
+      goog_gralloc_wrapper_ptr_->FreeBuffer(
+          gyro_direct_channel_native_buf_handle_);
+      gyro_direct_channel_native_buf_handle_ = nullptr;
+      ALOGE("%s %d Failed at lock the gralloc buffer", __func__, __LINE__);
+      return UNKNOWN_ERROR;
+    }
+
+    gyro_direct_initialized_ = true;
+    manager->createGrallocDirectChannel(
+        gyro_direct_channel_native_buf_handle_, buffer_size,
+        [&](const auto& chan, auto result) {
+          if (result != Result::OK) {
+            gyro_direct_initialized_ = false;
+            ALOGE("%s %d Failed to create gralloc direct channel", __func__,
+                  __LINE__);
+          } else {
+            gyro_direct_channel_ = chan;
+          }
+        });
+  }
+
+  // Bail out if the channel was never created: calling configure() on a
+  // null interface below would crash.
+  if (!gyro_direct_initialized_ || gyro_direct_channel_ == nullptr) {
+    gyro_direct_enabled_ = false;
+    return UNKNOWN_ERROR;
+  }
+
+  // configure gyro direct channel.
+  gyro_direct_channel_->configure(
+      sensor_info_.sensorHandle, gyro_direct_rate_level_,
+      [](const auto& /*token*/, auto result) {
+        if (result != Result::OK) {
+          ALOGE("%s %d Cannot configure direct report", __func__, __LINE__);
+        }
+      });
+
+  gyro_direct_enabled_ = true;
+
+  ALOGV("%s %d Direct sensor mode: %d %d", __func__, __LINE__,
+        gyro_direct_initialized_, gyro_direct_enabled_);
+  return OK;
+}
+
+// Scans the direct-channel ring buffer and returns the events whose
+// timestamps fall in (start_time, end_time], in chronological order.
+// All five output vectors are cleared first and stay aligned index-wise.
+void GoogGyroDirect::QueryGyroEventsBetweenTimestamps(
+    int64_t start_time, int64_t end_time,
+    std::vector<int64_t>* event_timestamps, std::vector<float>* motion_vector_x,
+    std::vector<float>* motion_vector_y, std::vector<float>* motion_vector_z,
+    std::vector<int64_t>* event_arrival_timestamps) const {
+  event_timestamps->clear();
+  motion_vector_x->clear();
+  motion_vector_y->clear();
+  motion_vector_z->clear();
+  event_arrival_timestamps->clear();
+
+  // Channel was never successfully enabled; nothing to read.
+  if (gyro_direct_channel_addr_ == nullptr) {
+    return;
+  }
+  sensors_event_t* buffer_head_ptr =
+      reinterpret_cast<sensors_event_t*>(gyro_direct_channel_addr_);
+
+  // Copy shared buffer content to local buffer as lock is lacking.
+  // The HAL writes concurrently, so a snapshot minimizes (but does not
+  // eliminate) the torn-read window.
+  std::vector<sensors_event_t> event_vector;
+  event_vector.insert(event_vector.begin(), buffer_head_ptr,
+                      buffer_head_ptr + gyro_direct_buf_length_);
+
+  // All events in one query share a single arrival timestamp.
+  int64_t event_arrival_time = elapsedRealtimeNano();
+  // Locate the ring buffer head: the slot holding the oldest (smallest
+  // nonzero) timestamp. Zero timestamps are slots never written yet.
+  // NOTE(review): LLONG_MAX relies on a transitive <climits> include —
+  // confirm, or include it explicitly.
+  int64_t earliest_timestamp = LLONG_MAX;
+  int head_pos = -1;
+  int num_events = event_vector.size();
+  for (int i = 0; i < num_events; ++i) {
+    if (event_vector[i].timestamp != 0 &&
+        event_vector[i].timestamp < earliest_timestamp) {
+      earliest_timestamp = event_vector[i].timestamp;
+      head_pos = i;
+    }
+  }
+
+  // Fill events within timestamps range to output vectors.
+  // head_pos == -1 means the buffer is still all-zero (no events yet).
+  if (head_pos == -1) {
+    return;
+  }
+  // Walk the ring once, oldest to newest; timestamps are monotonically
+  // increasing along this walk, so we can break past end_time.
+  for (int i = 0; i < num_events; ++i) {
+    int index = (head_pos + i) % num_events;
+    if (event_vector[index].timestamp <= start_time) {
+      continue;
+    }
+    if (event_vector[index].timestamp > end_time) {
+      break;
+    }
+
+    event_timestamps->push_back(event_vector[index].timestamp);
+    motion_vector_x->push_back(event_vector[index].data[0]);
+    motion_vector_y->push_back(event_vector[index].data[1]);
+    motion_vector_z->push_back(event_vector[index].data[2]);
+    event_arrival_timestamps->push_back(event_arrival_time);
+  }
+}
+
+}  // namespace camera_sensor_listener
+}  // namespace android
diff --git a/common/sensor_listener/goog_gyro_direct.h b/common/sensor_listener/goog_gyro_direct.h
new file mode 100644
index 0000000..bd831c9
--- /dev/null
+++ b/common/sensor_listener/goog_gyro_direct.h
@@ -0,0 +1,162 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_GYRO_DIRECT_H_
+#define VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_GYRO_DIRECT_H_
+
+#include <android/frameworks/sensorservice/1.0/ISensorManager.h>
+#include <android/hardware/sensors/1.0/types.h>
+
+#include "goog_gralloc_wrapper.h"
+
+namespace android {
+namespace camera_sensor_listener {
+
+// GoogGyroDirect class will create a gyro direct channel listener.
+// It fetches gyro events (timestamp, azimuth, pitch, roll, fetching timestamp)
+// from a shared buffer.
+// Sample usage:
+//   std::unique_ptr<GoogGyroDirect> direct_ptr_ = GoogGyroDirect::Create(
+//       /*gyro_sampling_period_us=*/5000,
+//       /*event_queue_size=*/20);
+//   if (direct_ptr->GetSensorEnablingStatus()) {
+//     std::vector<int64_t> event_timestamps;
+//     std::vector<float> motion_vector_x;
+//     std::vector<float> motion_vector_y;
+//     std::vector<float> motion_vector_z;
+//     std::vector<int64_t> arrival_timestamps;
+//     // Get all available gyro events.
+//     direct_ptr->QueryGyroEventsBetweenTimestamps(
+//         /*start_time=*/0,
+//         /*end_time=*/LLONG_MAX,
+//         &event_timestamps,
+//         &motion_vector_x,
+//         &motion_vector_y,
+//         &motion_vector_z,
+//         &arrival_timestamps);
+//   }
+class GoogGyroDirect {
+ public:
+  // Create a new instance of GoogGyroDirect.
+  // Inputs:
+  //   gyro_sampling_period_us: gyro sampling period in us (1e-6s).
+  //     Direct channel only supports following rates:
+  //       NORMAL,          // nominal 50Hz
+  //       FAST,            // nominal 200Hz
+  //       VERY_FAST,       // nominal 800Hz
+  //     gyro_sampling_period_us will be converted to closest rate level, i.e.,
+  //     sampling_frequency = 1e6 / gyro_sampling_period_us,
+  //     sampling_frequency < 110Hz -> RateLevel:NORMAL,
+  //     110Hz <= sampling_frequency < 440Hz -> RateLevel:FAST,
+  //     sampling_frequency >= 440Hz -> RateLevel::VERY_FAST.
+  //     Default sampling period is 5000us (200Hz -> RateLevel:FAST).
+  //  event_queue_size: size of event queue to hold incoming sensor events.
+  //    Default value is set to 20.
+  // The instance is returned even if enabling the channel failed; check
+  // GetSensorEnablingStatus() before use.
+  static std::unique_ptr<GoogGyroDirect> Create(
+      int64_t gyro_sampling_period_us = kDefaultSamplingPeriodUs,
+      size_t event_queue_size = kDefaultEventQueueSize);
+
+  // Destructor.
+  // Destroy and free the resources of a GoogGyroDirect.
+  ~GoogGyroDirect();
+
+  // Get whether gyro direct channel is enabled.
+  // Return true if gyro direct channel is enabled, false otherwise.
+  bool GetSensorEnablingStatus() const {
+    return gyro_direct_enabled_;
+  }
+
+  // Query gyro events between between the range (start_time, end_time].
+  // Inputs:
+  //   start_time: queried events' timestamps must be > start_time.
+  //   end_time: queried events' timestamps must be <= end_time.
+  // Outputs:
+  //   event_timestamps: pointer of vector to hold queried events' timestamps.
+  //   motion_vector_x: pointer of vector to hold queried events' x axis data.
+  //   motion_vector_y: pointer of vector to hold queried events' y axis data.
+  //   motion_vector_z: pointer of vector to hold queried events' z axis data.
+  //   event_arrival_timestamps: pointer of vector to hold arrival times.
+  // Event timestamps, data and arrival timestamps are listed in chronological
+  // order, i.e., event_timestamps[0], motion_vector_x[0],
+  // motion_vector_y[0], motion_vector_z[0], and event_arrival_timestamps[0]
+  // hold the earliest data.
+  // motion_vector_x, motion_vector_y, motion_vector_z are azimuth, pitch, roll
+  // respectively.
+  // Note: the implementation snapshots the shared buffer without a lock
+  // (see .cc); concurrent HAL writes may occasionally tear an event.
+  void QueryGyroEventsBetweenTimestamps(
+      int64_t start_time, int64_t end_time,
+      std::vector<int64_t>* event_timestamps,
+      std::vector<float>* motion_vector_x, std::vector<float>* motion_vector_y,
+      std::vector<float>* motion_vector_z,
+      std::vector<int64_t>* event_arrival_timestamps) const;
+
+  // Enable GoogGyroDirect to query events from direct channel.
+  // Safe to call repeatedly; the channel buffer is only allocated once.
+  // Return 0 on success.
+  status_t EnableDirectChannel();
+
+  // Disable GoogGyroDirect.
+  // Stops reporting but keeps the channel so it can be re-enabled cheaply.
+  // Return 0 on success.
+  status_t DisableDirectChannel();
+
+ private:
+  // Constructor.
+  // Create and initialize GoogGyroDirect.
+  // Inputs:
+  //   rate_level: gyro sampling rate level.
+  //     See definition at android/hardware/interfaces/sensors/1.0/types.hal.
+  //   gyro_direct_buf_length: shared buffer length to hold gyro events.
+  GoogGyroDirect(::android::hardware::sensors::V1_0::RateLevel rate_level,
+                 size_t gyro_direct_buf_length);
+
+  // Whether gyro direct channel buffer is initialized.
+  bool gyro_direct_initialized_;
+
+  // Whether gyro direct channel is enabled.
+  bool gyro_direct_enabled_;
+
+  // Gyro events sampling rate level.
+  // See definition at android/hardware/interfaces/sensors/1.0/types.hal.
+  ::android::hardware::sensors::V1_0::RateLevel gyro_direct_rate_level_;
+
+  // Shared buffer length to hold gyro events.
+  size_t gyro_direct_buf_length_;
+
+  // Unique pointer to GoogGrallocWrapper.
+  std::unique_ptr<GoogGrallocWrapper> goog_gralloc_wrapper_ptr_;
+
+  // Strong pointer to IDirectReportChannel.
+  sp<::android::frameworks::sensorservice::V1_0::IDirectReportChannel>
+      gyro_direct_channel_;
+
+  // Const pointer to direct channel native buffer handle.
+  const native_handle_t* gyro_direct_channel_native_buf_handle_;
+
+  // Gyro direct channel address (CPU-mapped shared ring buffer).
+  void* gyro_direct_channel_addr_;
+
+  // Gyro sensor info.
+  ::android::hardware::sensors::V1_0::SensorInfo sensor_info_;
+
+  // Default sensor event queue size is set to 20.
+  static constexpr size_t kDefaultEventQueueSize = 20;
+
+  // Default sensor sampling period is set to 5000us(1e-6s), i.e. 200Hz.
+  static constexpr int64_t kDefaultSamplingPeriodUs = 5000;
+};
+
+}  // namespace camera_sensor_listener
+}  // namespace android
+
+#endif  // VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_GYRO_DIRECT_H_
diff --git a/common/sensor_listener/goog_sensor_environment.cc b/common/sensor_listener/goog_sensor_environment.cc
new file mode 100644
index 0000000..5aac157
--- /dev/null
+++ b/common/sensor_listener/goog_sensor_environment.cc
@@ -0,0 +1,157 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "goog_sensor_environment"
+
+#include "goog_sensor_environment.h"
+
+#include "utils/Errors.h"
+#include "utils/Log.h"
+#include "utils/RefBase.h"
+
+namespace android {
+namespace camera_sensor_listener {
+namespace {
+
+using ::android::frameworks::sensorservice::V1_0::ISensorManager;
+using ::android::frameworks::sensorservice::V1_0::Result;
+using ::android::hardware::sensors::V1_0::SensorInfo;
+using ::android::hardware::sensors::V1_0::SensorType;
+
+// Maps a listener-level EnvironmentSensorType to the HAL SensorType.
+// Unknown values trigger LOG_FATAL (debug builds) and fall back to
+// DEVICE_ORIENTATION otherwise.
+SensorType GetHalSensorType(EnvironmentSensorType sensor_type) {
+  switch (sensor_type) {
+    case EnvironmentSensorType::DEVICE_ORIENTATION:
+      return SensorType::DEVICE_ORIENTATION;
+    case EnvironmentSensorType::LIGHT:
+      return SensorType::LIGHT;
+    case EnvironmentSensorType::PROXIMITY:
+      return SensorType::PROXIMITY;
+    default:
+      LOG_FATAL("Unknown sensor type %d", static_cast<int>(sensor_type));
+      return SensorType::DEVICE_ORIENTATION;
+  }
+}
+
+}  // namespace
+
+// Private constructor: forwards the queue size to the GoogSensorWrapper
+// base and records the sensor type. Enabling happens in Create().
+GoogSensorEnvironment::GoogSensorEnvironment(
+    EnvironmentSensorType environment_sensor_type, size_t event_queue_size)
+    : GoogSensorWrapper(event_queue_size),
+      environment_sensor_type_(environment_sensor_type) {
+  ALOGD("%s %d create sensor %s", __func__, __LINE__,
+        GetSensorName(environment_sensor_type));
+}
+
+GoogSensorEnvironment::~GoogSensorEnvironment() {
+  // Stop receiving events before the object goes away.
+  Disable();
+  ALOGD("%s %d destroy sensor %s", __func__, __LINE__, GetSensorName());
+}
+
+// Factory: validates the sensor type, constructs the listener, and enables
+// it. Returns nullptr only for an invalid type; an instance whose Enable()
+// failed is still returned, so callers should check
+// GetSensorEnablingStatus().
+sp<GoogSensorEnvironment> GoogSensorEnvironment::Create(
+    EnvironmentSensorType environment_sensor_type, size_t event_queue_size) {
+  // Check sensor_type validity.
+  int environment_sensor_type_index = static_cast<int>(environment_sensor_type);
+  if (environment_sensor_type_index >=
+      static_cast<int>(EnvironmentSensorType::TOTAL_NUM)) {
+    ALOGE("%s %d unsupported environment sensor type %d", __func__, __LINE__,
+          environment_sensor_type_index);
+    return nullptr;
+  }
+
+  // Create sensor.
+  sp<GoogSensorEnvironment> sensor_ptr =
+      new GoogSensorEnvironment(environment_sensor_type, event_queue_size);
+  if (sensor_ptr == nullptr) {
+    ALOGE("%s %d failed to create GoogSensorEnvironment for %s", __func__,
+          __LINE__, GetSensorName(environment_sensor_type));
+  } else {
+    // Enable sensor.
+    status_t result = sensor_ptr->Enable();
+    if (result != 0) {
+      ALOGE("%s %d failed to enable GoogSensorEnvironment for %s", __func__,
+            __LINE__, sensor_ptr->GetSensorName());
+    } else {
+      ALOGD("%s %d successfully enabled GoogSensorEnvironment for %s", __func__,
+            __LINE__, sensor_ptr->GetSensorName());
+    }
+  }
+  return sensor_ptr;
+}
+
+// Copies the newest num_sample events from the buffered queue into the
+// three output vectors (cleared first, kept index-aligned). Returns all
+// buffered events when fewer than num_sample are available; returns
+// nothing for negative num_sample.
+void GoogSensorEnvironment::GetLatestNSensorEvents(
+    int num_sample, std::vector<int64_t>* event_timestamps,
+    std::vector<float>* event_data,
+    std::vector<int64_t>* event_arrival_timestamps) const {
+  event_timestamps->clear();
+  event_data->clear();
+  event_arrival_timestamps->clear();
+
+  if (num_sample < 0) {
+    return;
+  }
+  std::lock_guard<std::mutex> l(event_buffer_lock_);
+  // Skip straight to the last num_sample entries of the buffer.
+  int start_index =
+      std::max(0, static_cast<int>(event_buffer_.size()) - num_sample);
+  auto event = event_buffer_.begin();
+  std::advance(event, start_index);
+  for (; event != event_buffer_.end(); ++event) {
+    event_timestamps->push_back(event->sensor_event.timestamp);
+    event_data->push_back(event->sensor_event.u.scalar);
+    event_arrival_timestamps->push_back(event->event_arrival_time_ns);
+    // Cast is safe: num_sample was checked non-negative above. The bare
+    // int/size_t comparison would be a -Wsign-compare error under the
+    // project's -Wextra -Werror flags.
+    if (event_arrival_timestamps->size() >=
+        static_cast<size_t>(num_sample)) {
+      break;
+    }
+  }
+}
+
+// Looks up the HAL handle of the default sensor for this listener's type.
+// Returns -1 when the sensor service or the sensor itself is unavailable.
+int32_t GoogSensorEnvironment::GetSensorHandle() {
+  sp<ISensorManager> manager = ISensorManager::getService();
+  if (manager == nullptr) {
+    ALOGE("%s %d Cannot get ISensorManager for sensor %s", __func__, __LINE__,
+          GetSensorName());
+    return -1;
+  }
+  bool found = false;
+  SensorInfo sensor;
+  manager->getDefaultSensor(GetHalSensorType(environment_sensor_type_),
+                            [&sensor, &found](const auto& info, auto result) {
+                              if (result != Result::OK) {
+                                ALOGE("%s %d Cannot find default sensor",
+                                      __func__, __LINE__);
+                                return;
+                              }
+                              sensor = info;
+                              found = true;
+                            });
+
+  if (found) {
+    ALOGD("%s %d handle for %s is found.", __func__, __LINE__, GetSensorName());
+  } else {
+    ALOGE("%s %d handle for %s is not found!", __func__, __LINE__,
+          GetSensorName());
+  }
+  return found ? sensor.sensorHandle : -1;
+}
+
+// Maps a sensor type to its human-readable name. The table is indexed by
+// the enum value, so the order must match EnvironmentSensorType; callers
+// (e.g. Create()) are expected to pass a value below TOTAL_NUM.
+const char* GoogSensorEnvironment::GetSensorName(
+    EnvironmentSensorType sensor_type) {
+  static constexpr const char* kSensorNames[] = {"device_orientation", "light",
+                                                 "proximity"};
+  return kSensorNames[static_cast<int>(sensor_type)];
+}
+
+}  // namespace camera_sensor_listener
+}  // namespace android
\ No newline at end of file
diff --git a/common/sensor_listener/goog_sensor_environment.h b/common/sensor_listener/goog_sensor_environment.h
new file mode 100644
index 0000000..0106331
--- /dev/null
+++ b/common/sensor_listener/goog_sensor_environment.h
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_SENSOR_ENVIRONMENT_H_
+#define VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_SENSOR_ENVIRONMENT_H_
+
+#include "goog_sensor_wrapper.h"
+
+namespace android {
+namespace camera_sensor_listener {
+
+// Supported environment sensor types:
+// ::android::hardware::sensors::V1_0::SensorType::DEVICE_ORIENTATION
+// ::android::hardware::sensors::V1_0::SensorType::LIGHT
+// ::android::hardware::sensors::V1_0::SensorType::PROXIMITY
+enum class EnvironmentSensorType {
+  DEVICE_ORIENTATION = 0,
+  LIGHT,
+  PROXIMITY,
+  TOTAL_NUM  // Sentinel: count of supported types, not a real sensor type.
+};
+
+// Environment sensor listener class.
+// It will create an environment sensor listener whose event data type is float.
+// DEVICE_ORIENTATION sensor event data is 0, 1, 2, 3:
+//   0: device is in default orientation (Y axis is vertical and points up).
+//   1: device is rotated 90 degrees counter-clockwise from default
+//      orientation (X axis is vertical and points up).
+//   2: device is rotated 180 degrees from default orientation (Y axis is
+//      vertical and points down).
+//   3: device is rotated 90 degrees clockwise from default orientation
+//      (X axis is vertical and points down).
+// LIGHT sensor event data is in SI lux units.
+// PROXIMITY sensor event data is 0 or 5:
+//   0 means closest object is nearby, 5 means closest object is far.
+// Current supported sensor types are listed in EnvironmentSensorType.
+// Sample usage:
+//   sp<GoogSensorEnvironment> sensor_ptr =
+//       GoogSensorEnvironment::Create(EnvironmentSensorType::LIGHT,
+//                                     /*event_queue_size=*/20);
+//   std::function<void(const ExtendedSensorEvent& event)> callback =
+//       [](const ExtendedSensorEvent& event) {
+//         // customized operations.
+//      };
+//   sensor_ptr->SetEventProcessor(callback);
+//   if (sensor_ptr->GetSensorEnablingStatus()) {
+//     std::vector<int64_t> event_timestamps;
+//     std::vector<float> event_data;
+//     std::vector<int64_t> arrival_timestamps;
+//     sensor_ptr->GetLatestNSensorEvents(
+//         /*num_sample=*/5, &event_timestamps, &event_data,
+//         &arrival_timestamps);
+//   }
+class GoogSensorEnvironment : public GoogSensorWrapper {
+ public:
+  // Return a StrongPointer pointing to newly created GoogSensorEnvironment
+  // instance.
+  // Input:
+  //   environment_sensor_type: sensor type defined in enum class
+  //     EnvironmentSensorType.
+  //   event_queue_size: size of event queue to hold incoming sensor events.
+  // Returns nullptr for an invalid sensor type; otherwise returns the
+  // instance even if enabling failed (check GetSensorEnablingStatus()).
+  static sp<GoogSensorEnvironment> Create(
+      EnvironmentSensorType environment_sensor_type,
+      size_t event_queue_size = kDefaultEventQueueSize);
+
+  // Destructor.
+  // Destroy and free the resources of a GoogSensorEnvironment.
+  ~GoogSensorEnvironment();
+
+  // Get whether sensor is enabled.
+  // Return true if sensor is enabled, false otherwise.
+  bool GetSensorEnablingStatus() const {
+    return IsEnabled();
+  }
+
+  // Get latest n sensor events' timestamps, event data and arrival times.
+  // If output vectors are not empty, latest_n_timestamps, event_data and
+  // latest_n_arrival_timestamps will be cleared first.
+  // If total samples in event_deque_ is smaller than num_sample,
+  // size of latest_n_timestamps, event_data and latest_n_arrival_timestamps
+  // will be equal to event_deque_.size().
+  // Input:
+  //   num_sample: number of latest samples to query.
+  // Outputs:
+  //   latest_n_timestamps: pointer of vector to hold timestamps.
+  //   event_data: pointer of vector to hold float type event data.
+  //   latest_n_arrival_timestamps: pointer of vector to hold arrival times.
+  // Event timestamps, data and arrival timestamps are listed in chronological
+  // order, i.e., latest_n_timestamps[0], event_data[0] and
+  // latest_n_arrival_timestamps[0] hold the earliest data.
+  void GetLatestNSensorEvents(
+      int num_sample, std::vector<int64_t>* latest_n_timestamps,
+      std::vector<float>* event_data,
+      std::vector<int64_t>* latest_n_arrival_timestamps) const;
+
+  // Get sensor name.
+  const char* GetSensorName() const {
+    return GetSensorName(environment_sensor_type_);
+  }
+
+ protected:
+  // Get environment sensor handle.
+  virtual int32_t GetSensorHandle() final;
+
+ private:
+  // Constructor.
+  // Create and initialize a GoogSensorEnvironment.
+  // Inputs:
+  //   environment_sensor_type: sensor type defined in enum class
+  //     EnvironmentSensorType.
+  //   event_queue_size: size of event queue to hold incoming sensor events.
+  GoogSensorEnvironment(EnvironmentSensorType environment_sensor_type,
+                        size_t event_queue_size);
+
+  // Map a sensor type to its human-readable name.
+  // (Parameter renamed from the copy-pasted `motion_sensor_type` to match
+  // the definition in the .cc file; declaration-only, no caller impact.)
+  static const char* GetSensorName(EnvironmentSensorType sensor_type);
+
+  // Type of this listener's sensor.
+  EnvironmentSensorType environment_sensor_type_;
+
+  // Default sensor event queue size is set to 20.
+  static constexpr size_t kDefaultEventQueueSize = 20;
+};
+
+}  // namespace camera_sensor_listener
+}  // namespace android
+
+#endif  // VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_SENSOR_ENVIRONMENT_H_
diff --git a/common/sensor_listener/goog_sensor_motion.cc b/common/sensor_listener/goog_sensor_motion.cc
new file mode 100644
index 0000000..f312ced
--- /dev/null
+++ b/common/sensor_listener/goog_sensor_motion.cc
@@ -0,0 +1,200 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "goog_sensor_motion"
+
+#include "goog_sensor_motion.h"
+
+#include <cinttypes>
+
+#include "utils/Errors.h"
+#include "utils/Log.h"
+#include "utils/Mutex.h"
+#include "utils/RefBase.h"
+#include "utils/String16.h"
+
+namespace android {
+namespace camera_sensor_listener {
+namespace {
+
+using ::android::frameworks::sensorservice::V1_0::ISensorManager;
+using ::android::frameworks::sensorservice::V1_0::Result;
+using ::android::hardware::sensors::V1_0::SensorInfo;
+using ::android::hardware::sensors::V1_0::SensorType;
+
+// Map a listener-level MotionSensorType to the corresponding HAL SensorType.
+SensorType GetHalSensorType(MotionSensorType sensor_type) {
+  if (sensor_type == MotionSensorType::ACCELEROMETER) {
+    return SensorType::ACCELEROMETER;
+  }
+  if (sensor_type == MotionSensorType::GRAVITY) {
+    return SensorType::GRAVITY;
+  }
+  if (sensor_type == MotionSensorType::GYROSCOPE) {
+    return SensorType::GYROSCOPE;
+  }
+  if (sensor_type == MotionSensorType::LINEAR_ACCELERATION) {
+    return SensorType::LINEAR_ACCELERATION;
+  }
+  if (sensor_type == MotionSensorType::MAGNETIC_FIELD) {
+    return SensorType::MAGNETIC_FIELD;
+  }
+  // Unknown values are fatal; ACCELEROMETER is only a formal fallback.
+  LOG_FATAL("Unknown sensor type %d", static_cast<int>(sensor_type));
+  return SensorType::ACCELEROMETER;
+}
+
+}  // namespace
+
+// Constructor: forwards the event queue size and sampling period to the
+// GoogSensorWrapper base and records which motion sensor to listen to.
+GoogSensorMotion::GoogSensorMotion(MotionSensorType motion_sensor_type,
+                                   int64_t sampling_period_us,
+                                   size_t event_queue_size)
+    : GoogSensorWrapper(event_queue_size, sampling_period_us),
+      motion_sensor_type_(motion_sensor_type) {
+  ALOGD("%s %d create sensor %s", __func__, __LINE__,
+        GetSensorName(motion_sensor_type));
+}
+
+// Destructor: disables the sensor first so no further events are delivered
+// while the object is being torn down.
+GoogSensorMotion::~GoogSensorMotion() {
+  Disable();
+  ALOGD("%s %d destroy sensor %s", __func__, __LINE__, GetSensorName());
+}
+
+// Factory: validates the requested sensor type and sampling period, then
+// creates and enables a GoogSensorMotion. Returns nullptr on invalid input.
+sp<GoogSensorMotion> GoogSensorMotion::Create(MotionSensorType motion_sensor_type,
+                                              int64_t sampling_period_us,
+                                              size_t event_queue_size) {
+  // Check sensor_type validity: reject anything outside [0, TOTAL_NUM).
+  // (Negative casts of an invalid enum value must be rejected too.)
+  int motion_sensor_type_index = static_cast<int>(motion_sensor_type);
+  if (motion_sensor_type_index < 0 ||
+      motion_sensor_type_index >= static_cast<int>(MotionSensorType::TOTAL_NUM)) {
+    ALOGE("%s %d unsupported motion sensor type %d", __func__, __LINE__,
+          motion_sensor_type_index);
+    return nullptr;
+  }
+  // Check sampling period validity; the system supports at most 400Hz.
+  if (sampling_period_us < kMinSamplingPeriodUs) {
+    ALOGE("%s %d sampling period %" PRId64 "us is too small %s", __func__,
+          __LINE__, sampling_period_us,
+          "supported smallest sampling period is 2500us");
+    return nullptr;
+  }
+
+  // Create sensor.
+  sp<GoogSensorMotion> sensor_ptr = new GoogSensorMotion(
+      motion_sensor_type, sampling_period_us, event_queue_size);
+  if (sensor_ptr == nullptr) {
+    ALOGE("%s %d failed to create GoogSensorMotion for %s", __func__, __LINE__,
+          GetSensorName(motion_sensor_type));
+  } else {
+    // Enable sensor so events start flowing immediately after creation.
+    status_t result = sensor_ptr->Enable();
+    if (result != 0) {
+      ALOGE("%s %d failed to enable GoogSensorMotion for %s", __func__,
+            __LINE__, sensor_ptr->GetSensorName());
+    } else {
+      ALOGD("%s %d successfully enabled GoogSensorMotion for %s", __func__,
+            __LINE__, sensor_ptr->GetSensorName());
+    }
+  }
+  return sensor_ptr;
+}
+
+// Copy the latest num_sample buffered events (or all, if fewer) into the
+// output vectors in chronological order. Outputs are cleared first.
+void GoogSensorMotion::GetLatestNSensorEvents(
+    int num_sample, std::vector<int64_t>* event_timestamps,
+    std::vector<float>* motion_vector_x, std::vector<float>* motion_vector_y,
+    std::vector<float>* motion_vector_z,
+    std::vector<int64_t>* event_arrival_timestamps) const {
+  // Guard against null output pointers, consistent with
+  // GoogSensorSync::GetLatestNSamples.
+  if (event_timestamps == nullptr || motion_vector_x == nullptr ||
+      motion_vector_y == nullptr || motion_vector_z == nullptr ||
+      event_arrival_timestamps == nullptr) {
+    return;
+  }
+  event_timestamps->clear();
+  motion_vector_x->clear();
+  motion_vector_y->clear();
+  motion_vector_z->clear();
+  event_arrival_timestamps->clear();
+
+  if (num_sample < 0) {
+    return;
+  }
+  std::lock_guard<std::mutex> l(event_buffer_lock_);
+  // Skip ahead so only the last num_sample events are copied.
+  int start_index =
+      std::max(0, static_cast<int>(event_buffer_.size()) - num_sample);
+  auto event = event_buffer_.begin();
+  std::advance(event, start_index);
+  for (; event != event_buffer_.end(); ++event) {
+    event_timestamps->push_back(event->sensor_event.timestamp);
+    motion_vector_x->push_back(event->sensor_event.u.vec3.x);
+    motion_vector_y->push_back(event->sensor_event.u.vec3.y);
+    motion_vector_z->push_back(event->sensor_event.u.vec3.z);
+    event_arrival_timestamps->push_back(event->event_arrival_time_ns);
+  }
+}
+
+// Copy all buffered events with timestamps in (start_time, end_time] into
+// the output vectors in chronological order. Outputs are cleared first.
+void GoogSensorMotion::QuerySensorEventsBetweenTimestamps(
+    int64_t start_time, int64_t end_time,
+    std::vector<int64_t>* event_timestamps, std::vector<float>* motion_vector_x,
+    std::vector<float>* motion_vector_y, std::vector<float>* motion_vector_z,
+    std::vector<int64_t>* event_arrival_timestamps) const {
+  // Guard against null output pointers, consistent with
+  // GoogSensorSync::GetLatestNSamples.
+  if (event_timestamps == nullptr || motion_vector_x == nullptr ||
+      motion_vector_y == nullptr || motion_vector_z == nullptr ||
+      event_arrival_timestamps == nullptr) {
+    return;
+  }
+  event_timestamps->clear();
+  motion_vector_x->clear();
+  motion_vector_y->clear();
+  motion_vector_z->clear();
+  event_arrival_timestamps->clear();
+
+  std::lock_guard<std::mutex> l(event_buffer_lock_);
+  // Half-open interval: events at exactly start_time are excluded,
+  // events at exactly end_time are included.
+  for (const auto& event : event_buffer_) {
+    int64_t event_time = event.sensor_event.timestamp;
+    if (event_time <= start_time || event_time > end_time) {
+      continue;
+    }
+    event_timestamps->push_back(event_time);
+    motion_vector_x->push_back(event.sensor_event.u.vec3.x);
+    motion_vector_y->push_back(event.sensor_event.u.vec3.y);
+    motion_vector_z->push_back(event.sensor_event.u.vec3.z);
+    event_arrival_timestamps->push_back(event.event_arrival_time_ns);
+  }
+}
+
+// Look up the HAL handle of the default sensor for this listener's motion
+// sensor type via ISensorManager. Returns -1 if the sensor service is
+// unavailable or no matching default sensor exists.
+int32_t GoogSensorMotion::GetSensorHandle() {
+  sp<ISensorManager> manager = ISensorManager::getService();
+  if (manager == nullptr) {
+    ALOGE("%s %d Cannot get ISensorManager for sensor %s", __func__, __LINE__,
+          GetSensorName());
+    return -1;
+  }
+  bool found = false;
+  SensorInfo sensor;
+  // getDefaultSensor delivers its result through a synchronous HIDL callback.
+  manager->getDefaultSensor(GetHalSensorType(motion_sensor_type_),
+                            [&sensor, &found](const auto& info, auto result) {
+                              if (result != Result::OK) {
+                                ALOGE("%s %d Cannot find default sensor",
+                                      __func__, __LINE__);
+                                return;
+                              }
+                              sensor = info;
+                              found = true;
+                            });
+
+  if (found) {
+    ALOGD("%s %d handle for %s is found.", __func__, __LINE__, GetSensorName());
+  } else {
+    ALOGE("%s %d handle for %s is not found!", __func__, __LINE__,
+          GetSensorName());
+  }
+  return found ? sensor.sensorHandle : -1;
+}
+
+// Map a MotionSensorType to its human-readable name.
+// The table is indexed by the enum value, so it must stay in sync with the
+// declaration order of MotionSensorType; the static_assert enforces that the
+// table covers every value below TOTAL_NUM (callers validate the range).
+const char* GoogSensorMotion::GetSensorName(MotionSensorType sensor_type) {
+  static constexpr const char* kSensorNames[] = {
+      "accelerometer", "gravity", "gyroscope", "linear_acceleration",
+      "magnetic_field"};
+  static_assert(sizeof(kSensorNames) / sizeof(kSensorNames[0]) ==
+                    static_cast<size_t>(MotionSensorType::TOTAL_NUM),
+                "kSensorNames must cover every MotionSensorType");
+  return kSensorNames[static_cast<int>(sensor_type)];
+}
+
+}  // namespace camera_sensor_listener
+}  // namespace android
\ No newline at end of file
diff --git a/common/sensor_listener/goog_sensor_motion.h b/common/sensor_listener/goog_sensor_motion.h
new file mode 100644
index 0000000..515512b
--- /dev/null
+++ b/common/sensor_listener/goog_sensor_motion.h
@@ -0,0 +1,186 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_SENSOR_MOTION_H_
+#define VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_SENSOR_MOTION_H_
+
+#include "goog_sensor_wrapper.h"
+
+namespace android {
+namespace camera_sensor_listener {
+
+// Supported motion sensor types:
+// ::android::hardware::sensors::V1_0::SensorType::ACCELEROMETER
+// ::android::hardware::sensors::V1_0::SensorType::GRAVITY
+// ::android::hardware::sensors::V1_0::SensorType::GYROSCOPE
+// ::android::hardware::sensors::V1_0::SensorType::LINEAR_ACCELERATION
+// ::android::hardware::sensors::V1_0::SensorType::MAGNETIC_FIELD
+enum class MotionSensorType : int {
+  ACCELEROMETER = 0,
+  GRAVITY,
+  GYROSCOPE,
+  LINEAR_ACCELERATION,
+  MAGNETIC_FIELD,
+  TOTAL_NUM  // Sentinel: count of supported types, not a valid sensor.
+};
+
+// Motion sensor listener class.
+// It will create a motion sensor listener whose event consists of three
+// floating numbers corresponding to data in x, y, z axes. Please refer to
+// Sensor Coordinate System section in
+// https://developer.android.com/guide/topics/sensors/sensors_overview
+// to check definition of x, y, z axis.
+// ACCELEROMETER sensor event data's measure unit is m/s^2:
+//   motion_vector_x: Acceleration force along the x axis (including gravity).
+//   motion_vector_y: Acceleration force along the y axis (including gravity).
+//   motion_vector_z: Acceleration force along the z axis (including gravity).
+// GRAVITY sensor event data's measure unit is m/s^2:
+//   motion_vector_x: Force of gravity along the x axis.
+//   motion_vector_y: Force of gravity along the y axis.
+//   motion_vector_z: Force of gravity along the z axis.
+// GYROSCOPE sensor event data's measure unit is rad/s:
+//   motion_vector_x: Rate of rotation around the x axis.
+//   motion_vector_y: Rate of rotation around the y axis.
+//   motion_vector_z: Rate of rotation around the z axis.
+// LINEAR_ACCELERATION sensor event data's measure unit is m/s^2:
+//   motion_vector_x: Acceleration force along the x axis (excluding gravity).
+//   motion_vector_y: Acceleration force along the y axis (excluding gravity).
+//   motion_vector_z: Acceleration force along the z axis (excluding gravity).
+// MAGNETIC_FIELD sensor event data's measure unit is micro Tesla(uT):
+//   motion_vector_x: Geomagnetic field strength along the x axis.
+//   motion_vector_y: Geomagnetic field strength along the y axis.
+//   motion_vector_z: Geomagnetic field strength along the z axis.
+// Sample usage:
+//   sp<GoogSensorMotion> sensor_ptr =
+//       GoogSensorMotion::Create(MotionSensorType::GYROSCOPE,
+//                                /*sampling_period_us=*/20000,
+//                                /*event_queue_size=*/20);
+//   std::function<void(const ExtendedSensorEvent& event)> callback =
+//       [](const ExtendedSensorEvent& event) {
+//         // customized operations.
+//      };
+//   //  Customized event callback is optional.
+//   sensor_ptr->SetEventProcessor(callback);
+//   if (sensor_ptr->GetSensorEnablingStatus()) {
+//     std::vector<int64_t> event_timestamps;
+//     std::vector<float> motion_vector_x;
+//     std::vector<float> motion_vector_y;
+//     std::vector<float> motion_vector_z;
+//     std::vector<int64_t> arrival_timestamps;
+//     sensor_ptr->GetLatestNSensorEvents(
+//         /*num_sample=*/5, &event_timestamps, &motion_vector_x,
+//         &motion_vector_y, &motion_vector_z, &arrival_timestamps);
+//   }
+class GoogSensorMotion : public GoogSensorWrapper {
+ public:
+  // Return a StrongPointer pointing to newly created GoogSensorMotion
+  // instance.
+  // Inputs:
+  //   motion_sensor_type: sensor type defined in enum class MotionSensorType.
+  //   sampling_period_us: gyro sampling period in us (1e-6s). Sampling period
+  //     should be >= 2500us as system can only support up to 400Hz frequency.
+  //     If sampling_period_us < 2500us, a nullptr will be returned.
+  //   event_queue_size: size of event queue to hold incoming sensor events.
+  static sp<GoogSensorMotion> Create(
+      MotionSensorType motion_sensor_type,
+      int64_t sampling_period_us = kDefaultSamplingPeriodUs,
+      size_t event_queue_size = kDefaultEventQueueSize);
+
+  // Destructor.
+  // Destroy and free the resources of a GoogSensorMotion.
+  ~GoogSensorMotion();
+
+  // Get whether sensor is enabled.
+  // Return true if sensor is enabled, false otherwise.
+  bool GetSensorEnablingStatus() const {
+    return IsEnabled();
+  }
+
+  // Get latest n sensor events' timestamps, event data and arrival times.
+  // If output vectors are not empty, they will be cleared first.
+  // If total samples in event_deque_ is smaller than num_sample, size of
+  // output vectors will be equal to event_deque_.size().
+  // Input:
+  //   num_sample: number of latest samples to query.
+  // Outputs:
+  //   latest_n_timestamps: pointer of vector to hold timestamps.
+  //   motion_vector_x: pointer of vector to hold event data in x axis.
+  //   motion_vector_y: pointer of vector to hold event data in y axis.
+  //   motion_vector_z: pointer of vector to hold event data in z axis.
+  //   latest_n_arrival_timestamps: pointer of vector to hold arrival times.
+  // Event timestamps, data and arrival timestamps are listed in chronological
+  // order, i.e., latest_n_timestamps[0], motion_vector_x[0],
+  // motion_vector_y[0], motion_vector_z[0], and latest_n_arrival_timestamps[0]
+  // hold the earliest data.
+  void GetLatestNSensorEvents(
+      int num_sample, std::vector<int64_t>* latest_n_timestamps,
+      std::vector<float>* motion_vector_x, std::vector<float>* motion_vector_y,
+      std::vector<float>* motion_vector_z,
+      std::vector<int64_t>* latest_n_arrival_timestamps) const;
+
+  // Query sensor events within the range (start_time, end_time].
+  // Inputs:
+  //   start_time: queried events' timestamps must be > start_time.
+  //   end_time: queried events' timestamps must be <= end_time.
+  // Outputs:
+  //   event_timestamps: pointer of vector to hold queried events' timestamps.
+  //   motion_vector_x: pointer of vector to hold queried events' x axis data.
+  //   motion_vector_y: pointer of vector to hold queried events' y axis data.
+  //   motion_vector_z: pointer of vector to hold queried events' z axis data.
+  //   event_arrival_timestamps: pointer of vector to hold arrival times.
+  // Event timestamps, data and arrival timestamps are listed in chronological
+  // order, i.e., event_timestamps[0], motion_vector_x[0],
+  // motion_vector_y[0], motion_vector_z[0], and event_arrival_timestamps[0]
+  // hold the earliest data.
+  void QuerySensorEventsBetweenTimestamps(
+      int64_t start_time, int64_t end_time,
+      std::vector<int64_t>* event_timestamps,
+      std::vector<float>* motion_vector_x, std::vector<float>* motion_vector_y,
+      std::vector<float>* motion_vector_z,
+      std::vector<int64_t>* event_arrival_timestamps) const;
+
+  // Get the human-readable name of this listener's motion sensor.
+  const char* GetSensorName() const {
+    return GetSensorName(motion_sensor_type_);
+  }
+
+ protected:
+  // Get motion sensor handle.
+  virtual int32_t GetSensorHandle() final;
+
+ private:
+  // Constructor.
+  // Create and initialize a GoogSensorMotion.
+  // Inputs:
+  //   motion_sensor_type: sensor type defined in enum class MotionSensorType.
+  //   sampling_period_us: gyro sampling period in us (1e-6s). Sampling period
+  //     should be >= 2500us as system can only support up to 400Hz frequency.
+  //   event_queue_size: size of event queue to hold incoming sensor events.
+  GoogSensorMotion(MotionSensorType motion_sensor_type,
+                   int64_t sampling_period_us, size_t event_queue_size);
+
+  // Map a MotionSensorType to its human-readable sensor name.
+  static const char* GetSensorName(MotionSensorType motion_sensor_type);
+
+  // Type of motion sensor this listener subscribes to.
+  MotionSensorType motion_sensor_type_;
+
+  // Defaults: 20-event queue, 20000us (50 Hz) sampling, 2500us (400 Hz) floor.
+  static constexpr size_t kDefaultEventQueueSize = 20;
+  static constexpr int64_t kDefaultSamplingPeriodUs = 20000;  // = 50 Hz
+  static constexpr int64_t kMinSamplingPeriodUs = 2500;       // = 400 Hz
+};
+
+}  // namespace camera_sensor_listener
+}  // namespace android
+
+#endif  // VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_SENSOR_MOTION_H_
diff --git a/common/sensor_listener/goog_sensor_sync.cc b/common/sensor_listener/goog_sensor_sync.cc
new file mode 100644
index 0000000..3679878
--- /dev/null
+++ b/common/sensor_listener/goog_sensor_sync.cc
@@ -0,0 +1,258 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "goog_sensor_sync"
+
+#include "goog_sensor_sync.h"
+
+#include <inttypes.h>
+#include <stdint.h>
+#include <string.h>
+
+#include <algorithm>
+#include <cmath>
+#include <deque>
+
+#include "utils/Errors.h"
+#include "utils/Log.h"
+#include "utils/RefBase.h"
+#include "utils/String16.h"
+
+namespace android {
+namespace camera_sensor_listener {
+
+using ::android::frameworks::sensorservice::V1_0::ISensorManager;
+using ::android::frameworks::sensorservice::V1_0::Result;
+using ::android::hardware::sensors::V1_0::SensorInfo;
+
+// Constructor: forwards the event queue size to GoogSensorWrapper and
+// records which physical camera's vsync signal to listen to.
+GoogSensorSync::GoogSensorSync(uint8_t cam_id, size_t event_queue_size)
+    : GoogSensorWrapper(event_queue_size), cam_id_(cam_id) {
+  ALOGI("%s %d Sensor sync camera ID: %d", __func__, __LINE__,
+        static_cast<int>(cam_id_));
+}
+
+// Destructor: disables the sensor and logs lifetime sync/match failure
+// statistics (only when at least one sync/match was attempted).
+GoogSensorSync::~GoogSensorSync() {
+  Disable();
+  if (sync_cnt_ != 0) {
+    ALOGI("%s %d Total failure/sync for camera %d: %d/%d", __func__, __LINE__,
+          static_cast<int>(cam_id_), sync_failure_cnt_, sync_cnt_);
+  }
+  if (match_cnt_ != 0) {
+    ALOGI("%s %d Total failure/match for camera %d: %d/%d", __func__, __LINE__,
+          static_cast<int>(cam_id_), match_failure_cnt_, match_cnt_);
+  }
+}
+
+// Factory: creates a GoogSensorSync for the given camera and enables it.
+// The returned pointer is always the newly constructed instance; enabling
+// failures are logged but do not prevent the instance from being returned.
+sp<GoogSensorSync> GoogSensorSync::Create(uint8_t cam_id,
+                                          size_t event_queue_size) {
+  sp<GoogSensorSync> sensor_sync_ptr =
+      new GoogSensorSync(cam_id, event_queue_size);
+  if (sensor_sync_ptr == nullptr) {
+    ALOGE("%s %d failed to create GoogSensorSync.", __func__, __LINE__);
+    return sensor_sync_ptr;
+  }
+  status_t result = sensor_sync_ptr->Enable();
+  if (result != 0) {
+    ALOGE("%s %d failed to enable GoogSensorSync.", __func__, __LINE__);
+  } else {
+    ALOGI("%s %d successfully enabled GoogSensorSync.", __func__, __LINE__);
+  }
+  return sensor_sync_ptr;
+}
+
+// Extract the frame id and sof boottime timestamp packed in a vsync event.
+// Sensor hal concatenates pairs of 32-bit floats in the event data field to
+// pass 64-bit payloads; word 1 is the frame id and word 2 the boottime
+// timestamp. memcpy is used instead of reinterpret_cast so the re-typing of
+// float storage as uint64_t is well-defined (no strict-aliasing violation,
+// and no const_cast needed).
+void GoogSensorSync::ExtractFrameIdAndBoottimeTimestamp(
+    const ExtendedSensorEvent& event, int64_t* frame_id,
+    int64_t* timestamp_boottime) const {
+  uint64_t vsync_data[3];
+  memcpy(vsync_data, &event.sensor_event.u.data[0], sizeof(vsync_data));
+  // Static_cast from uint64_t to int64_t to align with vsync timestamp format.
+  *frame_id = static_cast<int64_t>(vsync_data[1]);
+  *timestamp_boottime = static_cast<int64_t>(vsync_data[2]);
+}
+
+// Extract the event counter from an AoC vsync event.
+uint64_t GoogSensorSync::ExtractAocEventCounter(const ExtendedSensorEvent& event) {
+  // AoC sensor Event is one uint64_t data. It can be accessed with stepCount
+  // from the data union.
+  return event.sensor_event.u.stepCount;
+}
+
+// Copy the latest num_sample buffered vsync events (or all, if fewer) into
+// the four output vectors in chronological order. Outputs are cleared first;
+// null output pointers and negative num_sample are no-ops.
+void GoogSensorSync::GetLatestNSamples(
+    int num_sample, std::vector<int64_t>* latest_n_vsync_timestamps,
+    std::vector<int64_t>* latest_n_frame_ids,
+    std::vector<int64_t>* latest_n_boottime_timestamps,
+    std::vector<int64_t>* latest_n_arrival_timestamps) const {
+  if (latest_n_vsync_timestamps == nullptr || latest_n_frame_ids == nullptr ||
+      latest_n_boottime_timestamps == nullptr ||
+      latest_n_arrival_timestamps == nullptr) {
+    return;
+  }
+  latest_n_vsync_timestamps->clear();
+  latest_n_frame_ids->clear();
+  latest_n_boottime_timestamps->clear();
+  latest_n_arrival_timestamps->clear();
+
+  if (num_sample < 0) {
+    return;
+  }
+  std::lock_guard<std::mutex> el(event_buffer_lock_);
+  // Skip ahead so only the last num_sample events are copied.
+  int start_index =
+      std::max(0, static_cast<int>(event_buffer_.size()) - num_sample);
+  auto event = event_buffer_.begin();
+  std::advance(event, start_index);
+  for (; event != event_buffer_.end(); ++event) {
+    latest_n_vsync_timestamps->push_back(event->sensor_event.timestamp);
+    latest_n_arrival_timestamps->push_back(event->event_arrival_time_ns);
+    int64_t frame_id, boottime_timestamp;
+    ExtractFrameIdAndBoottimeTimestamp(*event, &frame_id, &boottime_timestamp);
+    latest_n_frame_ids->push_back(frame_id);
+    latest_n_boottime_timestamps->push_back(boottime_timestamp);
+  }
+}
+
+// Find the HAL handle of this camera's vsync sensor by enumerating the
+// sensor list and matching the "camera v-sync <cam_id>" name among sensors
+// of type com.google.sensor.camera_vsync. Returns -1 when the sensor
+// service is unavailable or no matching sensor is found.
+int32_t GoogSensorSync::GetSensorHandle() {
+  sp<ISensorManager> manager = ISensorManager::getService();
+  if (manager == nullptr) {
+    ALOGE("%s %d Cannot get ISensorManager", __func__, __LINE__);
+    return -1;
+  }
+  SensorInfo info;
+  bool find = false;
+  static constexpr size_t kMaxVsyncNameSize = 16;
+  char sensor_for_camera[kMaxVsyncNameSize];
+  snprintf(sensor_for_camera, sizeof(sensor_for_camera), "camera v-sync %d",
+           cam_id_);
+
+  // getSensorList delivers its result through a synchronous HIDL callback.
+  manager->getSensorList(
+      [&info, &find, &sensor_for_camera](const auto& list, auto result) {
+        if (result != Result::OK) {
+          return;
+        }
+        for (const SensorInfo& item : list) {
+          if (item.typeAsString == "com.google.sensor.camera_vsync") {
+            ALOGV("%s %d Enumerating sensor %s %s %zu %zu", __func__, __LINE__,
+                  item.name.c_str(), sensor_for_camera,
+                  strlen(item.name.c_str()), strlen(sensor_for_camera));
+            // Case-insensitive prefix match against the expected name.
+            if (!strncasecmp(item.name.c_str(), sensor_for_camera,
+                             strlen(sensor_for_camera))) {
+              info = item;
+              find = true;
+              break;
+            }
+          }
+        }
+      });
+
+  if (find) {
+    ALOGI("%s %d handle for %d is found. Sensor name %s", __func__, __LINE__,
+          static_cast<int>(cam_id_), info.name.c_str());
+  } else {
+    ALOGE("%s %d handle for %d is not found!", __func__, __LINE__,
+          static_cast<int>(cam_id_));
+  }
+  return find ? info.sensorHandle : -1;
+}
+
+// Snap a boottime sof timestamp to the nearest buffered vsync timestamp
+// within kMaxTimeDriftNs. Returns the input unchanged when the sensor is
+// disabled or no vsync event is close enough.
+int64_t GoogSensorSync::SyncTimestamp(int64_t timestamp) {
+  if (!IsEnabled()) {
+    ALOGE("%s %d sensor_sync sensor is not enabled", __func__, __LINE__);
+    return timestamp;
+  }
+
+  std::lock_guard<std::mutex> el(event_buffer_lock_);
+  int64_t min_delta = kMaxTimeDriftNs;
+  int64_t nearest_sync = timestamp;
+  // Iterate by const reference to avoid copying each buffered event, and
+  // compute the delta once per iteration.
+  for (const auto& event : event_buffer_) {
+    int64_t delta = llabs(event.sensor_event.timestamp - timestamp);
+    if (delta < min_delta) {
+      min_delta = delta;
+      nearest_sync = event.sensor_event.timestamp;
+    }
+  }
+
+  sync_cnt_++;
+  // min_delta unchanged means nothing was within tolerance: sync failed.
+  if (min_delta == kMaxTimeDriftNs) {
+    struct timespec res;
+    clock_gettime(CLOCK_BOOTTIME, &res);
+    int64_t curr_time = (int64_t)res.tv_sec * 1000000000LL + res.tv_nsec;
+
+    ALOGV("%s %d Cannot sync timestamp for input timestamp %" PRId64
+          "at CPU time %" PRId64,
+          __func__, __LINE__, timestamp, curr_time);
+    sync_failure_cnt_++;
+    // Periodically log the failure rate, then reset both counters.
+    if (sync_failure_cnt_ >= kFailureThreshold) {
+      ALOGW("%s %d Camera %d: out of %d camera timestamps, %d failed to sync",
+            __func__, __LINE__, static_cast<int>(cam_id_), sync_cnt_,
+            sync_failure_cnt_);
+      sync_cnt_ = 0;
+      sync_failure_cnt_ = 0;
+    }
+  }
+
+  return nearest_sync;
+}
+
+// Return the buffered event whose timestamp is nearest the input, provided
+// the difference is below kMaxTimeDriftNs; std::nullopt otherwise or when
+// the sensor is disabled.
+std::optional<ExtendedSensorEvent> GoogSensorSync::FindNearestEvent(
+    int64_t timestamp) {
+  if (!IsEnabled()) {
+    ALOGE("%s %d sensor_sync sensor is not enabled", __func__, __LINE__);
+    return std::nullopt;
+  }
+  std::lock_guard<std::mutex> el(event_buffer_lock_);
+  int64_t min_delta = kMaxTimeDriftNs;
+  std::optional<ExtendedSensorEvent> nearest_event;
+  // Iterate by const reference to avoid copying each ExtendedSensorEvent.
+  for (const auto& event : event_buffer_) {
+    int64_t delta = llabs(event.sensor_event.timestamp - timestamp);
+    if (delta < min_delta) {
+      min_delta = delta;
+      nearest_event = event;
+    }
+  }
+  return nearest_event;
+}
+
+// Exact-match a boottime sof timestamp + frame id against buffered vsync
+// events; on a hit, return the event's vsync-clock timestamp, otherwise
+// return the input timestamp unchanged.
+int64_t GoogSensorSync::MatchTimestamp(int64_t timestamp, int64_t frame_id) {
+  if (!IsEnabled()) {
+    ALOGE("%s %d sensor_sync sensor is not enabled", __func__, __LINE__);
+    return timestamp;
+  }
+
+  std::lock_guard<std::mutex> el(event_buffer_lock_);
+  // Iterate by const reference to avoid copying each buffered event.
+  for (const auto& event : event_buffer_) {
+    int64_t event_frame_id, event_timestamp;
+    ExtractFrameIdAndBoottimeTimestamp(event, &event_frame_id, &event_timestamp);
+    if (frame_id == event_frame_id && timestamp == event_timestamp) {
+      match_cnt_++;
+      return event.sensor_event.timestamp;
+    }
+  }
+  // No exact match: count the failure and periodically log the failure rate.
+  match_cnt_++;
+  match_failure_cnt_++;
+  if (match_failure_cnt_ >= kFailureThreshold) {
+    ALOGW("%s %d Camera %d: out of %d camera timestamps, %d failed to match",
+          __func__, __LINE__, static_cast<int>(cam_id_), match_cnt_,
+          match_failure_cnt_);
+    match_cnt_ = 0;
+    match_failure_cnt_ = 0;
+  }
+  return timestamp;
+}
+
+}  // namespace camera_sensor_listener
+}  // namespace android
diff --git a/common/sensor_listener/goog_sensor_sync.h b/common/sensor_listener/goog_sensor_sync.h
new file mode 100644
index 0000000..eb5efdf
--- /dev/null
+++ b/common/sensor_listener/goog_sensor_sync.h
@@ -0,0 +1,165 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_SENSOR_SYNC_H_
+#define VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_SENSOR_SYNC_H_
+
+#include "goog_sensor_wrapper.h"
+
+namespace android {
+namespace camera_sensor_listener {
+
+// Vsync sensor listener class.
+// It will create a Vsync listener for specific physical camera to receive
+// Vsync timestamps.
+// Sample usage:
+//   sp<GoogSensorSync> sync_ptr = GoogSensorSync::Create(0);
+//   if (sync_ptr->GetSensorEnablingStatus()) {
+//     int64_t sof_vsync = sync_ptr->SyncTimestamp(sof_boottime);
+//   }
+class GoogSensorSync : public GoogSensorWrapper {
+ public:
+  // Return a StrongPointer pointing to newly created GoogSensorSync instance.
+  // Input:
+  //   cam_id: physical camera id associated with Vsync sensor.
+  //   event_queue_size: size of event queue to hold incoming Vsync events.
+  static sp<GoogSensorSync> Create(
+      uint8_t cam_id, size_t event_queue_size = kDefaultEventQueueSize);
+
+  // Destructor.
+  // Destroy and free the resources of a GoogSensorSync.
+  ~GoogSensorSync();
+
+  // Get whether sensor is enabled.
+  // Return true if sensor is enabled, false otherwise.
+  bool GetSensorEnablingStatus() const {
+    return IsEnabled();
+  }
+
+  // Get latest n vsync timestamps, boottime_timestamps, frame_ids and
+  // their arrival times.
+  // If output vectors are not empty, latest_n_vsync_timestamps,
+  // latest_n_frame_ids, latest_n_boottime_timestamps and
+  // latest_n_arrival_timestamps will be cleared first.
+  // If total samples in event_deque_ is smaller than num_sample,
+  // latest_n_vsync_timestamps->size(), latest_n_frame_ids->size(),
+  // latest_n_boottime_timestamps->size() and
+  // latest_n_arrival_timestamps->size() will be
+  // equal to event_deque_.size().
+  // Input:
+  //   num_sample: number of latest samples to query.
+  // Outputs:
+  //   latest_n_vsync_timestamps: pointer of vector to hold vsync timestamps.
+  //   latest_n_frame_ids: pointer of vector to hold frame ids.
+  //   latest_n_boottime_timestamps: pointer of vector to hold boottime
+  //     timestamps.
+  //   latest_n_arrival_timestamps: pointer of vector to hold arrival times.
+  //   First element of output vectors hold earliest samples.
+  void GetLatestNSamples(int num_sample,
+                         std::vector<int64_t>* latest_n_vsync_timestamps,
+                         std::vector<int64_t>* latest_n_frame_ids,
+                         std::vector<int64_t>* latest_n_boottime_timestamps,
+                         std::vector<int64_t>* latest_n_arrival_timestamps) const;
+
+  // Synchronize timestamp with sensor_sync. Vsync must be enabled when this
+  // is called.
+  // Inputs:
+  //   timestamp: sof timestamp from clock_boottime to be synced.
+  // Return:
+  //   synced timestamp from Vsync sensor if successfully synced,
+  //   original input timestamp if unable to sync.
+  int64_t SyncTimestamp(int64_t timestamp);
+
+  // Find the event whose timestamp is nearest to the input timestamp, with
+  // the difference being less than kMaxTimeDriftNs.
+  // Input:
+  //   timestamp: target boottime timestamp.
+  // Return:
+  //   ExtendedSensorEvent if found; std::nullopt if not found.
+  std::optional<ExtendedSensorEvent> FindNearestEvent(int64_t timestamp);
+
+  // Utility function, extract sof boottime timestamp and frame id from vsync
+  // sensor event's data field.
+  // Input:
+  //   event: received vsync sensor event.
+  // Outputs:
+  //   frame_id: pointer to data field holding frame id.
+  //   timestamp_boottime: pointer to data field holding sof timestamp from
+  //   clock_boottime.
+  void ExtractFrameIdAndBoottimeTimestamp(const ExtendedSensorEvent& event,
+                                          int64_t* frame_id,
+                                          int64_t* timestamp_boottime) const;
+
+  // For AoC sensor service, VSYNC sensor event payload only contains an event
+  // counter. The function only applies to slider, P21 or other devices with
+  // AoC sensor service.
+  // Input:
+  //   event: received vsync sensor event.
+  // Return:
+  //   event counter.
+  uint64_t ExtractAocEventCounter(const ExtendedSensorEvent& event);
+
+  // Find corresponding vsync sof timestamp through exact matching with input
+  // boottime sof timestamp and frame id.
+  // Inputs:
+  //   timestamp: sof timestamp from clock_boottime to be matched.
+  //   frame_id: frame id that sof timestamp is associated with.
+  // Return:
+  //   matched sof timestamp from clock_vsync if both timestamp and frame_id are
+  //   the same with those carried by vsync events, otherwise return the input
+  //   timestamp.
+  int64_t MatchTimestamp(int64_t timestamp, int64_t frame_id);
+
+ protected:
+  // Get Vsync sensor handle.
+  virtual int32_t GetSensorHandle() final;
+
+ private:
+  // Constructor.
+  // Create and initialize a GoogSensorSync.
+  // Inputs:
+  //   cam_id: Id to identify different cameras (front/back, etc.).
+  //     Usually starts from 0, but need to check camera id definitions with
+  //     individual device.
+  //   event_queue_size: size of event queue to hold incoming Vsync events.
+  GoogSensorSync(uint8_t cam_id, size_t event_queue_size);
+
+  // Default sensor event queue size is set to 5.
+  static constexpr size_t kDefaultEventQueueSize = 5;
+
+  // Maximum tolerated delta between camera time and sensor time (in ns).
+  static constexpr int64_t kMaxTimeDriftNs = 10000000;
+
+  // Threshold in logging failed SyncTimestamp.
+  static constexpr int32_t kFailureThreshold = 100;
+
+  // Counter for number of SyncTimeStamp() called.
+  int32_t sync_cnt_ = 0;
+
+  // Out of all SyncTimeStamp(), number of timestamps that failed to sync.
+  int32_t sync_failure_cnt_ = 0;
+
+  // Counter for number of MatchTimestamp() called.
+  int32_t match_cnt_ = 0;
+
+  // Out of all MatchTimestamp(), number of timestamps that failed to match.
+  int32_t match_failure_cnt_ = 0;
+
+  // The id of the camera linked to this vsync signal.
+  uint8_t cam_id_;
+};
+
+}  // namespace camera_sensor_listener
+}  // namespace android
+
+#endif  // VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_SENSOR_SYNC_H_
diff --git a/common/sensor_listener/goog_sensor_wrapper.cc b/common/sensor_listener/goog_sensor_wrapper.cc
new file mode 100644
index 0000000..1885591
--- /dev/null
+++ b/common/sensor_listener/goog_sensor_wrapper.cc
@@ -0,0 +1,196 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "android-base/thread_annotations.h"
+#define LOG_TAG "goog_sensor_wrapper"
+
+#include <assert.h>
+#include <inttypes.h>
+#include <stdint.h>
+#include <sys/time.h>
+#include <utils/Log.h>
+#include <utils/String16.h>
+#include <utils/SystemClock.h>
+
+#include <algorithm>
+#include <cmath>
+
+#include "goog_sensor_wrapper.h"
+
+namespace android {
+namespace camera_sensor_listener {
+
+using ::android::frameworks::sensorservice::V1_0::IEventQueue;
+using ::android::frameworks::sensorservice::V1_0::IEventQueueCallback;
+using ::android::frameworks::sensorservice::V1_0::ISensorManager;
+using ::android::frameworks::sensorservice::V1_0::Result;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::sensors::V1_0::Event;
+using ::android::hardware::sensors::V1_0::SensorType;
+
+// Derived IEventQueueCallback implementation required by the sensor service;
+// forwards each delivered sensor event to the owning GoogSensorWrapper.
+class EventQueueCallback : public IEventQueueCallback {
+ public:
+  // master: weak reference to the wrapper that created this callback.
+  EventQueueCallback(wp<GoogSensorWrapper> master) {
+    assert(master);
+    master_ = master;
+  }
+
+  // Called by the sensor service for every event. The weak pointer is
+  // promoted first, so delivery becomes a no-op once the wrapper is gone.
+  Return<void> onEvent(const Event& e) {
+    sp<GoogSensorWrapper> master = master_.promote();
+    if (master != nullptr) {
+      master->EventCallback(e);
+    }
+    return Void();
+  }
+
+ private:
+  // Weak pointer: avoids a strong reference cycle between the wrapper, its
+  // event queue, and this callback.
+  wp<GoogSensorWrapper> master_;
+};
+
+// Constructs a disabled wrapper. The sensor-service event queue is created
+// lazily on the first Enable() call, not here.
+GoogSensorWrapper::GoogSensorWrapper(size_t event_buffer_size,
+                                     int64_t sensor_sampling_period_us)
+    : event_queue_(nullptr),
+      event_buffer_size_limit_(event_buffer_size),
+      sensor_sampling_period_us_(sensor_sampling_period_us),
+      handle_(-1),  // -1 marks "sensor handle not resolved yet".
+      enabled_(false) {
+  ALOGV("%s %d", __func__, __LINE__);
+}
+
+// Destructor: logging only. NOTE(review): it does not call Disable(); confirm
+// owners disable the sensor before destroying the wrapper.
+GoogSensorWrapper::~GoogSensorWrapper() { ALOGV("%s %d", __func__, __LINE__); }
+
+// Installs (or replaces) the user callback invoked on each matching sensor
+// event; guarded by event_processor_lock_. Always returns OK.
+status_t GoogSensorWrapper::SetEventProcessor(
+    std::function<void(const ExtendedSensorEvent& event)> event_processor) {
+  std::lock_guard<std::mutex> l(event_processor_lock_);
+  event_processor_ = std::move(event_processor);
+  return OK;
+}
+
+// Enables the sensor: lazily creates the event queue on first use, then asks
+// the sensor service to start delivering events at the configured sampling
+// period. Returns OK on success, a negative value otherwise.
+status_t GoogSensorWrapper::Enable() {
+  status_t res = OK;
+  std::lock_guard<std::mutex> l(event_queue_lock_);
+
+  if (event_queue_ == nullptr) {
+    res = InitializeEventQueueLocked();
+    if (res != OK) {
+      ALOGE("%s %d initializing event queue failed: %d(%s)", __func__, __LINE__,
+            res, strerror(-res));
+      return res;
+    }
+  }
+  // Third argument 0 is presumably the max report latency (no batching) —
+  // confirm against IEventQueue::enableSensor.
+  Return<Result> result =
+      event_queue_->enableSensor(handle_, sensor_sampling_period_us_, 0);
+  if (!result.isOk()) {
+    // Transport-level (HIDL/binder) failure.
+    ALOGE("%s %d enable sensor Hidl call failed: %s", __func__, __LINE__,
+          result.description().c_str());
+    return -1;
+  }
+  if (result != Result::OK) {
+    // The call reached the service but was rejected.
+    ALOGE("%s %d enable sensor %s failed.", __func__, __LINE__,
+          toString(result).c_str());
+    return -1;
+  }
+
+  enabled_ = true;
+  return res;
+}
+
+// Disables the sensor if currently enabled. The event queue itself is kept
+// so a later Enable() can reuse it. Returns OK on success or no-op, a
+// negative value on a failed service call.
+status_t GoogSensorWrapper::Disable() {
+  std::lock_guard<std::mutex> l(event_queue_lock_);
+
+  if (enabled_) {
+    if (event_queue_ != nullptr) {
+      Return<Result> result = event_queue_->disableSensor(handle_);
+      if (!result.isOk()) {
+        // Transport-level (HIDL/binder) failure.
+        ALOGE("%s %d disable sensor Hidl call failed: %s", __func__, __LINE__,
+              result.description().c_str());
+        return -1;
+      }
+      if (result != Result::OK) {
+        ALOGE("%s %d disable sensor %s failed.", __func__, __LINE__,
+              toString(result).c_str());
+        return -1;
+      }
+    }
+    enabled_ = false;
+  }
+  return OK;
+}
+
+// Resolves the derived class's sensor handle and creates the sensor-service
+// event queue with an EventQueueCallback bound to this wrapper.
+// Caller must hold event_queue_lock_. Returns OK on success.
+status_t GoogSensorWrapper::InitializeEventQueueLocked() {
+  ALOGV("%s %d", __func__, __LINE__);
+
+  handle_ = GetSensorHandle();
+  if (handle_ < 0) {
+    ALOGE("%s %d Getting sensor from Sensor Manager failed.", __func__,
+          __LINE__);
+    return INVALID_OPERATION;
+  }
+
+  sp<ISensorManager> manager = ISensorManager::getService();
+  if (manager == nullptr) {
+    ALOGE("%s %d Cannot get ISensorManager", __func__, __LINE__);
+    return INVALID_OPERATION;
+  }
+
+  // The result lambda runs synchronously inside createEventQueue, so the
+  // caller-held event_queue_lock_ still guards the write to event_queue_;
+  // ScopedLockAssertion tells the thread-safety analysis exactly that.
+  manager->createEventQueue(
+      new EventQueueCallback(this), [this](const auto& q, auto result) {
+        // The lock will be held when this synchronous callback executes
+        base::ScopedLockAssertion assertion(event_queue_lock_);
+        if (result != Result::OK) {
+          ALOGE("%s %d Cannot create event queue", __func__, __LINE__);
+          return;
+        }
+        event_queue_ = q;
+      });
+
+  // event_queue_ stays null if the callback reported an error above.
+  if (event_queue_ == nullptr) {
+    return INVALID_OPERATION;
+  }
+
+  return OK;
+}
+
+// Handles one event from the sensor service (invoked via EventQueueCallback).
+// Events are filtered to this sensor's handle, skipping ADDITIONAL_INFO
+// metadata events; accepted events are stamped with their arrival time,
+// stored in the bounded buffer (oldest dropped), and forwarded to the
+// user-supplied processor, if any. Always returns 1 to keep receiving
+// callbacks.
+int GoogSensorWrapper::EventCallback(const Event& e) {
+  ExtendedSensorEvent event;
+  memset(&event, 0, sizeof(event));
+  // event_queue_lock_ is held across the whole callback — presumably to read
+  // handle_ consistently with Enable()/Disable(); confirm intended scope.
+  std::lock_guard<std::mutex> l(event_queue_lock_);
+  event.sensor_event = e;
+  if (event.sensor_event.sensorHandle == handle_ &&
+      event.sensor_event.sensorType != SensorType::ADDITIONAL_INFO) {
+    {
+      // Bounded buffer: evict the oldest event once the size limit is hit.
+      std::lock_guard<std::mutex> l(event_buffer_lock_);
+      if (event_buffer_.size() >= event_buffer_size_limit_) {
+        event_buffer_.pop_front();
+      }
+      event.event_arrival_time_ns = elapsedRealtimeNano();
+      event_buffer_.push_back(event);
+    }
+
+    // Invoke the user callback outside event_buffer_lock_ but still under
+    // event_queue_lock_ and event_processor_lock_.
+    std::lock_guard<std::mutex> el(event_processor_lock_);
+    if (event_processor_ != nullptr) {
+      event_processor_(event);
+    }
+  }
+
+  // Return 1 to continue receiving callbacks.
+  return 1;
+}
+
+}  // namespace camera_sensor_listener
+}  // namespace android
diff --git a/common/sensor_listener/goog_sensor_wrapper.h b/common/sensor_listener/goog_sensor_wrapper.h
new file mode 100644
index 0000000..456a163
--- /dev/null
+++ b/common/sensor_listener/goog_sensor_wrapper.h
@@ -0,0 +1,123 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_SENSOR_WRAPPER_H_
+#define VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_SENSOR_WRAPPER_H_
+
+#include <android-base/thread_annotations.h>
+#include <android/frameworks/sensorservice/1.0/ISensorManager.h>
+#include <android/frameworks/sensorservice/1.0/types.h>
+
+#include <deque>
+#include <functional>
+#include <mutex>
+
+#include "utils/Errors.h"
+#include "utils/RefBase.h"
+
+namespace android {
+namespace camera_sensor_listener {
+
+// A sensor event paired with the time its callback fired, enabling
+// delivery-latency measurement (arrival time minus event timestamp).
+struct ExtendedSensorEvent {
+  // Actual sensor event data.
+  ::android::hardware::sensors::V1_0::Event sensor_event;
+  // Event arrival time, i.e., time of callback being triggered.
+  int64_t event_arrival_time_ns;
+};
+
+// Abstract base class for sensor listeners built on the Android sensor
+// service HIDL interface. Derived classes supply the sensor handle via
+// GetSensorHandle(); this class owns the event queue, enable/disable state,
+// a bounded buffer of recent events, and an optional user event callback.
+class GoogSensorWrapper : public virtual RefBase {
+ public:
+  virtual ~GoogSensorWrapper();
+
+  // Set user-defined sensor event callback function.
+  // When sensor event arrives, event_processor will be invoked to perform
+  // user-defined callback operations.
+  // It should be called before GoogleSensorWrapper::Enable, otherwise it
+  // won't take effect.
+  status_t SetEventProcessor(
+      std::function<void(const ExtendedSensorEvent& event)> event_processor);
+
+  // Enables the sensor. When object is created, sensor is disabled by default.
+  // Returns 0 on success.
+  status_t Enable();
+
+  // Disables the sensor. Returns 0 on success.
+  status_t Disable();
+
+  // True if the sensor is currently enabled.
+  // NOTE(review): reads enabled_ without holding event_queue_lock_ — fine for
+  // status polling, but not synchronized with Enable()/Disable().
+  bool IsEnabled() const { return enabled_; }
+
+ protected:
+  // Input:
+  //   event_queue_size: sets size of the event queue.
+  //   sensor_sampling_period_us: sets sensor sampling period in us (1e-6s).
+  //     Default sampling rate is set to 5ms. However, sensor_sampling_period_us
+  //     has no effect on on_change type sensors like Vsync or Light.
+  GoogSensorWrapper(size_t event_queue_size,
+                    int64_t sensor_sampling_period_us = 5000);
+
+  // Virtual function to get different sensor handler, e.g., gyro handler.
+  virtual int32_t GetSensorHandle() = 0;
+
+  // Buffer of the most recent events. Oldest in the front.
+  std::deque<ExtendedSensorEvent> event_buffer_ GUARDED_BY(event_buffer_lock_);
+
+  // Lock protecting event_buffer_.
+  mutable std::mutex event_buffer_lock_;
+
+ private:
+  // Event callback function.
+  // When invoked, it will enqueue Event e to event_deque_, and further invoke
+  // user-defined callback function event_processor_.
+  int EventCallback(const ::android::hardware::sensors::V1_0::Event& e);
+
+  // Initialize sensor handler and set event_queue_.
+  status_t InitializeEventQueueLocked()
+      EXCLUSIVE_LOCKS_REQUIRED(event_queue_lock_);
+
+  // Strong pointer to IEventQueue allocated by sensor service.
+  sp<::android::frameworks::sensorservice::V1_0::IEventQueue> event_queue_
+      GUARDED_BY(event_queue_lock_);
+
+  // User-defined callback functor invoked when sensor event arrives.
+  std::function<void(const ExtendedSensorEvent& event)> event_processor_
+      GUARDED_BY(event_processor_lock_);
+
+  // Lock protecting event_queue_.
+  mutable std::mutex event_queue_lock_;
+
+  // Lock protecting event_processor_.
+  mutable std::mutex event_processor_lock_;
+
+  // Size limit for the event buffer.
+  size_t event_buffer_size_limit_;
+
+  // Sampling period to read sensor events.
+  int64_t sensor_sampling_period_us_;
+
+  // Sensor handler.
+  int handle_;
+
+  // Whether sensor is enabled.
+  bool enabled_;
+
+  // EventQueueCallback forwards service events into private EventCallback().
+  friend class EventQueueCallback;
+};
+
+}  // namespace camera_sensor_listener
+}  // namespace android
+
+#endif  // VENDOR_GOOGLE_CAMERA_SENSOR_LISTENER_GOOG_SENSOR_WRAPPER_H_
diff --git a/common/sensor_listener/tests/goog_gyro_test.cc b/common/sensor_listener/tests/goog_gyro_test.cc
new file mode 100644
index 0000000..0218fad
--- /dev/null
+++ b/common/sensor_listener/tests/goog_gyro_test.cc
@@ -0,0 +1,314 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <atomic>
+#include <fstream>
+#include <functional>
+#include <iostream>
+#include <memory>
+#include <thread>
+
+#include <gtest/gtest.h>
+#include <log/log.h>
+#include <utils/SystemClock.h>
+
+#include "goog_gyro_direct.h"
+#include "goog_sensor_motion.h"
+#include "sensor_test_utils.h"
+
+namespace android {
+namespace camera_sensor_listener {
+
+// Fixture comparing two gyro delivery paths: the direct channel (polled via
+// GoogGyroDirect) and the event queue (pushed via GoogSensorMotion). Samples
+// from each path are accumulated in the gyro_direct_* / gyro_poll_* vectors.
+class GoogGyroTest : public ::testing::Test {
+ protected:
+  // Creates and enables a direct-channel gyro listener.
+  void CreateGyroDirectListener(int64_t gyro_sampling_period_us,
+                                size_t event_queue_size) {
+    gyro_direct_ptr_ =
+        GoogGyroDirect::Create(gyro_sampling_period_us, event_queue_size);
+    ASSERT_NE(gyro_direct_ptr_, nullptr);
+    ASSERT_TRUE(gyro_direct_ptr_->GetSensorEnablingStatus());
+  }
+
+  // Creates and enables an event-queue gyro listener whose callback records
+  // every sample into the gyro_poll_* vectors.
+  // NOTE(review): the callback appends from the sensor callback thread while
+  // tests read/size these vectors — confirm no concurrent access (tests
+  // disable the sensor or sleep before reading).
+  void CreateGyroPollListener(int64_t gyro_sampling_period_us,
+                              size_t event_queue_size) {
+    gyro_poll_ptr_ = GoogSensorMotion::Create(
+        MotionSensorType::GYROSCOPE, gyro_sampling_period_us, event_queue_size);
+    ASSERT_NE(gyro_poll_ptr_, nullptr);
+    ASSERT_TRUE(gyro_poll_ptr_->GetSensorEnablingStatus());
+    // Set up gyro_poll_ptr_'s event callback.
+    std::function<void(const ExtendedSensorEvent& event)> enqueue_gyro_poll =
+        [&](const ExtendedSensorEvent& event) {
+          gyro_poll_timestamp_ns_.push_back(event.sensor_event.timestamp);
+          gyro_poll_azimuth_.push_back(event.sensor_event.u.vec3.x);
+          gyro_poll_pitch_.push_back(event.sensor_event.u.vec3.y);
+          gyro_poll_roll_.push_back(event.sensor_event.u.vec3.z);
+          gyro_poll_arrival_time_ns_.push_back(event.event_arrival_time_ns);
+        };
+    gyro_poll_ptr_->SetEventProcessor(enqueue_gyro_poll);
+  }
+
+  // Polls the direct channel every check_period_ms for time_duration_ms,
+  // appending samples newer than the last-seen timestamp to the
+  // gyro_direct_* vectors.
+  void QueryGyroDirectEventsForDuration(int time_duration_ms,
+                                        int check_period_ms) {
+    const std::chrono::milliseconds kCheckPeriod(check_period_ms);
+    int64_t check_time_duration_ns =
+        static_cast<int64_t>(time_duration_ms) * static_cast<int64_t>(1e6);
+
+    int64_t start_measure_time = android::elapsedRealtimeNano();
+    int64_t gyro_start_time = start_measure_time;
+    while (1) {
+      std::this_thread::sleep_for(kCheckPeriod);
+
+      std::vector<int64_t> gyro_timestamp_ns;
+      std::vector<float> azimuth;
+      std::vector<float> pitch;
+      std::vector<float> roll;
+      std::vector<int64_t> gyro_arrival_time_ns;
+
+      // Open-ended upper bound: fetch everything newer than gyro_start_time.
+      int64_t gyro_end_time = LLONG_MAX;
+      gyro_direct_ptr_->QueryGyroEventsBetweenTimestamps(
+          gyro_start_time, gyro_end_time, &gyro_timestamp_ns, &azimuth, &pitch,
+          &roll, &gyro_arrival_time_ns);
+
+      if (!gyro_timestamp_ns.empty()) {
+        EXPECT_GT(gyro_timestamp_ns.front(), gyro_start_time);
+        EXPECT_LE(gyro_timestamp_ns.back(), gyro_end_time);
+
+        gyro_direct_timestamp_ns_.insert(gyro_direct_timestamp_ns_.end(),
+                                         gyro_timestamp_ns.begin(),
+                                         gyro_timestamp_ns.end());
+        gyro_direct_azimuth_.insert(gyro_direct_azimuth_.end(), azimuth.begin(),
+                                    azimuth.end());
+        gyro_direct_pitch_.insert(gyro_direct_pitch_.end(), pitch.begin(),
+                                  pitch.end());
+        gyro_direct_roll_.insert(gyro_direct_roll_.end(), roll.begin(),
+                                 roll.end());
+        gyro_direct_arrival_time_ns_.insert(gyro_direct_arrival_time_ns_.end(),
+                                            gyro_arrival_time_ns.begin(),
+                                            gyro_arrival_time_ns.end());
+
+        // Advance the window so the next query excludes already-seen samples.
+        gyro_start_time = gyro_timestamp_ns.back();
+      }
+
+      int64_t current_time = android::elapsedRealtimeNano();
+      if (current_time - start_measure_time > check_time_duration_ns) {
+        break;
+      }
+    }
+  }
+
+  // Verifies consecutive sample intervals stay within +/-10% of the
+  // configured sampling period.
+  // NOTE(review): `length` is size_t compared against `int i`, and
+  // EXPECT_GT(length, 0) mixes signedness — may warn under -Wsign-compare.
+  void CheckTimestampInterval(const std::vector<int64_t>& gyro_timestamp_ns,
+                              int64_t gyro_sampling_period_us) {
+    // gyro samples' timestamp intervals need to be between
+    // [sampling_period * 0.9, sampling_period * 1.1].
+    int64_t delta_upper_ns =
+        gyro_sampling_period_us * 1000 + gyro_sampling_period_us * 100;
+    int64_t delta_lower_ns =
+        gyro_sampling_period_us * 1000 - gyro_sampling_period_us * 100;
+    size_t length = gyro_timestamp_ns.size();
+    EXPECT_GT(length, 0) << "Get zero gyro events";
+    for (int i = 1; i < length; ++i) {
+      int64_t delta = gyro_timestamp_ns[i] - gyro_timestamp_ns[i - 1];
+      EXPECT_LE(delta, delta_upper_ns)
+          << " gyro timestamp i+1 " << gyro_timestamp_ns[i]
+          << " gyro timestamp i " << gyro_timestamp_ns[i - 1] << " delta "
+          << delta;
+      EXPECT_GE(delta, delta_lower_ns)
+          << " gyro timestamp i+1 " << gyro_timestamp_ns[i]
+          << " gyro timestamp i " << gyro_timestamp_ns[i - 1] << " delta "
+          << delta;
+    }
+  }
+
+  // Checks arrival latency (arrival - timestamp): average <= 7ms, 99th
+  // percentile <= 10ms, 99.9th percentile <= 15ms.
+  void CheckGyroSampleLatency(const std::vector<int64_t>& gyro_timestamp_ns,
+                              const std::vector<int64_t>& gyro_arrival_time_ns) {
+    int num_latency_10ms = 0;
+    int num_latency_15ms = 0;
+    int num_sample = gyro_timestamp_ns.size();
+    EXPECT_GT(num_sample, 0) << "Get zero gyro events";
+    int64_t sum_latency = 0;
+    for (int i = 0; i < num_sample; ++i) {
+      int64_t arrival_latency = gyro_arrival_time_ns[i] - gyro_timestamp_ns[i];
+      sum_latency += arrival_latency;
+      if (arrival_latency >= 10e6) {
+        num_latency_10ms++;
+        if (arrival_latency >= 15e6) {
+          num_latency_15ms++;
+        }
+      }
+    }
+    float average_latency = static_cast<float>(sum_latency) /
+                            static_cast<float>(num_sample) / 1000000.f;
+    std::cout << "average latency " << average_latency << "ms\n";
+    float latency_rate_10ms =
+        static_cast<float>(num_latency_10ms) / static_cast<float>(num_sample);
+    float latency_rate_15ms =
+        static_cast<float>(num_latency_15ms) / static_cast<float>(num_sample);
+    EXPECT_LE(average_latency, 7.f) << "gyro average latency should be <= 7ms";
+    EXPECT_LE(latency_rate_10ms, 0.01f)
+        << "gyro latency 99 percentile should be <= 10ms";
+    EXPECT_LE(latency_rate_15ms, 0.001f)
+        << "gyro latency 99.9 percentile should be <= 15ms";
+  }
+
+  // Aligns the poll and direct streams on the first sample with matching
+  // timestamp (<1ms apart) and identical vector data, then expects zero
+  // timestamp drift between the paired streams from that point on.
+  void CheckGyroTimestampOffset() {
+    // find the first corresponding gyro sample.
+    int poll_index = 0;
+    int direct_index = 0;
+    EXPECT_GT(gyro_poll_timestamp_ns_.size(), 0) << "Get zero gyro poll events";
+    EXPECT_GT(gyro_direct_timestamp_ns_.size(), 0)
+        << "Get zero gyro direct events";
+    while (poll_index < gyro_poll_timestamp_ns_.size() &&
+           direct_index < gyro_direct_timestamp_ns_.size()) {
+      if (llabs(gyro_poll_timestamp_ns_[poll_index] -
+                gyro_direct_timestamp_ns_[direct_index]) < 1e6 &&
+          gyro_poll_azimuth_[poll_index] == gyro_direct_azimuth_[direct_index] &&
+          gyro_poll_pitch_[poll_index] == gyro_direct_pitch_[direct_index] &&
+          gyro_poll_roll_[poll_index] == gyro_direct_roll_[direct_index]) {
+        break;
+      } else {
+        // Advance whichever stream is behind in time.
+        if (gyro_poll_timestamp_ns_[poll_index] <
+            gyro_direct_timestamp_ns_[direct_index]) {
+          ++poll_index;
+        } else {
+          ++direct_index;
+        }
+      }
+    }
+    int64_t max_offset = 0;
+    int num_offset = 0;
+    int num_samples = 0;
+
+    for (int i = poll_index, j = direct_index;
+         i < gyro_poll_timestamp_ns_.size() &&
+         j < gyro_direct_timestamp_ns_.size();
+         ++i, ++j) {
+      int64_t offset = gyro_direct_timestamp_ns_[j] - gyro_poll_timestamp_ns_[i];
+      if (offset != 0) {
+        ++num_offset;
+        if (llabs(offset) > max_offset) {
+          max_offset = llabs(offset);
+        }
+      }
+      ++num_samples;
+    }
+    // <= 0 effectively asserts "exactly zero" drift/drifted-sample count.
+    EXPECT_LE(max_offset, 0) << "max timestamp drift " << max_offset << "ns\n";
+    EXPECT_LE(num_offset, 0) << "drifted samples " << num_offset << " out of "
+                             << num_samples << " samples\n";
+  }
+
+  // Writes one "timestamp azimuth roll pitch arrival_time" line per sample to
+  // a generated log file. Note the azimuth/roll/pitch parameter order.
+  void LogGyroDataToFile(const std::string& filename,
+                         const std::vector<int64_t>& gyro_timestamp_ns,
+                         const std::vector<float>& azimuth,
+                         const std::vector<float>& roll,
+                         const std::vector<float>& pitch,
+                         const std::vector<int64_t>& gyro_arrival_time_ns) {
+    std::string output_filename;
+    bool result = GenerateLogFilename(filename, &output_filename);
+    EXPECT_EQ(result, true) << "fail to create gyro test log file";
+    std::cout << "save gyro log to " << output_filename << "\n";
+    std::ofstream ofs(output_filename);
+    for (int i = 0; i < gyro_timestamp_ns.size(); ++i) {
+      ofs << gyro_timestamp_ns[i] << " " << azimuth[i] << " " << roll[i] << " "
+          << pitch[i] << " " << gyro_arrival_time_ns[i] << "\n";
+    }
+  }
+
+  // Event-queue ("poll") path listener and its accumulated samples.
+  sp<GoogSensorMotion> gyro_poll_ptr_ = nullptr;
+  std::vector<int64_t> gyro_poll_timestamp_ns_;
+  std::vector<float> gyro_poll_azimuth_;
+  std::vector<float> gyro_poll_pitch_;
+  std::vector<float> gyro_poll_roll_;
+  std::vector<int64_t> gyro_poll_arrival_time_ns_;
+
+  // Direct-channel path listener and its accumulated samples.
+  std::unique_ptr<GoogGyroDirect> gyro_direct_ptr_ = nullptr;
+  std::vector<int64_t> gyro_direct_timestamp_ns_;
+  std::vector<float> gyro_direct_azimuth_;
+  std::vector<float> gyro_direct_pitch_;
+  std::vector<float> gyro_direct_roll_;
+  std::vector<int64_t> gyro_direct_arrival_time_ns_;
+};
+
+// Measures direct-channel arrival latency over a 5-minute run.
+TEST_F(GoogGyroTest, TestDirectGyroLatency) {
+  CreateGyroDirectListener(/*gyro_sampling_period_us=*/5000,
+                           /*event_queue_size=*/60);
+  // Test duration 5 minutes, check period 5ms.
+  // Direct gyro arrival time is the poking time,
+  // to effectively test latency, check period has to be
+  // the same as sampling period.
+  QueryGyroDirectEventsForDuration(/*time_duration_ms=*/300000,
+                                   /*check_period_ms=*/5);
+  CheckGyroSampleLatency(gyro_direct_timestamp_ns_,
+                         gyro_direct_arrival_time_ns_);
+}
+
+// Runs both paths for 5 minutes, checks each path's sampling interval and the
+// timestamp drift between them, and dumps both streams to log files.
+TEST_F(GoogGyroTest, TestGyroDirectIntervalAndDrift) {
+  CreateGyroDirectListener(/*gyro_sampling_period_us=*/5000,
+                           /*event_queue_size=*/60);
+  CreateGyroPollListener(/*gyro_sampling_period_us=*/5000,
+                         /*event_queue_size=*/20);
+  QueryGyroDirectEventsForDuration(/*time_duration_ms=*/300000,
+                                   /*check_period_ms=*/33);
+  // Stop the poll listener before reading its vectors.
+  gyro_poll_ptr_->Disable();
+  CheckTimestampInterval(gyro_direct_timestamp_ns_,
+                         /*gyro_sampling_period_us=*/5000);
+  CheckTimestampInterval(gyro_poll_timestamp_ns_,
+                         /*gyro_sampling_period_us=*/5000);
+  CheckGyroTimestampOffset();
+
+  // Save gyro poll data to txt file.
+  LogGyroDataToFile("gyro_poll_log", gyro_poll_timestamp_ns_,
+                    gyro_poll_azimuth_, gyro_poll_roll_, gyro_poll_pitch_,
+                    gyro_poll_arrival_time_ns_);
+  // Save gyro direct data to txt file.
+  LogGyroDataToFile("gyro_direct_log", gyro_direct_timestamp_ns_,
+                    gyro_direct_azimuth_, gyro_direct_roll_, gyro_direct_pitch_,
+                    gyro_direct_arrival_time_ns_);
+}
+
+// Verifies both paths stop producing samples while disabled and resume after
+// being re-enabled.
+TEST_F(GoogGyroTest, TestGyroEnableDisable) {
+  CreateGyroDirectListener(/*gyro_sampling_period_us=*/5000,
+                           /*event_queue_size=*/20);
+  QueryGyroDirectEventsForDuration(/*time_duration_ms=*/300,
+                                   /*check_period_ms=*/33);
+  size_t num_gyro_direct_samples = gyro_direct_timestamp_ns_.size();
+  EXPECT_GT(gyro_direct_timestamp_ns_.size(), 0);
+  gyro_direct_ptr_->DisableDirectChannel();
+  EXPECT_FALSE(gyro_direct_ptr_->GetSensorEnablingStatus());
+  const std::chrono::milliseconds kWaitPeriod(/*wait_period_ms=*/300);
+  std::this_thread::sleep_for(kWaitPeriod);
+  gyro_direct_ptr_->EnableDirectChannel();
+  EXPECT_TRUE(gyro_direct_ptr_->GetSensorEnablingStatus());
+  QueryGyroDirectEventsForDuration(/*time_duration_ms=*/300,
+                                   /*check_period_ms=*/33);
+  // New samples must have arrived after re-enabling.
+  EXPECT_GT(gyro_direct_timestamp_ns_.size(), num_gyro_direct_samples);
+
+  // Same cycle for the event-queue (poll) path: sample count must be frozen
+  // while disabled and grow again after Enable().
+  CreateGyroPollListener(/*gyro_sampling_period_us=*/5000,
+                         /*event_queue_size=*/20);
+  std::this_thread::sleep_for(kWaitPeriod);
+  size_t num_gyro_poll_samples = gyro_poll_timestamp_ns_.size();
+  EXPECT_GT(num_gyro_poll_samples, 0);
+  gyro_poll_ptr_->Disable();
+  EXPECT_FALSE(gyro_poll_ptr_->GetSensorEnablingStatus());
+  std::this_thread::sleep_for(kWaitPeriod);
+  EXPECT_EQ(gyro_poll_timestamp_ns_.size(), num_gyro_poll_samples);
+  gyro_poll_ptr_->Enable();
+  EXPECT_TRUE(gyro_poll_ptr_->GetSensorEnablingStatus());
+  std::this_thread::sleep_for(kWaitPeriod);
+  EXPECT_GT(gyro_poll_timestamp_ns_.size(), num_gyro_poll_samples);
+}
+
+}  // namespace camera_sensor_listener
+}  // namespace android
diff --git a/common/sensor_listener/tests/goog_sensor_environment_test.cc b/common/sensor_listener/tests/goog_sensor_environment_test.cc
new file mode 100644
index 0000000..9249703
--- /dev/null
+++ b/common/sensor_listener/tests/goog_sensor_environment_test.cc
@@ -0,0 +1,123 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <functional>
+#include <iostream>
+#include <thread>
+
+#include <gtest/gtest.h>
+#include "utils/RefBase.h"
+
+#include "goog_sensor_environment.h"
+
+namespace android {
+namespace camera_sensor_listener {
+namespace {
+
+// Prints a scalar sensor event with its timestamp, arrival time, and
+// delivery latency (arrival - event timestamp), all in nanoseconds.
+void PrintSensorEvent(const char* sensor_name,
+                      const ExtendedSensorEvent& event) {
+  std::cout << sensor_name << " event " << event.sensor_event.u.scalar
+            << " with timestamp " << event.sensor_event.timestamp
+            << " arrives at " << event.event_arrival_time_ns << " with latency "
+            << event.event_arrival_time_ns - event.sensor_event.timestamp
+            << std::endl;
+}
+
+// Repeatedly fetches the latest num_events events and checks that some events
+// exist, the parallel vectors agree in size, and timestamps are strictly
+// increasing.
+void CheckLatestNEvents(const sp<GoogSensorEnvironment>& sensor_ptr,
+                        int num_events) {
+  // Test GetLatestNSensorEvents for 200 iterations with 33ms apart.
+  for (int i = 0; i < 200; ++i) {
+    const std::chrono::milliseconds kCheckPeriod(33 /*ms*/);
+    std::this_thread::sleep_for(kCheckPeriod);
+
+    std::vector<int64_t> timestamps;
+    std::vector<float> event_data;
+    std::vector<int64_t> arrival_timestamps;
+    sensor_ptr->GetLatestNSensorEvents(num_events, &timestamps, &event_data,
+                                       &arrival_timestamps);
+
+    // On_change sensors may have less events than num_events if
+    // not much environment change happens.
+    EXPECT_NE(timestamps.size(), 0);
+    EXPECT_EQ(timestamps.size(), event_data.size());
+    EXPECT_EQ(timestamps.size(), arrival_timestamps.size());
+    // NOTE(review): inner `i` shadows the outer loop index; also int vs
+    // size_t comparison may warn under -Wsign-compare.
+    for (int i = 1; i < timestamps.size(); ++i) {
+      EXPECT_LT(timestamps[i - 1], timestamps[i]);
+    }
+  }
+}
+
+}  // namespace
+
+// Test DEVICE_ORIENTATION sensor.
+// Try to rotate device during test to get on_change sensor events.
+TEST(GoogSensorEnvironmentTest, TestDeviceOrientation) {
+  ::android::sp<GoogSensorEnvironment> ptr;
+  ptr = GoogSensorEnvironment::Create(EnvironmentSensorType::DEVICE_ORIENTATION);
+  ASSERT_NE(ptr, nullptr);
+  ASSERT_TRUE(ptr->GetSensorEnablingStatus());
+
+  // Test customized event arrival callback.
+  const char* sensor_name = ptr->GetSensorName();
+  std::function<void(const ExtendedSensorEvent& event)> print =
+      std::bind(PrintSensorEvent, sensor_name, std::placeholders::_1);
+  ptr->SetEventProcessor(print);
+
+  // Allow time for on_change events to accumulate before querying.
+  const std::chrono::milliseconds kStartupTime(5000 /*ms*/);
+  std::this_thread::sleep_for(kStartupTime);
+  CheckLatestNEvents(ptr, 2);
+}
+
+// Test LIGHT sensor.
+// Try to put device under different lighting condition during test.
+TEST(GoogSensorEnvironmentTest, TestLight) {
+  ::android::sp<GoogSensorEnvironment> ptr;
+  ptr = GoogSensorEnvironment::Create(EnvironmentSensorType::LIGHT);
+  ASSERT_NE(ptr, nullptr);
+  ASSERT_TRUE(ptr->GetSensorEnablingStatus());
+
+  // Test customized event arrival callback.
+  const char* sensor_name = ptr->GetSensorName();
+  std::function<void(const ExtendedSensorEvent& event)> print =
+      std::bind(PrintSensorEvent, sensor_name, std::placeholders::_1);
+  ptr->SetEventProcessor(print);
+
+  // Allow time for on_change events to accumulate before querying.
+  const std::chrono::milliseconds kStartupTime(5000 /*ms*/);
+  std::this_thread::sleep_for(kStartupTime);
+  CheckLatestNEvents(ptr, 3);
+}
+
+// Test PROXIMITY sensor.
+// Try to move hands close/away from device during test.
+TEST(GoogSensorEnvironmentTest, TestProximity) {
+  ::android::sp<GoogSensorEnvironment> ptr;
+  ptr = GoogSensorEnvironment::Create(EnvironmentSensorType::PROXIMITY);
+  ASSERT_NE(ptr, nullptr);
+  ASSERT_TRUE(ptr->GetSensorEnablingStatus());
+
+  // Test customized event arrival callback.
+  const char* sensor_name = ptr->GetSensorName();
+  std::function<void(const ExtendedSensorEvent& event)> print =
+      std::bind(PrintSensorEvent, sensor_name, std::placeholders::_1);
+  ptr->SetEventProcessor(print);
+
+  // Allow time for on_change events to accumulate before querying.
+  const std::chrono::milliseconds kStartupTime(5000 /*ms*/);
+  std::this_thread::sleep_for(kStartupTime);
+  CheckLatestNEvents(ptr, 5);
+}
+
+}  // namespace camera_sensor_listener
+}  // namespace android
diff --git a/common/sensor_listener/tests/goog_sensor_motion_test.cc b/common/sensor_listener/tests/goog_sensor_motion_test.cc
new file mode 100644
index 0000000..a2bbb6d
--- /dev/null
+++ b/common/sensor_listener/tests/goog_sensor_motion_test.cc
@@ -0,0 +1,210 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <functional>
+#include <iostream>
+#include <thread>
+
+#include <gtest/gtest.h>
+#include "utils/RefBase.h"
+#include "utils/SystemClock.h"
+
+#include "goog_sensor_motion.h"
+
+namespace android {
+namespace camera_sensor_listener {
+namespace {
+
+void PrintMotionSensorEvent(const char* sensor_name,
+                            const ExtendedSensorEvent& event) {
+  std::cout << sensor_name << " event (" << event.sensor_event.u.vec3.x << " "
+            << event.sensor_event.u.vec3.y << " " << event.sensor_event.u.vec3.z
+            << ") with timestamp " << event.sensor_event.timestamp
+            << " arrives at " << event.event_arrival_time_ns << " with latency "
+            << event.event_arrival_time_ns - event.sensor_event.timestamp
+            << std::endl;
+}
+
+// Polls the listener 200 times (33ms apart) and verifies that the latest
+// num_events samples are complete, size-consistent and strictly ordered.
+void CheckLatestNMotionEvents(const sp<GoogSensorMotion>& sensor_ptr,
+                              int num_events) {
+  for (int iteration = 0; iteration < 200; ++iteration) {
+    const std::chrono::milliseconds kCheckPeriod(33 /*ms*/);
+    std::this_thread::sleep_for(kCheckPeriod);
+
+    std::vector<int64_t> timestamps;
+    std::vector<float> motion_vector_x;
+    std::vector<float> motion_vector_y;
+    std::vector<float> motion_vector_z;
+    std::vector<int64_t> arrival_timestamps;
+    sensor_ptr->GetLatestNSensorEvents(num_events, &timestamps,
+                                       &motion_vector_x, &motion_vector_y,
+                                       &motion_vector_z, &arrival_timestamps);
+
+    EXPECT_EQ(timestamps.size(), static_cast<size_t>(num_events));
+    EXPECT_EQ(timestamps.size(), motion_vector_x.size());
+    EXPECT_EQ(timestamps.size(), motion_vector_y.size());
+    EXPECT_EQ(timestamps.size(), motion_vector_z.size());
+    EXPECT_EQ(timestamps.size(), arrival_timestamps.size());
+
+    // Event timestamps must be strictly increasing.
+    for (size_t i = 1; i < timestamps.size(); ++i) {
+      EXPECT_LT(timestamps[i - 1], timestamps[i]);
+    }
+  }
+}
+
+// Repeatedly queries events in consecutive [start_time, end_time] windows
+// and verifies size consistency, range bounds and strict ordering.
+void CheckQuerySensorEventsBetweenTimestamps(
+    const sp<GoogSensorMotion>& sensor_ptr) {
+  // Test QuerySensorEventsBetweenTimestamps for 200 iterations, 33ms apart.
+  int64_t start_time = elapsedRealtimeNano();
+  for (int iteration = 0; iteration < 200; ++iteration) {
+    const std::chrono::milliseconds kCheckPeriod(33 /*ms*/);
+    std::this_thread::sleep_for(kCheckPeriod);
+    int64_t end_time = elapsedRealtimeNano();
+
+    std::vector<int64_t> timestamps;
+    std::vector<float> motion_vector_x;
+    std::vector<float> motion_vector_y;
+    std::vector<float> motion_vector_z;
+    std::vector<int64_t> arrival_timestamps;
+    sensor_ptr->QuerySensorEventsBetweenTimestamps(
+        start_time, end_time, &timestamps, &motion_vector_x, &motion_vector_y,
+        &motion_vector_z, &arrival_timestamps);
+
+    EXPECT_EQ(timestamps.size(), motion_vector_x.size());
+    EXPECT_EQ(timestamps.size(), motion_vector_y.size());
+    EXPECT_EQ(timestamps.size(), motion_vector_z.size());
+    EXPECT_EQ(timestamps.size(), arrival_timestamps.size());
+    if (!timestamps.empty()) {
+      EXPECT_GT(timestamps.front(), start_time);
+      EXPECT_LE(timestamps.back(), end_time);
+    }
+    for (size_t i = 1; i < timestamps.size(); ++i) {
+      EXPECT_LT(timestamps[i - 1], timestamps[i]);
+    }
+
+    start_time = end_time;
+  }
+}
+
+}  // namespace
+
+// Test ACCELEROMETER sensor.
+TEST(GoogSensorMotionTest, TestAccelerometer) {
+  ::android::sp<GoogSensorMotion> ptr;
+  ptr = GoogSensorMotion::Create(MotionSensorType::ACCELEROMETER,
+                                 2000 /*sampling_period_us*/);
+  ASSERT_EQ(ptr, nullptr);
+
+  ptr = GoogSensorMotion::Create(MotionSensorType::ACCELEROMETER,
+                                 5000 /*sampling_period_us*/);
+  ASSERT_NE(ptr, nullptr);
+  ASSERT_TRUE(ptr->GetSensorEnablingStatus());
+
+  // Test customized event arrival callback.
+  const char* sensor_name = ptr->GetSensorName();
+  std::function<void(const ExtendedSensorEvent& event)> print =
+      std::bind(PrintMotionSensorEvent, sensor_name, std::placeholders::_1);
+  ptr->SetEventProcessor(print);
+
+  const std::chrono::milliseconds kStartupTime(500 /*ms*/);
+  std::this_thread::sleep_for(kStartupTime);
+  CheckLatestNMotionEvents(ptr, 6);
+  CheckQuerySensorEventsBetweenTimestamps(ptr);
+}
+
+// Test GRAVITY sensor.
+TEST(GoogSensorMotionTest, TestGravity) {
+  ::android::sp<GoogSensorMotion> ptr;
+  ptr = GoogSensorMotion::Create(MotionSensorType::GRAVITY,
+                                 5000 /*sampling_period_us*/);
+  ASSERT_NE(ptr, nullptr);
+  ASSERT_TRUE(ptr->GetSensorEnablingStatus());
+
+  // Test customized event arrival callback.
+  const char* sensor_name = ptr->GetSensorName();
+  std::function<void(const ExtendedSensorEvent& event)> print =
+      std::bind(PrintMotionSensorEvent, sensor_name, std::placeholders::_1);
+  ptr->SetEventProcessor(print);
+
+  const std::chrono::milliseconds kStartupTime(500 /*ms*/);
+  std::this_thread::sleep_for(kStartupTime);
+  CheckLatestNMotionEvents(ptr, 6);
+  CheckQuerySensorEventsBetweenTimestamps(ptr);
+}
+
+// Test GYROSCOPE sensor.
+TEST(GoogSensorMotionTest, TestGyroscope) {
+  ::android::sp<GoogSensorMotion> ptr;
+  ptr = GoogSensorMotion::Create(MotionSensorType::GYROSCOPE,
+                                 5000 /*sampling_period_us*/);
+  ASSERT_NE(ptr, nullptr);
+  ASSERT_TRUE(ptr->GetSensorEnablingStatus());
+
+  // Test customized event arrival callback.
+  const char* sensor_name = ptr->GetSensorName();
+  std::function<void(const ExtendedSensorEvent& event)> print =
+      std::bind(PrintMotionSensorEvent, sensor_name, std::placeholders::_1);
+  ptr->SetEventProcessor(print);
+
+  const std::chrono::milliseconds kStartupTime(500 /*ms*/);
+  std::this_thread::sleep_for(kStartupTime);
+  CheckLatestNMotionEvents(ptr, 7);
+  CheckQuerySensorEventsBetweenTimestamps(ptr);
+}
+
+// Test LINEAR_ACCELERATION sensor.
+TEST(GoogSensorMotionTest, TestLinearAcceleration) {
+  ::android::sp<GoogSensorMotion> ptr;
+  ptr = GoogSensorMotion::Create(MotionSensorType::LINEAR_ACCELERATION,
+                                 5000 /*sampling_period_us*/);
+  ASSERT_NE(ptr, nullptr);
+  ASSERT_TRUE(ptr->GetSensorEnablingStatus());
+
+  // Test customized event arrival callback.
+  const char* sensor_name = ptr->GetSensorName();
+  std::function<void(const ExtendedSensorEvent& event)> print =
+      std::bind(PrintMotionSensorEvent, sensor_name, std::placeholders::_1);
+  ptr->SetEventProcessor(print);
+
+  const std::chrono::milliseconds kStartupTime(500 /*ms*/);
+  std::this_thread::sleep_for(kStartupTime);
+  CheckLatestNMotionEvents(ptr, 7);
+  CheckQuerySensorEventsBetweenTimestamps(ptr);
+}
+
+// Test MAGNETIC_FIELD sensor.
+TEST(GoogSensorMotionTest, TestMagneticField) {
+  ::android::sp<GoogSensorMotion> ptr;
+  ptr = GoogSensorMotion::Create(MotionSensorType::MAGNETIC_FIELD,
+                                 5000 /*sampling_period_us*/);
+  ASSERT_NE(ptr, nullptr);
+  ASSERT_TRUE(ptr->GetSensorEnablingStatus());
+
+  // Test customized event arrival callback.
+  const char* sensor_name = ptr->GetSensorName();
+  std::function<void(const ExtendedSensorEvent& event)> print =
+      std::bind(PrintMotionSensorEvent, sensor_name, std::placeholders::_1);
+  ptr->SetEventProcessor(print);
+
+  const std::chrono::milliseconds kStartupTime(500 /*ms*/);
+  std::this_thread::sleep_for(kStartupTime);
+  CheckLatestNMotionEvents(ptr, 7);
+  CheckQuerySensorEventsBetweenTimestamps(ptr);
+}
+
+}  // namespace camera_sensor_listener
+}  // namespace android
diff --git a/common/sensor_listener/tests/goog_sensor_sync_test.cc b/common/sensor_listener/tests/goog_sensor_sync_test.cc
new file mode 100644
index 0000000..3a72454
--- /dev/null
+++ b/common/sensor_listener/tests/goog_sensor_sync_test.cc
@@ -0,0 +1,364 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "goog_sensor_sync.h"
+
+#include <binder/ProcessState.h>
+#include <gtest/gtest.h>
+#include <stdlib.h>
+
+#include <atomic>
+#include <fstream>
+#include <functional>
+#include <iostream>
+#include <sstream>
+#include <thread>
+
+#include "sensor_test_utils.h"
+#include "utils/RefBase.h"
+
+namespace android {
+namespace camera_sensor_listener {
+namespace {
+// TODO(xuetu): add more camera id and test cases once verified.
+enum class CameraId {
+  kPixel4BackWide = 0,
+  kPixel4Front,
+  kPixel4BackTele,
+  kSliderBackWide,
+  kMaxNum
+};
+}  // namespace
+
+// For Pixel 4 devices, camera sensor can only be streamed when the whole
+// camera HAL is running. GoogSensorSyncTest is not a strict unit test, but
+// more a integral test to test both functionality and performance of Vsync
+// signals.
+class GoogSensorSyncTest : public ::testing::Test {
+ public:
+  void SetUp() override {
+    android::ProcessState::self()->startThreadPool();
+  }
+
+ protected:
+  uint8_t CameraIdMapping(CameraId camera_id) {
+    switch (camera_id) {
+      case CameraId::kPixel4BackWide: {
+        return 0;
+      }
+      case CameraId::kPixel4BackTele: {
+        return 2;
+      }
+      case CameraId::kSliderBackWide: {
+        return 1;  // slider back camera connects vsync 1
+      }
+      default: {
+        return static_cast<uint8_t>(CameraId::kMaxNum);
+      }
+    }
+  }
+
+  void SetupTestEnvironment(CameraId camera_id) {
+    const std::chrono::milliseconds kWaitTime(20000 /*ms*/);
+    std::this_thread::sleep_for(kWaitTime);
+    uint8_t camera_id_int = CameraIdMapping(camera_id);
+    ASSERT_NE(camera_id_int, static_cast<uint8_t>(CameraId::kMaxNum));
+    vsync_camera_id_ = static_cast<uint32_t>(camera_id_int);
+
+    // Create vsync sensor listener.
+    sensor_sync_ptr_ = GoogSensorSync::Create(camera_id_int, kEventQueueSize);
+    ASSERT_NE(sensor_sync_ptr_, nullptr);
+    ASSERT_TRUE(sensor_sync_ptr_->GetSensorEnablingStatus());
+
+    // Create customized callback function to collect incoming event data.
+    std::function<void(const ExtendedSensorEvent& event)> collect =
+        [this](const ExtendedSensorEvent& event) {
+          vsync_timestamps_.push_back(event.sensor_event.timestamp);
+          vsync_arrival_timestamps_.push_back(event.event_arrival_time_ns);
+          int64_t frame_id, boottime_timestamp;
+          sensor_sync_ptr_->ExtractFrameIdAndBoottimeTimestamp(
+              event, &frame_id, &boottime_timestamp);
+          boottime_timestamps_.push_back(boottime_timestamp);
+          frame_ids_.push_back(frame_id);
+        };
+    sensor_sync_ptr_->SetEventProcessor(collect);
+    std::this_thread::sleep_for(kWaitTime);
+  }
+
+  void TeardownTestEnvironment() {
+    const std::chrono::milliseconds kWaitTime(10000 /*ms*/);
+    std::this_thread::sleep_for(kWaitTime);
+  }
+
+  // Validates the samples collected by the event callback: vector sizes
+  // agree, boottime/vsync deltas are small, delivery latency is bounded,
+  // vsync intervals match >=60fps, and frame ids are consecutive.
+  void CheckSamplesValidy(int32_t num_query) {
+    // Check total collected sample number.
+    size_t num_vsync_timestamps = vsync_timestamps_.size();
+    size_t num_vsync_arrival_timestamps = vsync_arrival_timestamps_.size();
+    size_t num_boottime_timestamps = boottime_timestamps_.size();
+    size_t num_frame_ids = frame_ids_.size();
+    EXPECT_EQ(num_vsync_timestamps, num_vsync_arrival_timestamps)
+        << "num_vsync_timestamps " << num_vsync_timestamps
+        << " num_vsync_arrival_timestamps " << num_vsync_arrival_timestamps;
+    EXPECT_EQ(num_boottime_timestamps, num_vsync_arrival_timestamps)
+        << "num_boottime_timestamps " << num_boottime_timestamps
+        << " num_vsync_arrival_timestamps " << num_vsync_arrival_timestamps;
+    EXPECT_EQ(num_frame_ids, num_vsync_arrival_timestamps)
+        << "num_frame_ids " << num_frame_ids << " num_vsync_arrival_timestamps "
+        << num_vsync_arrival_timestamps;
+    EXPECT_GT(num_vsync_timestamps, num_query * 0.9)
+        << "only received " << num_vsync_timestamps << " vsync events";
+
+    // Check boottime and vsync timestamp delta (should be within 10ms).
+    if (num_vsync_timestamps > 0 &&
+        num_vsync_timestamps == num_boottime_timestamps) {
+      for (size_t i = 0; i < num_vsync_timestamps; ++i) {
+        int64_t timestamp_delta = vsync_timestamps_[i] - boottime_timestamps_[i];
+        EXPECT_LE(llabs(timestamp_delta), 10000000)
+            << "delta vsync timestamp " << vsync_timestamps_[i]
+            << " and boottime timestamp " << boottime_timestamps_[i] << " is "
+            << timestamp_delta;
+      }
+    }
+
+    // Check delivery latency: average <= 8ms, maximum <= 10ms.
+    if (num_vsync_timestamps > 0 &&
+        num_vsync_timestamps == num_vsync_arrival_timestamps) {
+      int64_t sum_latency = 0;
+      int64_t max_latency = 0;
+      int max_latency_sample_index = -1;
+      for (size_t i = 0; i < num_vsync_timestamps; ++i) {
+        int64_t latency = vsync_arrival_timestamps_[i] - vsync_timestamps_[i];
+        sum_latency += latency;
+        if (latency > max_latency) {
+          max_latency = latency;
+          max_latency_sample_index = static_cast<int>(i);
+        }
+      }
+      double ave_latency = static_cast<double>(sum_latency) /
+                           static_cast<double>(num_vsync_timestamps);
+      EXPECT_LE(ave_latency, 8000000)
+          << "average vsync timestamp latency " << ave_latency << "ns ("
+          << ave_latency / 1e6 << "ms) exceeds 8000000ns (8ms)";
+      EXPECT_LE(max_latency, 10000000)
+          << "vsync timestamp " << vsync_timestamps_[max_latency_sample_index]
+          << " arrives at "
+          << vsync_arrival_timestamps_[max_latency_sample_index]
+          << " with latency " << max_latency << "ns (" << max_latency / 1e6
+          << "ms)";
+    }
+
+    // Check vsync timestamp interval
+    for (size_t i = 1; i < num_vsync_timestamps; ++i) {
+      int64_t interval = vsync_timestamps_[i] - vsync_timestamps_[i - 1];
+      // assume fps is at least 60, sof interval should be > 12ms.
+      EXPECT_GT(interval, 12000000)
+          << "timestamp interval between " << vsync_timestamps_[i] << " and "
+          << vsync_timestamps_[i - 1] << " has interval " << interval;
+    }
+
+    // Check frame id interval
+    for (size_t i = 1; i < num_frame_ids; ++i) {
+      int64_t interval = frame_ids_[i] - frame_ids_[i - 1];
+      EXPECT_EQ(interval, 1)
+          << "frame id " << frame_ids_[i] << " jumps from " << frame_ids_[i - 1]
+          << " with interval " << interval;
+    }
+  }
+
+  // Fetches the latest N samples and checks that every output vector is
+  // capped by min(N, queue size), sizes agree, and values are increasing.
+  void TestGetLatestNSamples(int num_sample_to_query,
+                             std::vector<int64_t>* vsync_timestamps,
+                             std::vector<int64_t>* frame_ids,
+                             std::vector<int64_t>* boottime_timestamps,
+                             std::vector<int64_t>* arrival_timestamps) {
+    sensor_sync_ptr_->GetLatestNSamples(num_sample_to_query, vsync_timestamps,
+                                        frame_ids, boottime_timestamps,
+                                        arrival_timestamps);
+    size_t num_vsync_timestamps = vsync_timestamps->size();
+    size_t num_frame_ids = frame_ids->size();
+    size_t num_boottime_timestamps = boottime_timestamps->size();
+    size_t num_arrival_timestamps = arrival_timestamps->size();
+    size_t max_sample_number = num_sample_to_query < kEventQueueSize
+                                   ? num_sample_to_query
+                                   : kEventQueueSize;
+    EXPECT_LE(num_vsync_timestamps, max_sample_number);
+    EXPECT_LE(num_frame_ids, max_sample_number);
+    EXPECT_LE(num_boottime_timestamps, max_sample_number);
+    EXPECT_LE(num_arrival_timestamps, max_sample_number);
+    EXPECT_EQ(num_vsync_timestamps, num_arrival_timestamps);
+    EXPECT_EQ(num_frame_ids, num_arrival_timestamps);
+    EXPECT_EQ(num_boottime_timestamps, num_arrival_timestamps);
+    for (size_t i = 1; i < num_vsync_timestamps; ++i) {
+      EXPECT_LT(vsync_timestamps->at(i - 1), vsync_timestamps->at(i));
+    }
+    for (size_t i = 1; i < num_frame_ids; ++i) {
+      EXPECT_LT(frame_ids->at(i - 1), frame_ids->at(i));
+    }
+    for (size_t i = 1; i < num_boottime_timestamps; ++i) {
+      EXPECT_LT(boottime_timestamps->at(i - 1), boottime_timestamps->at(i));
+    }
+    for (size_t i = 1; i < num_arrival_timestamps; ++i) {
+      EXPECT_LT(arrival_timestamps->at(i - 1), arrival_timestamps->at(i));
+    }
+  }
+
+  // Input boottime_timestamp and reference_vsync_timestamp must be valid.
+  void TestSyncTimestamp(int64_t boottime_timestamp,
+                         int64_t reference_vsync_timestamp) {
+    // Test valid input.
+    int64_t vsync_timestamp =
+        sensor_sync_ptr_->SyncTimestamp(boottime_timestamp);
+    EXPECT_NE(vsync_timestamp, boottime_timestamp);
+    EXPECT_EQ(vsync_timestamp, reference_vsync_timestamp);
+    // Test invalid input.
+    int64_t invalid_boottime_timestamp = boottime_timestamp + 15000000;
+    vsync_timestamp =
+        sensor_sync_ptr_->SyncTimestamp(invalid_boottime_timestamp);
+    EXPECT_EQ(vsync_timestamp, invalid_boottime_timestamp);
+  }
+
+  // Input boottime_timestamp, frame_id and reference_vsync_timestamp
+  // must be valid.
+  void TestMatchTimestamp(int64_t boottime_timestamp, int64_t frame_id,
+                          int64_t reference_vsync_timestamp) {
+    // Test valid input.
+    int64_t vsync_timestamp =
+        sensor_sync_ptr_->MatchTimestamp(boottime_timestamp, frame_id);
+    EXPECT_NE(vsync_timestamp, boottime_timestamp);
+    EXPECT_EQ(vsync_timestamp, reference_vsync_timestamp);
+    // Test invalid input.
+    int64_t invalid_boottime_timestamp = boottime_timestamp + 15000000;
+    vsync_timestamp =
+        sensor_sync_ptr_->MatchTimestamp(invalid_boottime_timestamp, frame_id);
+    EXPECT_EQ(vsync_timestamp, invalid_boottime_timestamp);
+    int64_t invalid_frame_id = frame_id + 1;
+    vsync_timestamp =
+        sensor_sync_ptr_->MatchTimestamp(boottime_timestamp, invalid_frame_id);
+    EXPECT_EQ(vsync_timestamp, boottime_timestamp);
+  }
+
+  void RunTest(int64_t query_interval_ms, int32_t num_query) {
+    const std::chrono::milliseconds kCheckPeriod(query_interval_ms);
+    for (int i = 0; i < num_query; ++i) {
+      // Test get Vsync timestamps.
+      std::this_thread::sleep_for(kCheckPeriod);
+      std::vector<int64_t> vsync_timestamps;
+      std::vector<int64_t> arrival_timestamps;
+      std::vector<int64_t> frame_ids;
+      std::vector<int64_t> boottime_timestamps;
+      TestGetLatestNSamples(/*num_sample_to_query=*/3, &vsync_timestamps,
+                            &frame_ids, &boottime_timestamps,
+                            &arrival_timestamps);
+      if (vsync_timestamps.size() == frame_ids.size() &&
+          vsync_timestamps.size() == boottime_timestamps.size() &&
+          vsync_timestamps.size() != 0) {
+        int64_t input_boottime_timestamp = boottime_timestamps.back();
+        int64_t input_frame_id = frame_ids.back();
+        int64_t reference_vsync_timestamp = vsync_timestamps.back();
+        TestSyncTimestamp(input_boottime_timestamp, reference_vsync_timestamp);
+        TestMatchTimestamp(input_boottime_timestamp, input_frame_id,
+                           reference_vsync_timestamp);
+      }
+    }
+  }
+
+  // Dumps all collected vsync/boottime/frame-id/arrival samples to a
+  // timestamped log file under /vendor/bin/lib_sensor_listener_test/.
+  void LogDataToFile() {
+    std::stringstream filename;
+    filename << "camera_" << vsync_camera_id_ << "_sensor_sync_log";
+    std::string output_filename;
+    bool result = GenerateLogFilename(filename.str(), &output_filename);
+    ASSERT_EQ(result, true) << "fail to create goog_sensor_sync test log file";
+    std::cout << "save sensor_sync log to " << output_filename << "\n";
+    std::ofstream ofs(output_filename);
+    for (size_t i = 0; i < vsync_timestamps_.size(); ++i) {
+      ofs << "vsync: " << vsync_timestamps_[i]
+          << " boottime: " << boottime_timestamps_[i]
+          << " frame_id: " << frame_ids_[i]
+          << " arrival_time: " << vsync_arrival_timestamps_[i] << "\n";
+    }
+  }
+
+  ::android::sp<GoogSensorSync> sensor_sync_ptr_ = nullptr;
+  uint32_t vsync_camera_id_;
+  const int kEventQueueSize = 5;
+  std::vector<int64_t> vsync_timestamps_;
+  std::vector<int64_t> vsync_arrival_timestamps_;
+  std::vector<int64_t> boottime_timestamps_;
+  std::vector<int64_t> frame_ids_;
+};
+
+TEST_F(GoogSensorSyncTest, TestP4BackWideCamera) {
+  SetupTestEnvironment(CameraId::kPixel4BackWide);
+  const int kNumQuery = 200;
+  RunTest(/*query_interval_ms=*/33, /*num_query=*/kNumQuery);
+  TeardownTestEnvironment();
+  CheckSamplesValidy(/*num_query=*/kNumQuery);
+  LogDataToFile();
+}
+
+TEST_F(GoogSensorSyncTest, TestP4BackTeleCamera) {
+  SetupTestEnvironment(CameraId::kPixel4BackTele);
+  const int kNumQuery = 200;
+  RunTest(/*query_interval_ms=*/33, /*num_query=*/kNumQuery);
+  TeardownTestEnvironment();
+  CheckSamplesValidy(/*num_query=*/kNumQuery);
+  LogDataToFile();
+}
+
+// Event payload on Slider/P21 does not contain boottime timestamp as in
+// Pixel 4. This test checks the event period and delay to be in reasonable
+// ranges. Please manually open AOSP rear camera first and run this test.
+TEST_F(GoogSensorSyncTest, TestSliderBackWideCamera) {
+  uint8_t camera_id_int = CameraIdMapping(CameraId::kSliderBackWide);
+  ASSERT_NE(camera_id_int, static_cast<uint8_t>(CameraId::kMaxNum));
+  vsync_camera_id_ = static_cast<uint32_t>(camera_id_int);
+
+  // Create vsync sensor listener.
+  sensor_sync_ptr_ = GoogSensorSync::Create(camera_id_int, kEventQueueSize);
+  ASSERT_NE(sensor_sync_ptr_, nullptr);
+  ASSERT_TRUE(sensor_sync_ptr_->GetSensorEnablingStatus());
+
+  vsync_timestamps_.clear();
+  // Create customized callback function to collect incoming event data.
+  std::function<void(const ExtendedSensorEvent& event)> collect =
+      [this](const ExtendedSensorEvent& event) {
+        if (vsync_timestamps_.size() > 1) {
+          int period_ms =
+              (event.sensor_event.timestamp - vsync_timestamps_.back()) /
+              1000'000;
+          const int kDefaultFrameDurationMs =
+              34;  // Assume camera runs at 30 FPS.
+          EXPECT_LE(period_ms, kDefaultFrameDurationMs);
+          ALOGV("period %d", period_ms);
+        }
+        // Most delays are < 5 ms, but sometimes it can go up to > 15.
+        const int kAllowedDelayMs = 20;
+        int delay_ms =
+            (event.event_arrival_time_ns - event.sensor_event.timestamp) /
+            1000'000;
+        EXPECT_LE(delay_ms, kAllowedDelayMs);
+
+        vsync_timestamps_.push_back(event.sensor_event.timestamp);
+        ALOGV("delay %d", delay_ms);
+      };
+  sensor_sync_ptr_->SetEventProcessor(collect);
+
+  // Let the camera and sensor run for 10 sec.
+  const std::chrono::milliseconds kTestPeriod(10000);
+  std::this_thread::sleep_for(kTestPeriod);
+}
+
+}  // namespace camera_sensor_listener
+}  // namespace android
diff --git a/common/sensor_listener/tests/how_to_run.txt b/common/sensor_listener/tests/how_to_run.txt
new file mode 100644
index 0000000..87ccae3
--- /dev/null
+++ b/common/sensor_listener/tests/how_to_run.txt
@@ -0,0 +1,53 @@
+# Build tests:
+mmm -j16 hardware/google/camera/common/sensor_listener/
+
+# Currently there are 4 testsuites:
+# GoogSensorSyncTest: test Vsync sensor.
+# GoogGyroTest: test gyro direct channel and polling based gyro.
+# GoogSensorEnvironmentTest: test environment sensors, including:
+# device_orientation, light, proximity.
+# GoogSensorMotionTest: test motion sensors, including
+# accelerometer, gravity, gyroscope, linear_acceleration, magnetic_field.
+
+# Install test:
+adb shell mkdir /vendor/bin/lib_sensor_listener_test
+adb push $OUT/testcases/lib_sensor_listener_test/arm64/lib_sensor_listener_test /vendor/bin/lib_sensor_listener_test/
+
+# Run gyro test:
+adb shell /vendor/bin/lib_sensor_listener_test/lib_sensor_listener_test --gtest_filter=GoogGyroTest.*
+
+# Run device_orientation sensor test, try to rotate device:
+adb shell /vendor/bin/lib_sensor_listener_test/lib_sensor_listener_test --gtest_filter=GoogSensorEnvironmentTest.TestDeviceOrientation
+
+# Run light sensor test, try to put device under different lighting conditions:
+adb shell /vendor/bin/lib_sensor_listener_test/lib_sensor_listener_test --gtest_filter=GoogSensorEnvironmentTest.TestLight
+
+# Run proximity sensor test, try to move hands close/away from device:
+adb shell /vendor/bin/lib_sensor_listener_test/lib_sensor_listener_test --gtest_filter=GoogSensorEnvironmentTest.TestProximity
+
+# Run accelerometer sensor test:
+adb shell /vendor/bin/lib_sensor_listener_test/lib_sensor_listener_test --gtest_filter=GoogSensorMotionTest.TestAccelerometer
+
+# Run gravity sensor test:
+adb shell /vendor/bin/lib_sensor_listener_test/lib_sensor_listener_test --gtest_filter=GoogSensorMotionTest.TestGravity
+
+# Run gyroscope sensor test:
+adb shell /vendor/bin/lib_sensor_listener_test/lib_sensor_listener_test --gtest_filter=GoogSensorMotionTest.TestGyroscope
+
+# Run linear_acceleration sensor test:
+adb shell /vendor/bin/lib_sensor_listener_test/lib_sensor_listener_test --gtest_filter=GoogSensorMotionTest.TestLinearAcceleration
+
+# Run magnetic_field sensor test:
+adb shell /vendor/bin/lib_sensor_listener_test/lib_sensor_listener_test --gtest_filter=GoogSensorMotionTest.TestMagneticField
+
+# Run GoogSensorSyncTest
+# Since this test requires camera active streaming, install mCamera.apk first.
+# Steps:
+adb install -r -g vendor/google/camera/test/factory/prebuilt/mCamera.apk
+sh hardware/google/camera/common/sensor_listener/tests/run_goog_sensor_sync_test.sh
+
+# Test slider rear camera; Please open AOSP camera app manually first and then run the test.
+adb shell /vendor/bin/lib_sensor_listener_test/lib_sensor_listener_test --gtest_filter=GoogSensorSyncTest.TestSliderBackWideCamera
+
+# If making changes to lib_sensor_listener, update lib_sensor_listener as well:
+adb push $OUT/vendor/lib64/lib_sensor_listener.so /vendor/lib64/
diff --git a/common/sensor_listener/tests/run_goog_sensor_sync_test.sh b/common/sensor_listener/tests/run_goog_sensor_sync_test.sh
new file mode 100644
index 0000000..a5a367b
--- /dev/null
+++ b/common/sensor_listener/tests/run_goog_sensor_sync_test.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+# Test P4 back wide physical camera
+adb root
+adb shell am start -n com.google.android.mcamera/.MainActivity --es type photo --es cameraId 0
+adb shell /vendor/bin/lib_sensor_listener_test/lib_sensor_listener_test --gtest_filter=GoogSensorSyncTest.TestP4BackWideCamera
+adb shell am force-stop com.google.android.mcamera
+
+sleep 10
+
+# Test P4 back tele physical camera
+adb shell am start -n com.google.android.mcamera/.MainActivity --es type photo --es cameraId 2
+adb shell /vendor/bin/lib_sensor_listener_test/lib_sensor_listener_test --gtest_filter=GoogSensorSyncTest.TestP4BackTeleCamera
+adb shell am force-stop com.google.android.mcamera
diff --git a/common/sensor_listener/tests/sensor_test_utils.h b/common/sensor_listener/tests/sensor_test_utils.h
new file mode 100644
index 0000000..1b457d3
--- /dev/null
+++ b/common/sensor_listener/tests/sensor_test_utils.h
@@ -0,0 +1,65 @@
+#pragma once
+
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <sys/types.h>
+
+#include <atomic>
+#include <cerrno>
+#include <cstring>
+#include <ctime>
+#include <sstream>
+#include <string>
+
+#include <log/log.h>
+#include <utils/SystemClock.h>
+
+namespace android {
+namespace camera_sensor_listener {
+namespace {
+
+// Based on input filename, generate output filename including
+// full path and timestamp.
+// Input:
+//   input_filename: partial filename, usually log type.
+// Output:
+//   output_full_filename: final output filename with full path.
+// Example:
+//   input_filename: gyro_direct_log
+//   output_full_filename:
+//     /vendor/bin/lib_sensor_listener_test/gyro_direct_log_HHMMSS_usec.txt
+bool GenerateLogFilename(const std::string& input_filename,
+                         std::string* output_full_filename) {
+  const char* path = "/vendor/bin/lib_sensor_listener_test/";
+  struct stat directory_stat;
+  memset(&directory_stat, 0, sizeof(directory_stat));
+  if (stat(path, &directory_stat) != 0) {
+    // Create the directory if it doesn't exist.
+    if (errno == ENOENT) {
+      if (mkdir(path, S_IRWXU | S_IRWXG | S_IRWXO) != 0) {
+        ALOGE("%s: %d: creating %s failed. (errno: %d)", __func__, __LINE__,
+              path, errno);
+        return false;
+      }
+    } else {
+      ALOGE("%s: %d: stat(%s) failed: (errno: %d).", __func__, __LINE__, path,
+            errno);
+      return false;
+    }
+  }
+
+  char timebuf[256] = {0};
+  struct timeval now_time;
+  gettimeofday(&now_time, nullptr);
+  strftime(timebuf, 256, "%H%M%S", localtime(&now_time.tv_sec));
+
+  std::stringstream ss;
+  ss << path << input_filename << "_" << timebuf << "_" << now_time.tv_usec
+     << ".txt";
+  *output_full_filename = ss.str();
+  return true;
+}
+
+}  // namespace
+}  // namespace camera_sensor_listener
+}  // namespace android
diff --git a/devices/EmulatedCamera/hwl/Android.bp b/devices/EmulatedCamera/hwl/Android.bp
index 9626a72..a195bfd 100644
--- a/devices/EmulatedCamera/hwl/Android.bp
+++ b/devices/EmulatedCamera/hwl/Android.bp
@@ -41,8 +41,10 @@
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
         "android.hardware.sensors@1.0",
         "android.hidl.allocator@1.0",
+        "lib_profiler",
         "libbase",
         "libcamera_metadata",
         "libcutils",
diff --git a/devices/EmulatedCamera/hwl/Base.h b/devices/EmulatedCamera/hwl/Base.h
index 543f73f..883b61a 100644
--- a/devices/EmulatedCamera/hwl/Base.h
+++ b/devices/EmulatedCamera/hwl/Base.h
@@ -27,6 +27,7 @@
 namespace android {
 
 using android::hardware::camera::common::V1_0::helper::HandleImporter;
+using android::hardware::graphics::common::V1_1::PixelFormat;
 using google_camera_hal::HwlPipelineCallback;
 using google_camera_hal::StreamBuffer;
 
@@ -37,11 +38,12 @@
   uint32_t y_stride = 0;
   uint32_t cbcr_stride = 0;
   uint32_t cbcr_step = 0;
+  size_t bytesPerPixel = 1;
 };
 
 struct SinglePlane {
   uint8_t* img = nullptr;
-  uint32_t stride = 0;
+  uint32_t stride_in_bytes = 0;
   uint32_t buffer_size = 0;
 };
 
@@ -50,10 +52,10 @@
   uint32_t frame_number;
   uint32_t pipeline_id;
   uint32_t camera_id;
-  android_pixel_format_t format;
+  PixelFormat format;
   android_dataspace_t dataSpace;
   StreamBuffer stream_buffer;
-  HandleImporter importer;
+  std::shared_ptr<HandleImporter> importer;
   HwlPipelineCallback callback;
   int acquire_fence_fd;
   bool is_input;
@@ -64,14 +66,15 @@
     YCbCrPlanes img_y_crcb;
   } plane;
 
-  SensorBuffer()
+  SensorBuffer(std::shared_ptr<HandleImporter> handle_importer)
       : width(0),
         height(0),
         frame_number(0),
         pipeline_id(0),
         camera_id(0),
-        format(HAL_PIXEL_FORMAT_RGBA_8888),
+        format(PixelFormat::RGBA_8888),
         dataSpace(HAL_DATASPACE_UNKNOWN),
+        importer(handle_importer),
         acquire_fence_fd(-1),
         is_input(false),
         is_failed_request(false),
@@ -97,12 +100,11 @@
   inline void operator()(android::SensorBuffer* buffer) const {
     if (buffer != nullptr) {
       if (buffer->stream_buffer.buffer != nullptr) {
-        buffer->importer.unlock(buffer->stream_buffer.buffer);
-        buffer->importer.freeBuffer(buffer->stream_buffer.buffer);
+        buffer->importer->unlock(buffer->stream_buffer.buffer);
       }
 
       if (buffer->acquire_fence_fd >= 0) {
-        buffer->importer.closeFence(buffer->acquire_fence_fd);
+        buffer->importer->closeFence(buffer->acquire_fence_fd);
       }
 
       if ((buffer->stream_buffer.status != BufferStatus::kOk) &&
diff --git a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.cpp b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.cpp
index 6974b6a..f6ffaee 100644
--- a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.cpp
+++ b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.cpp
@@ -67,16 +67,38 @@
 }
 
 status_t EmulatedCameraDeviceHwlImpl::Initialize() {
-  auto ret = GetSensorCharacteristics(static_metadata_.get(), &sensor_chars_);
+  auto ret = GetSensorCharacteristics(static_metadata_.get(),
+                                      &sensor_chars_[camera_id_]);
   if (ret != OK) {
     ALOGE("%s: Unable to extract sensor characteristics %s (%d)", __FUNCTION__,
           strerror(-ret), ret);
     return ret;
   }
 
-  stream_coniguration_map_ =
+  stream_configuration_map_ =
       std::make_unique<StreamConfigurationMap>(*static_metadata_);
+  stream_configuration_map_max_resolution_ =
+      std::make_unique<StreamConfigurationMap>(*static_metadata_,
+                                               /*maxResolution*/ true);
 
+  for (const auto& it : *physical_device_map_) {
+    uint32_t physical_id = it.first;
+    HalCameraMetadata* physical_hal_metadata = it.second.second.get();
+    physical_stream_configuration_map_.emplace(
+        physical_id,
+        std::make_unique<StreamConfigurationMap>(*physical_hal_metadata));
+    physical_stream_configuration_map_max_resolution_.emplace(
+        physical_id, std::make_unique<StreamConfigurationMap>(
+                         *physical_hal_metadata, /*maxResolution*/ true));
+
+    ret = GetSensorCharacteristics(physical_hal_metadata,
+                                   &sensor_chars_[physical_id]);
+    if (ret != OK) {
+      ALOGE("%s: Unable to extract camera %d sensor characteristics %s (%d)",
+            __FUNCTION__, physical_id, strerror(-ret), ret);
+      return ret;
+    }
+  }
   return OK;
 }
 
@@ -164,7 +186,10 @@
 bool EmulatedCameraDeviceHwlImpl::IsStreamCombinationSupported(
     const StreamConfiguration& stream_config) {
   return EmulatedSensor::IsStreamCombinationSupported(
-      stream_config, *stream_coniguration_map_, sensor_chars_);
+      camera_id_, stream_config, *stream_configuration_map_,
+      *stream_configuration_map_max_resolution_,
+      physical_stream_configuration_map_,
+      physical_stream_configuration_map_max_resolution_, sensor_chars_);
 }
 
 }  // namespace android
diff --git a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.h b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.h
index 90352f5..c88b804 100644
--- a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.h
+++ b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.h
@@ -80,10 +80,13 @@
   const uint32_t camera_id_ = 0;
 
   std::unique_ptr<HalCameraMetadata> static_metadata_;
-  std::unique_ptr<StreamConfigurationMap> stream_coniguration_map_;
+  std::unique_ptr<StreamConfigurationMap> stream_configuration_map_;
+  std::unique_ptr<StreamConfigurationMap> stream_configuration_map_max_resolution_;
+  PhysicalStreamConfigurationMap physical_stream_configuration_map_;
+  PhysicalStreamConfigurationMap physical_stream_configuration_map_max_resolution_;
   PhysicalDeviceMapPtr physical_device_map_;
   std::shared_ptr<EmulatedTorchState> torch_state_;
-  SensorCharacteristics sensor_chars_;
+  LogicalCharacteristics sensor_chars_;
 };
 
 }  // namespace android
diff --git a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.cpp b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.cpp
index e848663..ee6461b 100644
--- a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.cpp
+++ b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.cpp
@@ -24,10 +24,53 @@
 #include <utils/Trace.h>
 
 #include "EmulatedSensor.h"
+#include "utils.h"
 #include "utils/HWLUtils.h"
 
 namespace android {
 
+using google_camera_hal::Rect;
+using google_camera_hal::utils::GetSensorActiveArraySize;
+using google_camera_hal::utils::HasCapability;
+
+std::unique_ptr<EmulatedCameraZoomRatioMapperHwlImpl>
+EmulatedCameraZoomRatioMapperHwlImpl::Create(
+    const std::unordered_map<uint32_t, std::pair<Dimension, Dimension>>& dims) {
+  auto zoom_ratio_mapper_hwl_impl =
+      std::make_unique<EmulatedCameraZoomRatioMapperHwlImpl>(dims);
+  return zoom_ratio_mapper_hwl_impl;
+}
+
+EmulatedCameraZoomRatioMapperHwlImpl::EmulatedCameraZoomRatioMapperHwlImpl(
+    const std::unordered_map<uint32_t, std::pair<Dimension, Dimension>>& dims)
+    : camera_ids_to_dimensions_(dims) {
+}
+
+bool EmulatedCameraZoomRatioMapperHwlImpl::GetActiveArrayDimensionToBeUsed(
+    uint32_t camera_id, const HalCameraMetadata* settings,
+    Dimension* active_array_dimension) const {
+  auto dim_it = camera_ids_to_dimensions_.find(camera_id);
+  if (settings == nullptr || active_array_dimension == nullptr ||
+      dim_it == camera_ids_to_dimensions_.end()) {
+    // Null settings (default request) or camera id not found: no override.
+    return false;
+  }
+  camera_metadata_ro_entry entry = {};
+  status_t res = settings->Get(ANDROID_SENSOR_PIXEL_MODE, &entry);
+  if (res != OK) {
+    // return default res dimension
+    *active_array_dimension = dim_it->second.second;
+    return true;
+  }
+  if (entry.data.u8[0] == ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
+    // return max res mode dimension
+    *active_array_dimension = dim_it->second.first;
+  } else {
+    *active_array_dimension = dim_it->second.second;
+  }
+  return true;
+}
+
 std::unique_ptr<EmulatedCameraDeviceSessionHwlImpl>
 EmulatedCameraDeviceSessionHwlImpl::Create(
     uint32_t camera_id, std::unique_ptr<HalCameraMetadata> static_meta,
@@ -57,12 +100,50 @@
   return session;
 }
 
+static std::pair<Dimension, Dimension> GetArrayDimensions(
+    uint32_t camera_id, const HalCameraMetadata* metadata) {
+  Rect active_array_size;
+  Dimension active_array_size_dimension;
+  Dimension active_array_size_dimension_maximum_resolution;
+  status_t ret = GetSensorActiveArraySize(metadata, &active_array_size);
+  if (ret != OK) {
+    ALOGE("%s: Failed to get sensor active array size for camera id %d",
+          __FUNCTION__, (int)camera_id);
+    return std::pair<Dimension, Dimension>();
+  }
+  active_array_size_dimension = {
+      active_array_size.right - active_array_size.left + 1,
+      active_array_size.bottom - active_array_size.top + 1};
+
+  active_array_size_dimension_maximum_resolution = active_array_size_dimension;
+  if (HasCapability(
+          metadata,
+          ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR)) {
+    status_t ret = GetSensorActiveArraySize(metadata, &active_array_size,
+                                            /*maximum_resolution=*/true);
+    if (ret != OK) {
+      ALOGE(
+          "%s: Failed to get max resolution sensor active array size for "
+          "camera id %d",
+          __FUNCTION__, (int)camera_id);
+      return std::pair<Dimension, Dimension>();
+    }
+    active_array_size_dimension_maximum_resolution = {
+        active_array_size.right - active_array_size.left + 1,
+        active_array_size.bottom - active_array_size.top + 1};
+  }
+  return std::make_pair(active_array_size_dimension_maximum_resolution,
+                        active_array_size_dimension);
+}
+
 status_t EmulatedCameraDeviceSessionHwlImpl::Initialize(
     uint32_t camera_id, std::unique_ptr<HalCameraMetadata> static_meta) {
   camera_id_ = camera_id;
   static_metadata_ = std::move(static_meta);
-  stream_coniguration_map_ =
+  stream_configuration_map_ =
       std::make_unique<StreamConfigurationMap>(*static_metadata_);
+  stream_configuration_map_max_resolution_ =
+      std::make_unique<StreamConfigurationMap>(*static_metadata_, true);
   camera_metadata_ro_entry_t entry;
   auto ret = static_metadata_->Get(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &entry);
   if (ret != OK) {
@@ -73,6 +154,11 @@
 
   max_pipeline_depth_ = entry.data.u8[0];
 
+  std::unordered_map<uint32_t, std::pair<Dimension, Dimension>>
+      camera_ids_to_dimensions;
+  camera_ids_to_dimensions[camera_id] =
+      GetArrayDimensions(camera_id, static_metadata_.get());
+
   ret = GetSensorCharacteristics(static_metadata_.get(), &sensor_chars_);
   if (ret != OK) {
     ALOGE("%s: Unable to extract sensor characteristics %s (%d)", __FUNCTION__,
@@ -91,8 +177,18 @@
             __FUNCTION__, it.first, strerror(-ret), ret);
       return ret;
     }
+    camera_ids_to_dimensions[it.first] =
+        GetArrayDimensions(it.first, it.second.second.get());
+    physical_stream_configuration_map_.emplace(
+        it.first,
+        std::make_unique<StreamConfigurationMap>(*it.second.second.get()));
+    physical_stream_configuration_map_max_resolution_.emplace(
+        it.first, std::make_unique<StreamConfigurationMap>(
+                      *it.second.second.get(), true));
   }
 
+  zoom_ratio_mapper_hwl_impl_ = std::move(
+      EmulatedCameraZoomRatioMapperHwlImpl::Create(camera_ids_to_dimensions));
   return InitializeRequestProcessor();
 }
 
@@ -106,8 +202,8 @@
     return ret;
   }
 
-  request_processor_ =
-      std::make_unique<EmulatedRequestProcessor>(camera_id_, emulated_sensor);
+  request_processor_ = std::make_unique<EmulatedRequestProcessor>(
+      camera_id_, emulated_sensor, session_callback_);
 
   return request_processor_->Initialize(
       HalCameraMetadata::Clone(static_metadata_.get()),
@@ -146,7 +242,10 @@
   }
 
   if (!EmulatedSensor::IsStreamCombinationSupported(
-          request_config, *stream_coniguration_map_, sensor_chars_)) {
+          physical_camera_id, request_config, *stream_configuration_map_,
+          *stream_configuration_map_max_resolution_,
+          physical_stream_configuration_map_,
+          physical_stream_configuration_map_max_resolution_, logical_chars_)) {
     ALOGE("%s: Stream combination not supported!", __FUNCTION__);
     return BAD_VALUE;
   }
@@ -187,7 +286,19 @@
              .width = stream.width,
              .height = stream.height,
              .buffer_size = stream.buffer_size,
-             .is_input = is_input,}));
+             .is_input = is_input,
+             .group_id = stream.group_id}));
+
+    if (stream.group_id != -1 && stream.is_physical_camera_stream) {
+      // TODO: For quad bayer camera, the logical camera id should be used if
+      // this is not a physical camera.
+      dynamic_stream_id_map_[stream.physical_camera_id][stream.group_id] =
+          stream.id;
+    }
+    if (!stream.is_physical_camera_stream &&
+        (stream.format == HAL_PIXEL_FORMAT_RAW16)) {
+      has_raw_stream_ = true;
+    }
   }
 
   pipelines_.push_back(emulated_pipeline);
@@ -256,12 +367,35 @@
   }
 
   pipelines_built_ = false;
+  has_raw_stream_ = false;
   pipelines_.clear();
   request_processor_ = nullptr;
 }
 
+status_t EmulatedCameraDeviceSessionHwlImpl::CheckOutputFormatsForInput(
+    const HwlPipelineRequest& request,
+    const std::unordered_map<uint32_t, EmulatedStream>& streams,
+    const std::unique_ptr<StreamConfigurationMap>& stream_configuration_map,
+    android_pixel_format_t input_format) {
+  auto output_formats =
+      stream_configuration_map->GetValidOutputFormatsForInput(input_format);
+  for (const auto& output_buffer : request.output_buffers) {
+    auto output_stream = streams.at(output_buffer.stream_id);
+    if (output_formats.find(output_stream.override_format) ==
+        output_formats.end()) {
+      ALOGE(
+          "%s: Reprocess request with input format: 0x%x to output "
+          "format: 0x%x"
+          " not supported!",
+          __FUNCTION__, input_format, output_stream.override_format);
+      return BAD_VALUE;
+    }
+  }
+  return OK;
+}
+
 status_t EmulatedCameraDeviceSessionHwlImpl::SubmitRequests(
-    uint32_t frame_number, const std::vector<HwlPipelineRequest>& requests) {
+    uint32_t frame_number, std::vector<HwlPipelineRequest>& requests) {
   ATRACE_CALL();
   std::lock_guard<std::mutex> lock(api_mutex_);
 
@@ -271,21 +405,13 @@
       for (const auto& input_buffer : request.input_buffers) {
         const auto& streams = pipelines_[request.pipeline_id].streams;
         auto input_stream = streams.at(input_buffer.stream_id);
-        auto output_formats =
-            stream_coniguration_map_->GetValidOutputFormatsForInput(
-                input_stream.override_format);
-        for (const auto& output_buffer : request.output_buffers) {
-          auto output_stream = streams.at(output_buffer.stream_id);
-          if (output_formats.find(output_stream.override_format) ==
-              output_formats.end()) {
-            ALOGE(
-                "%s: Reprocess request with input format: 0x%x to output "
-                "format: 0x%x"
-                " not supported!",
-                __FUNCTION__, input_stream.override_format,
-                output_stream.override_format);
-            return BAD_VALUE;
-          }
+        if ((CheckOutputFormatsForInput(request, streams,
+                                        stream_configuration_map_,
+                                        input_stream.override_format) != OK) &&
+            (CheckOutputFormatsForInput(
+                 request, streams, stream_configuration_map_max_resolution_,
+                 input_stream.override_format) != OK)) {
+          return BAD_VALUE;
         }
       }
     }
@@ -297,8 +423,9 @@
     return INVALID_OPERATION;
   }
 
-  return request_processor_->ProcessPipelineRequests(frame_number, requests,
-                                                     pipelines_);
+  return request_processor_->ProcessPipelineRequests(
+      frame_number, requests, pipelines_, dynamic_stream_id_map_,
+      has_raw_stream_);
 }
 
 status_t EmulatedCameraDeviceSessionHwlImpl::Flush() {
@@ -371,4 +498,14 @@
   return OK;
 }
 
+void EmulatedCameraDeviceSessionHwlImpl::SetSessionCallback(
+    const HwlSessionCallback& hwl_session_callback) {
+  ATRACE_CALL();
+  std::lock_guard<std::mutex> lock(api_mutex_);
+  session_callback_ = hwl_session_callback;
+  if (request_processor_ != nullptr) {
+    request_processor_->SetSessionCallback(hwl_session_callback);
+  }
+}
+
 }  // namespace android
diff --git a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.h b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.h
index 076d386..52a4920 100644
--- a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.h
+++ b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.h
@@ -31,17 +31,48 @@
 
 using google_camera_hal::CameraDeviceHwl;
 using google_camera_hal::CameraDeviceSessionHwl;
+using google_camera_hal::CaptureRequest;
+using google_camera_hal::CaptureResult;
+using google_camera_hal::Dimension;
 using google_camera_hal::HalStream;
 using google_camera_hal::HwlOfflinePipelineRole;
 using google_camera_hal::HwlPipelineCallback;
 using google_camera_hal::HwlPipelineRequest;
 using google_camera_hal::HwlSessionCallback;
 using google_camera_hal::IMulticamCoordinatorHwl;
-using google_camera_hal::StreamConfiguration;
 using google_camera_hal::RequestTemplate;
 using google_camera_hal::SessionDataKey;
 using google_camera_hal::Stream;
 using google_camera_hal::StreamConfiguration;
+using google_camera_hal::ZoomRatioMapperHwl;
+
+class EmulatedCameraZoomRatioMapperHwlImpl : public ZoomRatioMapperHwl {
+ public:
+  EmulatedCameraZoomRatioMapperHwlImpl(
+      const std::unordered_map<uint32_t, std::pair<Dimension, Dimension>>& dims);
+  virtual ~EmulatedCameraZoomRatioMapperHwlImpl() = default;
+
+  // Limit zoom ratio if concurrent mode is on
+  virtual void LimitZoomRatioIfConcurrent(float*) const override{};
+
+  // Get the array dimensions to be used for this capture request / result
+  virtual bool GetActiveArrayDimensionToBeUsed(
+      uint32_t camera_id, const HalCameraMetadata* settings,
+      Dimension* active_array_dimension) const override;
+  // Apply zoom ratio to capture request
+  virtual void UpdateCaptureRequest(CaptureRequest*) override{};
+
+  // Apply zoom ratio to capture result
+  virtual void UpdateCaptureResult(CaptureResult*) override{};
+
+  static std::unique_ptr<EmulatedCameraZoomRatioMapperHwlImpl> Create(
+      const std::unordered_map<uint32_t, std::pair<Dimension, Dimension>>& dims);
+
+ private:
+  // camera id -> {max res dimension (array size), default dimension}
+  std::unordered_map<uint32_t, std::pair<Dimension, Dimension>>
+      camera_ids_to_dimensions_;
+};
 
 // Implementation of CameraDeviceSessionHwl interface
 class EmulatedCameraDeviceSessionHwlImpl : public CameraDeviceSessionHwl {
@@ -88,9 +119,8 @@
 
   void DestroyPipelines() override;
 
-  status_t SubmitRequests(
-      uint32_t frame_number,
-      const std::vector<HwlPipelineRequest>& requests) override;
+  status_t SubmitRequests(uint32_t frame_number,
+                          std::vector<HwlPipelineRequest>& requests) override;
 
   status_t Flush() override;
 
@@ -117,8 +147,7 @@
   }  // Noop for now
 
   void SetSessionCallback(
-      const HwlSessionCallback& /*hwl_session_callback*/) override {
-  }
+      const HwlSessionCallback& hwl_session_callback) override;
 
   status_t FilterResultMetadata(HalCameraMetadata* /*metadata*/) const override {
     return OK;
@@ -142,7 +171,7 @@
 
   std::unique_ptr<google_camera_hal::ZoomRatioMapperHwl> GetZoomRatioMapperHwl()
       override {
-    return nullptr;
+    return std::move(zoom_ratio_mapper_hwl_impl_);
   }
   // End override functions in CameraDeviceSessionHwl
 
@@ -151,6 +180,12 @@
                       std::unique_ptr<HalCameraMetadata> static_meta);
   status_t InitializeRequestProcessor();
 
+  status_t CheckOutputFormatsForInput(
+      const HwlPipelineRequest& request,
+      const std::unordered_map<uint32_t, EmulatedStream>& streams,
+      const std::unique_ptr<StreamConfigurationMap>& stream_configuration_map,
+      android_pixel_format_t input_format);
+
   EmulatedCameraDeviceSessionHwlImpl(
       PhysicalDeviceMapPtr physical_devices,
       std::shared_ptr<EmulatedTorchState> torch_state)
@@ -165,14 +200,21 @@
   uint32_t camera_id_ = 0;
   bool error_state_ = false;
   bool pipelines_built_ = false;
+  bool has_raw_stream_ = false;
   std::unique_ptr<HalCameraMetadata> static_metadata_;
   std::vector<EmulatedPipeline> pipelines_;
   std::unique_ptr<EmulatedRequestProcessor> request_processor_;
-  std::unique_ptr<StreamConfigurationMap> stream_coniguration_map_;
+  std::unique_ptr<StreamConfigurationMap> stream_configuration_map_;
+  PhysicalStreamConfigurationMap physical_stream_configuration_map_;
+  PhysicalStreamConfigurationMap physical_stream_configuration_map_max_resolution_;
+  std::unique_ptr<StreamConfigurationMap> stream_configuration_map_max_resolution_;
   SensorCharacteristics sensor_chars_;
   std::shared_ptr<EmulatedTorchState> torch_state_;
   PhysicalDeviceMapPtr physical_device_map_;
   LogicalCharacteristics logical_chars_;
+  HwlSessionCallback session_callback_;
+  DynamicStreamIdMapType dynamic_stream_id_map_;
+  std::unique_ptr<EmulatedCameraZoomRatioMapperHwlImpl> zoom_ratio_mapper_hwl_impl_;
 };
 
 }  // namespace android
diff --git a/devices/EmulatedCamera/hwl/EmulatedCameraProviderHWLImpl.cpp b/devices/EmulatedCamera/hwl/EmulatedCameraProviderHWLImpl.cpp
index f47db4e..910c9cb 100644
--- a/devices/EmulatedCamera/hwl/EmulatedCameraProviderHWLImpl.cpp
+++ b/devices/EmulatedCamera/hwl/EmulatedCameraProviderHWLImpl.cpp
@@ -285,19 +285,51 @@
       ALOGE("%s: Camera id %u does not exist", __FUNCTION__, config.camera_id);
       return BAD_VALUE;
     }
+
     auto stream_configuration_map = std::make_unique<StreamConfigurationMap>(
         *(static_metadata_[config.camera_id]));
-    SensorCharacteristics sensor_chars;
-    status_t ret = GetSensorCharacteristics(
-        (static_metadata_[config.camera_id]).get(), &sensor_chars);
+    auto stream_configuration_map_max_resolution =
+        std::make_unique<StreamConfigurationMap>(
+            *(static_metadata_[config.camera_id]), /*maxResolution*/ true);
+
+    LogicalCharacteristics sensor_chars;
+    status_t ret =
+        GetSensorCharacteristics((static_metadata_[config.camera_id]).get(),
+                                 &sensor_chars[config.camera_id]);
     if (ret != OK) {
       ALOGE("%s: Unable to extract sensor chars for camera id %u", __FUNCTION__,
             config.camera_id);
       return UNKNOWN_ERROR;
     }
+
+    PhysicalStreamConfigurationMap physical_stream_configuration_map;
+    PhysicalStreamConfigurationMap physical_stream_configuration_map_max_resolution;
+    auto const& physicalCameraInfo = camera_id_map_[config.camera_id];
+    for (size_t i = 0; i < physicalCameraInfo.size(); i++) {
+      uint32_t physical_camera_id = physicalCameraInfo[i].second;
+      physical_stream_configuration_map.emplace(
+          physical_camera_id, std::make_unique<StreamConfigurationMap>(
+                                  *(static_metadata_[physical_camera_id])));
+
+      physical_stream_configuration_map_max_resolution.emplace(
+          physical_camera_id,
+          std::make_unique<StreamConfigurationMap>(
+              *(static_metadata_[physical_camera_id]), /*maxResolution*/ true));
+
+      ret = GetSensorCharacteristics(static_metadata_[physical_camera_id].get(),
+                                     &sensor_chars[physical_camera_id]);
+      if (ret != OK) {
+        ALOGE("%s: Unable to extract camera %d sensor characteristics %s (%d)",
+              __FUNCTION__, physical_camera_id, strerror(-ret), ret);
+        return ret;
+      }
+    }
+
     if (!EmulatedSensor::IsStreamCombinationSupported(
-            config.stream_configuration, *stream_configuration_map,
-            sensor_chars)) {
+            config.camera_id, config.stream_configuration,
+            *stream_configuration_map, *stream_configuration_map_max_resolution,
+            physical_stream_configuration_map,
+            physical_stream_configuration_map_max_resolution, sensor_chars)) {
       return OK;
     }
   }
@@ -690,7 +722,7 @@
       }
     }
   }
-  static_metadata_.resize(sizeof(configurationFileLocation));
+  static_metadata_.resize(ARRAY_SIZE(kConfigurationFileLocation));
 
   for (const auto& config_path : configurationFileLocation) {
     if (!android::base::ReadFileToString(config_path, &config)) {
diff --git a/devices/EmulatedCamera/hwl/EmulatedLogicalRequestState.cpp b/devices/EmulatedCamera/hwl/EmulatedLogicalRequestState.cpp
index 4b9b5d3..765e6a7 100644
--- a/devices/EmulatedCamera/hwl/EmulatedLogicalRequestState.cpp
+++ b/devices/EmulatedCamera/hwl/EmulatedLogicalRequestState.cpp
@@ -16,6 +16,7 @@
 
 #define LOG_TAG "EmulatedLogicalState"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
 
 #include "EmulatedLogicalRequestState.h"
 
@@ -37,32 +38,24 @@
     std::unique_ptr<HalCameraMetadata> static_meta,
     PhysicalDeviceMapPtr physical_devices) {
   if ((physical_devices.get() != nullptr) && (!physical_devices->empty())) {
+    zoom_ratio_physical_camera_info_ = GetZoomRatioPhysicalCameraInfo(
+        static_meta.get(), physical_devices.get());
+
     physical_device_map_ = std::move(physical_devices);
-    // If possible map the available focal lengths to individual physical devices
-    camera_metadata_ro_entry_t logical_entry, physical_entry;
-    auto ret = static_meta->Get(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
-                                &logical_entry);
-    if ((ret == OK) && (logical_entry.count > 1)) {
-      for (size_t i = 0; i < logical_entry.count; i++) {
-        for (const auto& it : *physical_device_map_) {
-          if (it.second.first != CameraDeviceStatus::kPresent) {
-            continue;
-          }
-          ret = it.second.second->Get(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
-                               &physical_entry);
-          if ((ret == OK) && (physical_entry.count > 0)) {
-            if (logical_entry.data.f[i] == physical_entry.data.f[0]) {
-              physical_focal_length_map_[physical_entry.data.f[0]] = it.first;
-              break;
-            }
-          }
-        }
+
+    static const float ZOOM_RATIO_THRESHOLD = 0.001f;
+    for (const auto& one_zoom_range : zoom_ratio_physical_camera_info_) {
+      ALOGV("%s: cameraId %d, focalLength %f, zoomRatioRange [%f, %f]",
+            __FUNCTION__, one_zoom_range.physical_camera_id,
+            one_zoom_range.focal_length, one_zoom_range.min_zoom_ratio,
+            one_zoom_range.max_zoom_ratio);
+      if (std::abs(one_zoom_range.min_zoom_ratio - 1.0f) < ZOOM_RATIO_THRESHOLD) {
+        current_physical_camera_ = one_zoom_range.physical_camera_id;
       }
     }
 
-    if (physical_focal_length_map_.size() > 1) {
+    if (zoom_ratio_physical_camera_info_.size() > 1) {
       is_logical_device_ = true;
-      current_focal_length_ = logical_entry.data.f[0];
       for (const auto& it : *physical_device_map_) {
         std::unique_ptr<EmulatedRequestState> physical_request_state =
             std::make_unique<EmulatedRequestState>(it.first);
@@ -88,6 +81,22 @@
   return logical_request_state_->GetDefaultRequest(type, default_settings);
 };
 
+void EmulatedLogicalRequestState::UpdateActivePhysicalId(
+    HalCameraMetadata* result_metadata, uint32_t device_id) {
+  if (result_metadata == nullptr) {
+    return;
+  }
+
+  auto device_id_str = std::to_string(device_id);
+  std::vector<uint8_t> result;
+  result.reserve(device_id_str.size() + 1);
+  result.insert(result.end(), device_id_str.begin(), device_id_str.end());
+  result.push_back('\0');
+
+  result_metadata->Set(ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID,
+                       result.data(), result.size());
+}
+
 std::unique_ptr<HwlPipelineResult>
 EmulatedLogicalRequestState::InitializeLogicalResult(uint32_t pipeline_id,
                                                      uint32_t frame_number) {
@@ -101,18 +110,12 @@
             std::move(physical_request_states_[it]
                           ->InitializeResult(pipeline_id, frame_number)
                           ->result_metadata);
+
+        UpdateActivePhysicalId(ret->physical_camera_results[it].get(), it);
       }
     }
-    auto physical_device_id =
-        std::to_string(physical_focal_length_map_[current_focal_length_]);
-    std::vector<uint8_t> result;
-    result.reserve(physical_device_id.size() + 1);
-    result.insert(result.end(), physical_device_id.begin(),
-                  physical_device_id.end());
-    result.push_back('\0');
 
-    ret->result_metadata->Set(ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID,
-                              result.data(), result.size());
+    UpdateActivePhysicalId(ret->result_metadata.get(), current_physical_camera_);
   }
 
   return ret;
@@ -163,21 +166,6 @@
         }
       }
     }
-
-    camera_metadata_ro_entry entry;
-    auto stat = request_settings->Get(ANDROID_LENS_FOCAL_LENGTH, &entry);
-    if ((stat == OK) && (entry.count == 1)) {
-      if (physical_focal_length_map_.find(entry.data.f[0]) !=
-          physical_focal_length_map_.end()) {
-        current_focal_length_ = entry.data.f[0];
-      } else {
-        ALOGE("%s: Unsupported focal length set: %5.2f, re-using older value!",
-              __FUNCTION__, entry.data.f[0]);
-      }
-    } else {
-      ALOGW("%s: Focal length absent from request, re-using older value!",
-            __FUNCTION__);
-    }
   }
 
   EmulatedSensor::SensorSettings sensor_settings;
@@ -209,42 +197,42 @@
   // physical devices with different focal lengths. Usually real logical
   // cameras like that will have device specific logic to switch between
   // physical sensors. Unfortunately we cannot infer this behavior using only
-  // static camera characteristics. Instead of this, detect the different
-  // focal lengths and update the logical
-  // "android.lens.info.availableFocalLengths" accordingly.
+  // static camera characteristics. Use a simplistic approach of inferring
+  // physical camera based on zoom ratio.
+  std::vector<ZoomRatioPhysicalCameraInfo> zoom_ratio_physical_camera_info =
+      GetZoomRatioPhysicalCameraInfo(logical_chars.get(),
+                                     physical_devices.get());
+
   std::vector<uint8_t> physical_ids;
-  std::set<float> focal_lengths;
-  camera_metadata_ro_entry_t entry;
   for (const auto& physical_device : *physical_devices) {
     auto physical_id = std::to_string(physical_device.first);
     physical_ids.insert(physical_ids.end(), physical_id.begin(),
                         physical_id.end());
     physical_ids.push_back('\0');
-    auto ret = physical_device.second.second->Get(
-        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, &entry);
-    if ((ret == OK) && (entry.count > 0)) {
-      focal_lengths.insert(entry.data.f, entry.data.f + entry.count);
-    }
   }
-  logical_chars->Set(ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
-                     physical_ids.data(), physical_ids.size());
 
-  if (focal_lengths.size() > 1) {
-    std::vector<float> focal_buffer;
-    focal_buffer.reserve(focal_lengths.size());
-    focal_buffer.insert(focal_buffer.end(), focal_lengths.begin(),
-                        focal_lengths.end());
-    logical_chars->Set(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
-                       focal_buffer.data(), focal_buffer.size());
+  if (zoom_ratio_physical_camera_info.size() > 1) {
+    float zoom_range[2];
+    zoom_range[0] = zoom_ratio_physical_camera_info[0].min_zoom_ratio;
+    zoom_range[1] =
+        zoom_ratio_physical_camera_info[zoom_ratio_physical_camera_info.size() - 1]
+            .max_zoom_ratio;
+    logical_chars->Set(ANDROID_CONTROL_ZOOM_RATIO_RANGE, zoom_range, 2);
+
+    logical_chars->Set(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+                       &zoom_range[1], 1);
+
+    logical_chars->Set(ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
+                       physical_ids.data(), physical_ids.size());
 
     // Possibly needs to be removed at some later point:
     int32_t default_physical_id = physical_devices->begin()->first;
     logical_chars->Set(google_camera_hal::kLogicalCamDefaultPhysicalId,
                        &default_physical_id, 1);
 
+    camera_metadata_ro_entry entry;
     logical_chars->Get(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
     std::set<int32_t> keys(entry.data.i32, entry.data.i32 + entry.count);
-    keys.emplace(ANDROID_LENS_FOCAL_LENGTH);
     keys.emplace(ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID);
     std::vector<int32_t> keys_buffer(keys.begin(), keys.end());
     logical_chars->Set(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
@@ -257,23 +245,13 @@
     // Due to API limitations we currently don't support individual physical requests
     logical_chars->Erase(ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS);
     keys.erase(ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS);
-    keys.emplace(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS);
     keys.emplace(ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS);
     keys_buffer.insert(keys_buffer.end(), keys.begin(), keys.end());
     logical_chars->Set(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
                        keys_buffer.data(), keys_buffer.size());
-
-    keys.clear();
-    keys_buffer.clear();
-    logical_chars->Get(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
-    keys.insert(entry.data.i32, entry.data.i32 + entry.count);
-    keys.emplace(ANDROID_LENS_FOCAL_LENGTH);
-    keys_buffer.insert(keys_buffer.end(), keys.begin(), keys.end());
-    logical_chars->Set(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
-                       keys_buffer.data(), keys_buffer.size());
   } else {
     ALOGW(
-        "%s: The logical camera doesn't support different focal lengths. "
+        "%s: The logical camera doesn't support combined zoom ratio ranges. "
         "Emulation "
         "could be"
         " very limited in this case!",
@@ -283,4 +261,141 @@
   return logical_chars;
 }
 
+status_t EmulatedLogicalRequestState::UpdateRequestForDynamicStreams(
+    HwlPipelineRequest* request, const std::vector<EmulatedPipeline>& pipelines,
+    const DynamicStreamIdMapType& dynamic_stream_id_map,
+    bool use_default_physical_camera) {
+  if (request == nullptr) {
+    ALOGE("%s: Request must not be null!", __FUNCTION__);
+    return BAD_VALUE;
+  }
+
+  uint32_t pipeline_id = request->pipeline_id;
+  if (pipeline_id >= pipelines.size()) {
+    ALOGE("%s: Invalid pipeline id %d", __FUNCTION__, pipeline_id);
+    return BAD_VALUE;
+  }
+
+  // Only logical camera support dynamic size streams.
+  if (!is_logical_device_) return OK;
+
+  if (request->settings != nullptr) {
+    camera_metadata_ro_entry entry;
+    auto stat = request->settings->Get(ANDROID_CONTROL_ZOOM_RATIO, &entry);
+    if (stat != OK || entry.count != 1) {
+      ALOGW("%s: Zoom ratio absent from request, re-using older value!",
+            __FUNCTION__);
+      return BAD_VALUE;
+    }
+    if (!use_default_physical_camera) {
+      float zoom_ratio = entry.data.f[0];
+      for (const auto& one_range : zoom_ratio_physical_camera_info_) {
+        if (zoom_ratio >= one_range.min_zoom_ratio &&
+            zoom_ratio <= one_range.max_zoom_ratio) {
+          current_physical_camera_ = one_range.physical_camera_id;
+          break;
+        }
+      }
+    }
+  }
+
+  const auto& current_pipeline = pipelines[pipeline_id];
+  for (auto& output_buffer : request->output_buffers) {
+    auto& current_stream = current_pipeline.streams.at(output_buffer.stream_id);
+    if (current_stream.group_id == -1) continue;
+
+    const auto& stream_ids_for_camera =
+        dynamic_stream_id_map.find(current_physical_camera_);
+    if (stream_ids_for_camera == dynamic_stream_id_map.end()) {
+      ALOGW(
+          "%s: Failed to find physical camera id %d in dynamic stream id map!",
+          __FUNCTION__, current_physical_camera_);
+      continue;
+    }
+    const auto& stream_id =
+        stream_ids_for_camera->second.find(current_stream.group_id);
+    if (stream_id == stream_ids_for_camera->second.end()) {
+      ALOGW(
+          "%s: Failed to find group id %d in dynamic stream id map for camera "
+          "%d",
+          __FUNCTION__, current_stream.group_id, current_physical_camera_);
+      continue;
+    }
+
+    output_buffer.stream_id = stream_id->second;
+  }
+  return OK;
+}
+
+std::vector<ZoomRatioPhysicalCameraInfo>
+EmulatedLogicalRequestState::GetZoomRatioPhysicalCameraInfo(
+    const HalCameraMetadata* logical_chars,
+    const PhysicalDeviceMap* physical_devices) {
+  std::vector<ZoomRatioPhysicalCameraInfo> zoom_ratio_physical_camera_info;
+  if ((logical_chars == nullptr) || (physical_devices == nullptr)) {
+    return zoom_ratio_physical_camera_info;
+  }
+
+  // Get the logical camera's focal length and sensor size
+  camera_metadata_ro_entry_t entry;
+  auto ret =
+      logical_chars->Get(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, &entry);
+  if ((ret != OK) || (entry.count == 0)) {
+    return zoom_ratio_physical_camera_info;
+  }
+  float logical_focal_length = entry.data.f[0];
+  ret = logical_chars->Get(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, &entry);
+  if ((ret != OK) || (entry.count == 0)) {
+    return zoom_ratio_physical_camera_info;
+  }
+  float logical_sensor_width = entry.data.f[0];
+
+  // Derive the zoom ratio boundary values for each physical camera id, based on
+  // focal lengths and camera sensor physical size.
+  for (const auto& physical_device : *physical_devices) {
+    ret = physical_device.second.second->Get(
+        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, &entry);
+    if ((ret == OK) && (entry.count > 0)) {
+      float focal_length = entry.data.f[0];
+      ret = physical_device.second.second->Get(
+          ANDROID_SENSOR_INFO_PHYSICAL_SIZE, &entry);
+      if ((ret == OK) && (entry.count > 0)) {
+        float sensor_width = entry.data.f[0];
+        ret = physical_device.second.second->Get(
+            ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &entry);
+        if ((ret == OK) && (entry.count > 0)) {
+          float max_digital_zoom = entry.data.f[0];
+          // focal length of ultrawide lens
+          float min_zoom_ratio = focal_length * logical_sensor_width /
+                                 (logical_focal_length * sensor_width);
+          float max_zoom_ratio = max_digital_zoom * min_zoom_ratio;
+          zoom_ratio_physical_camera_info.push_back(
+              {focal_length, min_zoom_ratio, max_zoom_ratio,
+               physical_device.first});
+        }
+      }
+    }
+  }
+
+  // Sort the mapping by ascending focal length
+  std::sort(zoom_ratio_physical_camera_info.begin(),
+            zoom_ratio_physical_camera_info.end(),
+            [](const ZoomRatioPhysicalCameraInfo& a,
+               const ZoomRatioPhysicalCameraInfo& b) {
+              return a.focal_length < b.focal_length;
+            });
+
+  // Modify the zoom ratio range for each focal length so that they don't
+  // overlap
+  for (size_t i = 0; i < zoom_ratio_physical_camera_info.size() - 1; i++) {
+    auto& current = zoom_ratio_physical_camera_info[i];
+    auto& next = zoom_ratio_physical_camera_info[i + 1];
+    if (current.max_zoom_ratio > next.min_zoom_ratio) {
+      current.max_zoom_ratio = next.min_zoom_ratio;
+    }
+  }
+
+  return zoom_ratio_physical_camera_info;
+}
+
 }  // namespace android
diff --git a/devices/EmulatedCamera/hwl/EmulatedLogicalRequestState.h b/devices/EmulatedCamera/hwl/EmulatedLogicalRequestState.h
index 93b4c04..1a7edfc 100644
--- a/devices/EmulatedCamera/hwl/EmulatedLogicalRequestState.h
+++ b/devices/EmulatedCamera/hwl/EmulatedLogicalRequestState.h
@@ -24,6 +24,34 @@
 namespace android {
 
 using google_camera_hal::HalCameraMetadata;
+using google_camera_hal::HalStream;
+using google_camera_hal::HwlPipelineCallback;
+
+struct EmulatedStream : public HalStream {
+  uint32_t width, height;
+  size_t buffer_size;
+  bool is_input;
+  int32_t group_id;
+};
+
+struct EmulatedPipeline {
+  HwlPipelineCallback cb;
+  // stream id -> stream map
+  std::unordered_map<uint32_t, EmulatedStream> streams;
+  uint32_t physical_camera_id, pipeline_id;
+};
+
+// [physical_camera_id -> [group_id -> stream_id]]
+typedef std::map<uint32_t, std::map<uint32_t, int32_t>> DynamicStreamIdMapType;
+
+// Info keeping track of mapping between zoom ratio range, focal length, and
+// physical camera Id.
+struct ZoomRatioPhysicalCameraInfo {
+  float focal_length;
+  float min_zoom_ratio;
+  float max_zoom_ratio;
+  uint32_t physical_camera_id;
+};
 
 class EmulatedLogicalRequestState {
  public:
@@ -49,6 +77,12 @@
       std::unique_ptr<HalCameraMetadata> logical_chars,
       PhysicalDeviceMapPtr physical_devices);
 
+  status_t UpdateRequestForDynamicStreams(
+      HwlPipelineRequest* request,
+      const std::vector<EmulatedPipeline>& pipelines,
+      const DynamicStreamIdMapType& dynamic_stream_id_map_type,
+      bool use_default_physical_camera);
+
  private:
   uint32_t logical_camera_id_ = 0;
   std::unique_ptr<EmulatedRequestState> logical_request_state_;
@@ -58,9 +92,17 @@
   // Maps a physical device id to its respective request state
   std::unordered_map<uint32_t, std::unique_ptr<EmulatedRequestState>>
       physical_request_states_;
-  // Maps particular focal length to physical device id
-  std::unordered_map<float, uint32_t> physical_focal_length_map_;
-  float current_focal_length_ = 0.f;
+
+  // Describes the mapping between particular zoom ratio boundary value and
+  // physical device id. The vector is sorted by ascending zoom ratios.
+  std::vector<ZoomRatioPhysicalCameraInfo> zoom_ratio_physical_camera_info_;
+  uint32_t current_physical_camera_ = 0;
+
+  static std::vector<ZoomRatioPhysicalCameraInfo> GetZoomRatioPhysicalCameraInfo(
+      const HalCameraMetadata* logical_chars,
+      const PhysicalDeviceMap* physical_devices);
+  static void UpdateActivePhysicalId(HalCameraMetadata* result_metadata,
+                                     uint32_t device_id);
 
   EmulatedLogicalRequestState(const EmulatedLogicalRequestState&) = delete;
   EmulatedLogicalRequestState& operator=(const EmulatedLogicalRequestState&) =
diff --git a/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.cpp b/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.cpp
index 3fe9009..8c7f24d 100644
--- a/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.cpp
+++ b/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.cpp
@@ -34,13 +34,16 @@
 using google_camera_hal::MessageType;
 using google_camera_hal::NotifyMessage;
 
-EmulatedRequestProcessor::EmulatedRequestProcessor(uint32_t camera_id,
-                                                   sp<EmulatedSensor> sensor)
+EmulatedRequestProcessor::EmulatedRequestProcessor(
+    uint32_t camera_id, sp<EmulatedSensor> sensor,
+    const HwlSessionCallback& session_callback)
     : camera_id_(camera_id),
       sensor_(sensor),
+      session_callback_(session_callback),
       request_state_(std::make_unique<EmulatedLogicalRequestState>(camera_id)) {
   ATRACE_CALL();
   request_thread_ = std::thread([this] { this->RequestProcessorLoop(); });
+  importer_ = std::make_shared<HandleImporter>();
 }
 
 EmulatedRequestProcessor::~EmulatedRequestProcessor() {
@@ -56,13 +59,16 @@
 }
 
 status_t EmulatedRequestProcessor::ProcessPipelineRequests(
-    uint32_t frame_number, const std::vector<HwlPipelineRequest>& requests,
-    const std::vector<EmulatedPipeline>& pipelines) {
+    uint32_t frame_number, std::vector<HwlPipelineRequest>& requests,
+    const std::vector<EmulatedPipeline>& pipelines,
+    const DynamicStreamIdMapType& dynamic_stream_id_map,
+    bool use_default_physical_camera) {
   ATRACE_CALL();
+  status_t res = OK;
 
   std::unique_lock<std::mutex> lock(process_mutex_);
 
-  for (const auto& request : requests) {
+  for (auto& request : requests) {
     if (request.pipeline_id >= pipelines.size()) {
       ALOGE("%s: Pipeline request with invalid pipeline id: %u", __FUNCTION__,
             request.pipeline_id);
@@ -74,19 +80,33 @@
           lock, std::chrono::nanoseconds(
                     EmulatedSensor::kSupportedFrameDurationRange[1]));
       if (result == std::cv_status::timeout) {
-        ALOGE("%s Timed out waiting for a pending request slot", __FUNCTION__);
+        ALOGE("%s: Timed out waiting for a pending request slot", __FUNCTION__);
         return TIMED_OUT;
       }
     }
 
+    res = request_state_->UpdateRequestForDynamicStreams(
+        &request, pipelines, dynamic_stream_id_map, use_default_physical_camera);
+    if (res != OK) {
+      ALOGE("%s: Failed to update request for dynamic streams: %s(%d)",
+            __FUNCTION__, strerror(-res), res);
+      return res;
+    }
+
     auto output_buffers = CreateSensorBuffers(
         frame_number, request.output_buffers,
         pipelines[request.pipeline_id].streams, request.pipeline_id,
-        pipelines[request.pipeline_id].cb);
+        pipelines[request.pipeline_id].cb, /*override_width*/ 0,
+        /*override_height*/ 0);
+    if (output_buffers == nullptr) {
+      return NO_MEMORY;
+    }
+
     auto input_buffers = CreateSensorBuffers(
         frame_number, request.input_buffers,
         pipelines[request.pipeline_id].streams, request.pipeline_id,
-        pipelines[request.pipeline_id].cb);
+        pipelines[request.pipeline_id].cb, request.input_width,
+        request.input_height);
 
     pending_requests_.push(
         {.settings = HalCameraMetadata::Clone(request.settings.get()),
@@ -100,16 +120,55 @@
 std::unique_ptr<Buffers> EmulatedRequestProcessor::CreateSensorBuffers(
     uint32_t frame_number, const std::vector<StreamBuffer>& buffers,
     const std::unordered_map<uint32_t, EmulatedStream>& streams,
-    uint32_t pipeline_id, HwlPipelineCallback cb) {
+    uint32_t pipeline_id, HwlPipelineCallback cb, int32_t override_width,
+    int32_t override_height) {
   if (buffers.empty()) {
     return nullptr;
   }
 
+  std::vector<StreamBuffer> requested_buffers;
+  for (auto& buffer : buffers) {
+    if (buffer.buffer != nullptr) {
+      requested_buffers.push_back(buffer);
+      continue;
+    }
+
+    if (session_callback_.request_stream_buffers != nullptr) {
+      std::vector<StreamBuffer> one_requested_buffer;
+      status_t res = session_callback_.request_stream_buffers(
+          buffer.stream_id, 1, &one_requested_buffer, frame_number);
+      if (res != OK) {
+        ALOGE("%s: request_stream_buffers failed: %s(%d)", __FUNCTION__,
+              strerror(-res), res);
+        continue;
+      }
+      if (one_requested_buffer.size() != 1 ||
+          one_requested_buffer[0].buffer == nullptr) {
+        ALOGE("%s: request_stream_buffers failed to return a valid buffer",
+              __FUNCTION__);
+        continue;
+      }
+      requested_buffers.push_back(one_requested_buffer[0]);
+    }
+  }
+
+  if (requested_buffers.size() < buffers.size()) {
+    ALOGE(
+        "%s: Failed to acquire all sensor buffers: %zu acquired, %zu requested",
+        __FUNCTION__, requested_buffers.size(), buffers.size());
+    // This only happens for HAL buffer manager use case.
+    if (session_callback_.return_stream_buffers != nullptr) {
+      session_callback_.return_stream_buffers(requested_buffers);
+    }
+    return nullptr;
+  }
+
   auto sensor_buffers = std::make_unique<Buffers>();
-  sensor_buffers->reserve(buffers.size());
-  for (const auto& buffer : buffers) {
+  sensor_buffers->reserve(requested_buffers.size());
+  for (auto& buffer : requested_buffers) {
     auto sensor_buffer = CreateSensorBuffer(
-        frame_number, streams.at(buffer.stream_id), pipeline_id, cb, buffer);
+        frame_number, streams.at(buffer.stream_id), pipeline_id, cb, buffer,
+        override_width, override_height);
     if (sensor_buffer.get() != nullptr) {
       sensor_buffers->push_back(std::move(sensor_buffer));
     }
@@ -152,8 +211,8 @@
 }
 
 status_t EmulatedRequestProcessor::GetBufferSizeAndStride(
-    const EmulatedStream& stream, uint32_t* size /*out*/,
-    uint32_t* stride /*out*/) {
+    const EmulatedStream& stream, buffer_handle_t buffer,
+    uint32_t* size /*out*/, uint32_t* stride /*out*/) {
   if (size == nullptr) {
     return BAD_VALUE;
   }
@@ -165,7 +224,6 @@
       break;
     case HAL_PIXEL_FORMAT_RGBA_8888:
       *stride = stream.width * 4;
-      ;
       *size = (*stride) * stream.height;
       break;
     case HAL_PIXEL_FORMAT_Y16:
@@ -185,7 +243,9 @@
       }
       break;
     case HAL_PIXEL_FORMAT_RAW16:
-      *stride = stream.width * 2;
+      if (importer_->getMonoPlanarStrideBytes(buffer, stride) != NO_ERROR) {
+        *stride = stream.width * 2;
+      }
       *size = (*stride) * stream.height;
       break;
     default:
@@ -196,18 +256,19 @@
 }
 
 status_t EmulatedRequestProcessor::LockSensorBuffer(
-    const EmulatedStream& stream, HandleImporter& importer /*in*/,
-    buffer_handle_t buffer, SensorBuffer* sensor_buffer /*out*/) {
+    const EmulatedStream& stream, buffer_handle_t buffer, int32_t width,
+    int32_t height, SensorBuffer* sensor_buffer /*out*/) {
   if (sensor_buffer == nullptr) {
     return BAD_VALUE;
   }
 
-  auto width = static_cast<int32_t>(stream.width);
-  auto height = static_cast<int32_t>(stream.height);
   auto usage = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN;
-  if (stream.override_format == HAL_PIXEL_FORMAT_YCBCR_420_888) {
+  bool isYUV_420_888 = stream.override_format == HAL_PIXEL_FORMAT_YCBCR_420_888;
+  bool isP010 = static_cast<android_pixel_format_v1_1_t>(
+                    stream.override_format) == HAL_PIXEL_FORMAT_YCBCR_P010;
+  if ((isYUV_420_888) || (isP010)) {
     IMapper::Rect map_rect = {0, 0, width, height};
-    auto yuv_layout = importer.lockYCbCr(buffer, usage, map_rect);
+    auto yuv_layout = importer_->lockYCbCr(buffer, usage, map_rect);
     if ((yuv_layout.y != nullptr) && (yuv_layout.cb != nullptr) &&
         (yuv_layout.cr != nullptr)) {
       sensor_buffer->plane.img_y_crcb.img_y =
@@ -219,7 +280,7 @@
       sensor_buffer->plane.img_y_crcb.y_stride = yuv_layout.yStride;
       sensor_buffer->plane.img_y_crcb.cbcr_stride = yuv_layout.cStride;
       sensor_buffer->plane.img_y_crcb.cbcr_step = yuv_layout.chromaStep;
-      if ((yuv_layout.chromaStep == 2) &&
+      if (isYUV_420_888 && (yuv_layout.chromaStep == 2) &&
           std::abs(sensor_buffer->plane.img_y_crcb.img_cb -
                    sensor_buffer->plane.img_y_crcb.img_cr) != 1) {
         ALOGE("%s: Unsupported YUV layout, chroma step: %u U/V plane delta: %u",
@@ -229,13 +290,14 @@
                            sensor_buffer->plane.img_y_crcb.img_cr)));
         return BAD_VALUE;
       }
+      sensor_buffer->plane.img_y_crcb.bytesPerPixel = isP010 ? 2 : 1;
     } else {
       ALOGE("%s: Failed to lock output buffer!", __FUNCTION__);
       return BAD_VALUE;
     }
   } else {
     uint32_t buffer_size = 0, stride = 0;
-    auto ret = GetBufferSizeAndStride(stream, &buffer_size, &stride);
+    auto ret = GetBufferSizeAndStride(stream, buffer, &buffer_size, &stride);
     if (ret != OK) {
       ALOGE("%s: Unsupported pixel format: 0x%x", __FUNCTION__,
             stream.override_format);
@@ -243,17 +305,17 @@
     }
     if (stream.override_format == HAL_PIXEL_FORMAT_BLOB) {
       sensor_buffer->plane.img.img =
-          static_cast<uint8_t*>(importer.lock(buffer, usage, buffer_size));
+          static_cast<uint8_t*>(importer_->lock(buffer, usage, buffer_size));
     } else {
       IMapper::Rect region{0, 0, width, height};
       sensor_buffer->plane.img.img =
-          static_cast<uint8_t*>(importer.lock(buffer, usage, region));
+          static_cast<uint8_t*>(importer_->lock(buffer, usage, region));
     }
     if (sensor_buffer->plane.img.img == nullptr) {
       ALOGE("%s: Failed to lock output buffer!", __FUNCTION__);
       return BAD_VALUE;
     }
-    sensor_buffer->plane.img.stride = stride;
+    sensor_buffer->plane.img.stride_in_bytes = stride;
     sensor_buffer->plane.img.buffer_size = buffer_size;
   }
 
@@ -263,8 +325,9 @@
 std::unique_ptr<SensorBuffer> EmulatedRequestProcessor::CreateSensorBuffer(
     uint32_t frame_number, const EmulatedStream& emulated_stream,
     uint32_t pipeline_id, HwlPipelineCallback callback,
-    StreamBuffer stream_buffer) {
-  auto buffer = std::make_unique<SensorBuffer>();
+    StreamBuffer stream_buffer, int32_t override_width,
+    int32_t override_height) {
+  auto buffer = std::make_unique<SensorBuffer>(importer_);
 
   auto stream = emulated_stream;
   // Make sure input stream formats are correctly mapped here
@@ -272,9 +335,14 @@
     stream.override_format =
         EmulatedSensor::OverrideFormat(stream.override_format);
   }
-  buffer->width = stream.width;
-  buffer->height = stream.height;
-  buffer->format = stream.override_format;
+  if (override_width > 0 && override_height > 0) {
+    buffer->width = override_width;
+    buffer->height = override_height;
+  } else {
+    buffer->width = stream.width;
+    buffer->height = stream.height;
+  }
+  buffer->format = static_cast<PixelFormat>(stream.override_format);
   buffer->dataSpace = stream.override_data_space;
   buffer->stream_buffer = stream_buffer;
   buffer->pipeline_id = pipeline_id;
@@ -287,15 +355,9 @@
   // In case buffer processing is successful, flip this flag accordingly
   buffer->stream_buffer.status = BufferStatus::kError;
 
-  if (!buffer->importer.importBuffer(buffer->stream_buffer.buffer)) {
-    ALOGE("%s: Failed importing stream buffer!", __FUNCTION__);
-    buffer.release();
-    buffer = nullptr;
-  }
-
-  if (buffer.get() != nullptr) {
-    auto ret = LockSensorBuffer(stream, buffer->importer,
-                                buffer->stream_buffer.buffer, buffer.get());
+  if (buffer->stream_buffer.buffer != nullptr) {
+    auto ret = LockSensorBuffer(stream, buffer->stream_buffer.buffer,
+                                buffer->width, buffer->height, buffer.get());
     if (ret != OK) {
       buffer.release();
       buffer = nullptr;
@@ -303,8 +365,8 @@
   }
 
   if ((buffer.get() != nullptr) && (stream_buffer.acquire_fence != nullptr)) {
-    auto fence_status = buffer->importer.importFence(
-        stream_buffer.acquire_fence, buffer->acquire_fence_fd);
+    auto fence_status = importer_->importFence(stream_buffer.acquire_fence,
+                                               buffer->acquire_fence_fd);
     if (!fence_status) {
       ALOGE("%s: Failed importing acquire fence!", __FUNCTION__);
       buffer.release();
@@ -436,6 +498,12 @@
                                     std::move(physical_devices));
 }
 
+void EmulatedRequestProcessor::SetSessionCallback(
+    const HwlSessionCallback& hwl_session_callback) {
+  std::lock_guard<std::mutex> lock(process_mutex_);
+  session_callback_ = hwl_session_callback;
+}
+
 status_t EmulatedRequestProcessor::GetDefaultRequest(
     RequestTemplate type, std::unique_ptr<HalCameraMetadata>* default_settings) {
   std::lock_guard<std::mutex> lock(process_mutex_);
diff --git a/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.h b/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.h
index 1eb609f..4f47e89 100644
--- a/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.h
+++ b/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.h
@@ -29,25 +29,11 @@
 namespace android {
 
 using google_camera_hal::HalCameraMetadata;
-using google_camera_hal::HalStream;
-using google_camera_hal::HwlPipelineCallback;
 using google_camera_hal::HwlPipelineRequest;
+using google_camera_hal::HwlSessionCallback;
 using google_camera_hal::RequestTemplate;
 using google_camera_hal::StreamBuffer;
 
-struct EmulatedStream : public HalStream {
-  uint32_t width, height;
-  size_t buffer_size;
-  bool is_input;
-};
-
-struct EmulatedPipeline {
-  HwlPipelineCallback cb;
-  // stream id -> stream map
-  std::unordered_map<uint32_t, EmulatedStream> streams;
-  uint32_t physical_camera_id, pipeline_id;
-};
-
 struct PendingRequest {
   std::unique_ptr<HalCameraMetadata> settings;
   std::unique_ptr<Buffers> input_buffers;
@@ -56,14 +42,17 @@
 
 class EmulatedRequestProcessor {
  public:
-  EmulatedRequestProcessor(uint32_t camera_id, sp<EmulatedSensor> sensor);
+  EmulatedRequestProcessor(uint32_t camera_id, sp<EmulatedSensor> sensor,
+                           const HwlSessionCallback& session_callback);
   virtual ~EmulatedRequestProcessor();
 
   // Process given pipeline requests and invoke the respective callback in a
   // separate thread
   status_t ProcessPipelineRequests(
-      uint32_t frame_number, const std::vector<HwlPipelineRequest>& requests,
-      const std::vector<EmulatedPipeline>& pipelines);
+      uint32_t frame_number, std::vector<HwlPipelineRequest>& requests,
+      const std::vector<EmulatedPipeline>& pipelines,
+      const DynamicStreamIdMapType& dynamic_stream_id_map,
+      bool use_default_physical_camera);
 
   status_t GetDefaultRequest(
       RequestTemplate type,
@@ -74,6 +63,8 @@
   status_t Initialize(std::unique_ptr<HalCameraMetadata> static_meta,
                       PhysicalDeviceMapPtr physical_devices);
 
+  void SetSessionCallback(const HwlSessionCallback& hwl_session_callback);
+
  private:
   void RequestProcessorLoop();
 
@@ -86,22 +77,22 @@
     return (delta == 0) ? value : (value + (alignment - delta));
   }
 
+  // Return buffer size and row stride in bytes
   status_t GetBufferSizeAndStride(const EmulatedStream& stream,
-                                  uint32_t* size /*out*/,
+                                  buffer_handle_t buffer, uint32_t* size /*out*/,
                                   uint32_t* stride /*out*/);
   status_t LockSensorBuffer(const EmulatedStream& stream,
-                            HandleImporter& importer /*in*/,
-                            buffer_handle_t buffer,
-                            SensorBuffer* sensor_buffer /*out*/);
+                            buffer_handle_t buffer, int32_t width,
+                            int32_t height, SensorBuffer* sensor_buffer /*out*/);
   std::unique_ptr<Buffers> CreateSensorBuffers(
       uint32_t frame_number, const std::vector<StreamBuffer>& buffers,
       const std::unordered_map<uint32_t, EmulatedStream>& streams,
-      uint32_t pipeline_id, HwlPipelineCallback cb);
-  std::unique_ptr<SensorBuffer> CreateSensorBuffer(uint32_t frame_number,
-                                                   const EmulatedStream& stream,
-                                                   uint32_t pipeline_id,
-                                                   HwlPipelineCallback callback,
-                                                   StreamBuffer stream_buffer);
+      uint32_t pipeline_id, HwlPipelineCallback cb, int32_t override_width,
+      int32_t override_height);
+  std::unique_ptr<SensorBuffer> CreateSensorBuffer(
+      uint32_t frame_number, const EmulatedStream& stream, uint32_t pipeline_id,
+      HwlPipelineCallback callback, StreamBuffer stream_buffer,
+      int32_t override_width, int32_t override_height);
   std::unique_ptr<Buffers> AcquireBuffers(Buffers* buffers);
   void NotifyFailedRequest(const PendingRequest& request);
 
@@ -110,9 +101,11 @@
   std::queue<PendingRequest> pending_requests_;
   uint32_t camera_id_;
   sp<EmulatedSensor> sensor_;
+  HwlSessionCallback session_callback_;
   std::unique_ptr<EmulatedLogicalRequestState>
       request_state_;  // Stores and handles 3A and related camera states.
   std::unique_ptr<HalCameraMetadata> last_settings_;
+  std::shared_ptr<HandleImporter> importer_;
 
   EmulatedRequestProcessor(const EmulatedRequestProcessor&) = delete;
   EmulatedRequestProcessor& operator=(const EmulatedRequestProcessor&) = delete;
diff --git a/devices/EmulatedCamera/hwl/EmulatedRequestState.cpp b/devices/EmulatedCamera/hwl/EmulatedRequestState.cpp
index 0b0609a..a1e553a 100644
--- a/devices/EmulatedCamera/hwl/EmulatedRequestState.cpp
+++ b/devices/EmulatedCamera/hwl/EmulatedRequestState.cpp
@@ -40,7 +40,8 @@
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING,
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING,
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA,
-};
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_REMOSAIC_REPROCESSING,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR};
 
 const std::set<uint8_t> EmulatedRequestState::kSupportedHWLevels = {
     ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
@@ -505,9 +506,11 @@
         sensor_exposure_time_ = entry.data.i64[0];
       } else {
         ALOGE(
-            "%s: Sensor exposure time"
+            "%s: Sensor exposure time %" PRId64
             " not within supported range[%" PRId64 ", %" PRId64 "]",
-            __FUNCTION__, sensor_exposure_time_range_.first,
+            __FUNCTION__,
+            entry.data.i64[0],
+            sensor_exposure_time_range_.first,
             sensor_exposure_time_range_.second);
         // Use last valid value
       }
@@ -521,9 +524,10 @@
         sensor_frame_duration_ = entry.data.i64[0];
       } else {
         ALOGE(
-            "%s: Sensor frame duration "
+            "%s: Sensor frame duration %" PRId64
             " not within supported range[%" PRId64 ", %" PRId64 "]",
-            __FUNCTION__, EmulatedSensor::kSupportedFrameDurationRange[0],
+            __FUNCTION__, entry.data.i64[0],
+            EmulatedSensor::kSupportedFrameDurationRange[0],
             sensor_max_frame_duration_);
         // Use last valid value
       }
@@ -539,8 +543,9 @@
           (entry.data.i32[0] <= sensor_sensitivity_range_.second)) {
         sensor_sensitivity_ = entry.data.i32[0];
       } else {
-        ALOGE("%s: Sensor sensitivity not within supported range[%d, %d]",
-              __FUNCTION__, sensor_sensitivity_range_.first,
+        ALOGE("%s: Sensor sensitivity %d not within supported range[%d, %d]",
+              __FUNCTION__, entry.data.i32[0],
+              sensor_sensitivity_range_.first,
               sensor_sensitivity_range_.second);
         // Use last valid value
       }
@@ -619,6 +624,17 @@
     }
   }
 
+  ret = request_settings_->Get(ANDROID_SENSOR_PIXEL_MODE, &entry);
+  if ((ret == OK) && (entry.count == 1)) {
+    if (available_sensor_pixel_modes_.find(entry.data.u8[0]) !=
+        available_sensor_pixel_modes_.end()) {
+      sensor_pixel_mode_ = entry.data.u8[0];
+    } else {
+      ALOGE("%s: Unsupported control sensor pixel  mode!", __FUNCTION__);
+      return BAD_VALUE;
+    }
+  }
+
   ret = request_settings_->Get(ANDROID_CONTROL_SCENE_MODE, &entry);
   if ((ret == OK) && (entry.count == 1)) {
     // Disabled scene is not expected to be among the available scene list
@@ -698,6 +714,32 @@
     }
   }
 
+  // Check test pattern parameter
+  uint8_t test_pattern_mode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
+  ret = request_settings_->Get(ANDROID_SENSOR_TEST_PATTERN_MODE, &entry);
+  if ((ret == OK) && (entry.count == 1)) {
+    if (available_test_pattern_modes_.find(entry.data.u8[0]) !=
+        available_test_pattern_modes_.end()) {
+      test_pattern_mode = entry.data.u8[0];
+    } else {
+      ALOGE("%s: Unsupported test pattern mode: %u", __FUNCTION__,
+            entry.data.u8[0]);
+      return BAD_VALUE;
+    }
+  }
+  uint32_t test_pattern_data[4] = {0, 0, 0, 0};
+  if (test_pattern_mode == ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) {
+    ret = request_settings_->Get(ANDROID_SENSOR_TEST_PATTERN_DATA, &entry);
+    if ((ret == OK) && (entry.count == 4)) {
+      // 'Convert' from i32 to u32 here
+      memcpy(test_pattern_data, entry.data.i32, sizeof(test_pattern_data));
+    }
+  }
+  // BLACK is just SOLID_COLOR with all-zero data
+  if (test_pattern_mode == ANDROID_SENSOR_TEST_PATTERN_MODE_BLACK) {
+    test_pattern_mode = ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR;
+  }
+
   // 3A modes are active in case the scene is disabled or set to face priority
   // or the control mode is not using scenes
   if ((scene_mode_ == ANDROID_CONTROL_SCENE_MODE_DISABLED) ||
@@ -786,6 +828,10 @@
   sensor_settings->video_stab = vstab_mode;
   sensor_settings->report_edge_mode = report_edge_mode_;
   sensor_settings->edge_mode = edge_mode;
+  sensor_settings->sensor_pixel_mode = sensor_pixel_mode_;
+  sensor_settings->test_pattern_mode = test_pattern_mode;
+  memcpy(sensor_settings->test_pattern_data, test_pattern_data,
+         sizeof(sensor_settings->test_pattern_data));
 
   return OK;
 }
@@ -804,6 +850,9 @@
   result->result_metadata->Set(ANDROID_REQUEST_PIPELINE_DEPTH,
                                &max_pipeline_depth_, 1);
   result->result_metadata->Set(ANDROID_CONTROL_MODE, &control_mode_, 1);
+  result->result_metadata->Set(ANDROID_SENSOR_PIXEL_MODE, &sensor_pixel_mode_,
+                               1);
+
   result->result_metadata->Set(ANDROID_CONTROL_AF_MODE, &af_mode_, 1);
   result->result_metadata->Set(ANDROID_CONTROL_AF_STATE, &af_state_, 1);
   result->result_metadata->Set(ANDROID_CONTROL_AWB_MODE, &awb_mode_, 1);
@@ -920,6 +969,9 @@
   }
   if (zoom_ratio_supported_) {
     result->result_metadata->Set(ANDROID_CONTROL_ZOOM_RATIO, &zoom_ratio_, 1);
+    result->result_metadata->Set(ANDROID_SCALER_CROP_REGION,
+                                 scaler_crop_region_default_,
+                                 ARRAY_SIZE(scaler_crop_region_default_));
   }
   if (report_extended_scene_mode_) {
     result->result_metadata->Set(ANDROID_CONTROL_EXTENDED_SCENE_MODE,
@@ -1077,6 +1129,8 @@
   int32_t test_pattern_mode = (off_test_pattern_mode_supported)
                                   ? ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
                                   : *available_test_pattern_modes_.begin();
+  int32_t test_pattern_data[4] = {0, 0, 0, 0};
+
   for (size_t idx = 0; idx < kTemplateCount; idx++) {
     if (default_requests_[idx].get() == nullptr) {
       continue;
@@ -1090,6 +1144,8 @@
                                 &sensor_sensitivity_, 1);
     default_requests_[idx]->Set(ANDROID_SENSOR_TEST_PATTERN_MODE,
                                 &test_pattern_mode, 1);
+    default_requests_[idx]->Set(ANDROID_SENSOR_TEST_PATTERN_DATA,
+                                test_pattern_data, 4);
   }
 
   return OK;
@@ -1246,10 +1302,10 @@
                               &overrides_entry);
   if ((ret == OK) && ((overrides_entry.count / 3) == available_scenes_.size()) &&
       ((overrides_entry.count % 3) == 0)) {
-    for (size_t i = 0; i < entry.count; i += 3) {
-      SceneOverride scene(overrides_entry.data.u8[i],
-                          overrides_entry.data.u8[i + 1],
-                          overrides_entry.data.u8[i + 2]);
+    for (size_t i = 0; i < entry.count; i++) {
+      SceneOverride scene(overrides_entry.data.u8[i*3],
+                          overrides_entry.data.u8[i*3 + 1],
+                          overrides_entry.data.u8[i*3 + 2]);
       if (available_ae_modes_.find(scene.ae_mode) == available_ae_modes_.end()) {
         ALOGE("%s: AE scene mode override: %d not supported!", __FUNCTION__,
               scene.ae_mode);
@@ -1567,6 +1623,14 @@
     return BAD_VALUE;
   }
 
+  available_sensor_pixel_modes_.insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+
+  if (SupportsCapability(
+          ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR)) {
+    available_sensor_pixel_modes_.insert(
+        ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+  }
+
   // Auto mode must always be present
   if (available_control_modes_.find(ANDROID_CONTROL_MODE_AUTO) ==
       available_control_modes_.end()) {
@@ -1771,7 +1835,8 @@
         float min_zoom_ratio, max_zoom_ratio;
 
         if (mode < ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED ||
-            mode > ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS) {
+            (mode > ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS &&
+             mode < ANDROID_CONTROL_EXTENDED_SCENE_MODE_VENDOR_START)) {
           ALOGE("%s: Invalid extended scene mode %d", __FUNCTION__, mode);
           return BAD_VALUE;
         }
@@ -2639,7 +2704,8 @@
 }
 
 status_t EmulatedRequestState::InitializeReprocessDefaults() {
-  if (supports_private_reprocessing_ || supports_yuv_reprocessing_) {
+  if (supports_private_reprocessing_ || supports_yuv_reprocessing_ ||
+      supports_remosaic_reprocessing_) {
     StreamConfigurationMap config_map(*static_metadata_);
     if (!config_map.SupportsReprocessing()) {
       ALOGE(
@@ -2740,6 +2806,8 @@
       ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
   supports_yuv_reprocessing_ = SupportsCapability(
       ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
+  supports_remosaic_reprocessing_ = SupportsCapability(
+      ANDROID_REQUEST_AVAILABLE_CAPABILITIES_REMOSAIC_REPROCESSING);
   is_backward_compatible_ = SupportsCapability(
       ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
   is_raw_capable_ =
diff --git a/devices/EmulatedCamera/hwl/EmulatedRequestState.h b/devices/EmulatedCamera/hwl/EmulatedRequestState.h
index 942f42f..1591422 100644
--- a/devices/EmulatedCamera/hwl/EmulatedRequestState.h
+++ b/devices/EmulatedCamera/hwl/EmulatedRequestState.h
@@ -30,7 +30,6 @@
 using google_camera_hal::HwlPipelineCallback;
 using google_camera_hal::HwlPipelineRequest;
 using google_camera_hal::RequestTemplate;
-using google_camera_hal::StreamBuffer;
 
 struct PendingRequest;
 
@@ -128,6 +127,7 @@
   bool is_raw_capable_ = false;
   bool supports_private_reprocessing_ = false;
   bool supports_yuv_reprocessing_ = false;
+  bool supports_remosaic_reprocessing_ = false;
 
   // android.control.*
   struct SceneOverride {
@@ -174,6 +174,7 @@
   std::set<uint8_t> available_antibanding_modes_;
   std::set<uint8_t> available_effects_;
   std::set<uint8_t> available_vstab_modes_;
+  std::set<uint8_t> available_sensor_pixel_modes_;
   std::vector<ExtendedSceneModeCapability> available_extended_scene_mode_caps_;
   std::unordered_map<uint8_t, SceneOverride> scene_overrides_;
   std::vector<FPSRange> available_fps_ranges_;
@@ -191,6 +192,7 @@
   size_t max_awb_regions_ = 0;
   size_t max_af_regions_ = 0;
   uint8_t control_mode_ = ANDROID_CONTROL_MODE_AUTO;
+  uint8_t sensor_pixel_mode_ = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
   uint8_t scene_mode_ = ANDROID_CONTROL_SCENE_MODE_DISABLED;
   uint8_t ae_mode_ = ANDROID_CONTROL_AE_MODE_ON;
   uint8_t awb_mode_ = ANDROID_CONTROL_AWB_MODE_AUTO;
@@ -293,7 +295,7 @@
   std::set<uint8_t> available_ois_modes_;
   uint8_t ois_mode_ = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   bool report_ois_mode_ = false;
-  float pose_rotation_[5] = {.0f};
+  float pose_rotation_[4] = {.0f};
   float pose_translation_[3] = {.0f};
   float distortion_[5] = {.0f};
   float intrinsic_calibration_[5] = {.0f};
diff --git a/devices/EmulatedCamera/hwl/EmulatedScene.cpp b/devices/EmulatedCamera/hwl/EmulatedScene.cpp
index 595adce..0fe5615 100644
--- a/devices/EmulatedCamera/hwl/EmulatedScene.cpp
+++ b/devices/EmulatedCamera/hwl/EmulatedScene.cpp
@@ -201,6 +201,14 @@
   exposure_duration_ = seconds;
 }
 
+void EmulatedScene::SetTestPattern(bool enabled) {
+  test_pattern_mode_ = enabled;
+}
+
+void EmulatedScene::SetTestPatternData(uint32_t data[4]) {
+  memcpy(test_pattern_data_, data, 4);
+}
+
 void EmulatedScene::CalculateScene(nsecs_t time, int32_t handshake_divider) {
   // Calculate time fractions for interpolation
   int time_idx = hour_ / kTimeStep;
@@ -493,6 +501,8 @@
 }
 
 const uint32_t* EmulatedScene::GetPixelElectrons() {
+  if (test_pattern_mode_) return test_pattern_data_;
+
   const uint32_t* pixel = current_scene_material_;
   current_x_++;
   sub_x_++;
diff --git a/devices/EmulatedCamera/hwl/EmulatedScene.h b/devices/EmulatedCamera/hwl/EmulatedScene.h
index b0def35..0334a58 100644
--- a/devices/EmulatedCamera/hwl/EmulatedScene.h
+++ b/devices/EmulatedCamera/hwl/EmulatedScene.h
@@ -69,6 +69,11 @@
   // Must be called before calculateScene
   void SetExposureDuration(float seconds);
 
+  // Set test pattern mode; this draws a solid-color image set to the color
+  // defined by test pattern data
+  void SetTestPattern(bool enabled);
+  void SetTestPatternData(uint32_t data[4]);
+
   // Calculate scene information for current hour and the time offset since
   // the hour. Resets pixel readout location to 0,0
   void CalculateScene(nsecs_t time, int32_t handshake_divider);
@@ -145,6 +150,9 @@
   float exposure_duration_;
   float sensor_sensitivity_;  // electrons per lux-second
 
+  bool test_pattern_mode_;  // SOLID_COLOR only
+  uint32_t test_pattern_data_[4];
+
   enum Materials {
     GRASS = 0,
     GRASS_SHADOW,
diff --git a/devices/EmulatedCamera/hwl/EmulatedSensor.cpp b/devices/EmulatedCamera/hwl/EmulatedSensor.cpp
index cc92d62..6b6d678 100644
--- a/devices/EmulatedCamera/hwl/EmulatedSensor.cpp
+++ b/devices/EmulatedCamera/hwl/EmulatedSensor.cpp
@@ -154,6 +154,13 @@
     return false;
   }
 
+  if ((characteristics.full_res_width == 0) ||
+      (characteristics.full_res_height == 0)) {
+    ALOGE("%s: Invalid sensor full res size %zux%zu", __FUNCTION__,
+          characteristics.full_res_width, characteristics.full_res_height);
+    return false;
+  }
+
   if ((characteristics.exposure_time_range[0] >=
        characteristics.exposure_time_range[1]) ||
       ((characteristics.exposure_time_range[0] < kSupportedExposureTimeRange[0]) ||
@@ -244,15 +251,74 @@
   return true;
 }
 
-bool EmulatedSensor::IsStreamCombinationSupported(
-    const StreamConfiguration& config, StreamConfigurationMap& map,
-    const SensorCharacteristics& sensor_chars) {
-  uint32_t raw_stream_count = 0;
-  uint32_t input_stream_count = 0;
-  uint32_t processed_stream_count = 0;
-  uint32_t stalling_stream_count = 0;
+static void SplitStreamCombination(
+    const StreamConfiguration& original_config,
+    StreamConfiguration* default_mode_config,
+    StreamConfiguration* max_resolution_mode_config,
+    StreamConfiguration* input_stream_config) {
+  // Partition output streams by resolution mode; input streams go to input_stream_config.
+  if (default_mode_config == nullptr || max_resolution_mode_config == nullptr ||
+      input_stream_config == nullptr) {
+    ALOGE("%s: Input stream / output stream configs are nullptr", __FUNCTION__);
+    return;
+  }
+  for (const auto& stream : original_config.streams) {
+    if (stream.stream_type == google_camera_hal::StreamType::kInput) {
+      input_stream_config->streams.push_back(stream);
+      continue;
+    }
+    if (stream.used_in_default_resolution_mode) {
+      default_mode_config->streams.push_back(stream);
+    }
+    if (stream.used_in_max_resolution_mode) {
+      max_resolution_mode_config->streams.push_back(stream);
+    }
+  }
+}
 
+bool EmulatedSensor::IsStreamCombinationSupported(
+    uint32_t logical_id, const StreamConfiguration& config,
+    StreamConfigurationMap& default_config_map,
+    StreamConfigurationMap& max_resolution_config_map,
+    const PhysicalStreamConfigurationMap& physical_map,
+    const PhysicalStreamConfigurationMap& physical_map_max_resolution,
+    const LogicalCharacteristics& sensor_chars) {
+  StreamConfiguration default_mode_config, max_resolution_mode_config,
+      input_stream_config;
+  SplitStreamCombination(config, &default_mode_config,
+                         &max_resolution_mode_config, &input_stream_config);
+
+  return IsStreamCombinationSupported(logical_id, default_mode_config,
+                                      default_config_map, physical_map,
+                                      sensor_chars) &&
+         IsStreamCombinationSupported(
+             logical_id, max_resolution_mode_config, max_resolution_config_map,
+             physical_map_max_resolution, sensor_chars, /*is_max_res*/ true) &&
+
+         (IsStreamCombinationSupported(logical_id, input_stream_config,
+                                       default_config_map, physical_map,
+                                       sensor_chars) ||
+          IsStreamCombinationSupported(
+              logical_id, input_stream_config, max_resolution_config_map,
+              physical_map_max_resolution, sensor_chars, /*is_max_res*/ true));
+}
+
+bool EmulatedSensor::IsStreamCombinationSupported(
+    uint32_t logical_id, const StreamConfiguration& config,
+    StreamConfigurationMap& config_map,
+    const PhysicalStreamConfigurationMap& physical_map,
+    const LogicalCharacteristics& sensor_chars, bool is_max_res) {
+  uint32_t input_stream_count = 0;
+  // Map from physical camera id to number of streams for that physical camera
+  std::map<uint32_t, uint32_t> raw_stream_count;
+  std::map<uint32_t, uint32_t> processed_stream_count;
+  std::map<uint32_t, uint32_t> stalling_stream_count;
+
+  // Validate each stream; dynamic (grouped) physical outputs must use formats
+  // listed in dynamicSizeStreamConfigurations.
   for (const auto& stream : config.streams) {
+    bool is_dynamic_output =
+        (stream.is_physical_camera_stream && stream.group_id != -1);
     if (stream.rotation != google_camera_hal::StreamRotation::kRotation0) {
       ALOGE("%s: Stream rotation: 0x%x not supported!", __FUNCTION__,
             stream.rotation);
@@ -260,13 +326,14 @@
     }
 
     if (stream.stream_type == google_camera_hal::StreamType::kInput) {
-      if (sensor_chars.max_input_streams == 0) {
+      if (sensor_chars.at(logical_id).max_input_streams == 0) {
         ALOGE("%s: Input streams are not supported on this device!",
               __FUNCTION__);
         return false;
       }
 
-      auto supported_outputs = map.GetValidOutputFormatsForInput(stream.format);
+      auto const& supported_outputs =
+          config_map.GetValidOutputFormatsForInput(stream.format);
       if (supported_outputs.empty()) {
         ALOGE("%s: Input stream with format: 0x%x no supported on this device!",
               __FUNCTION__, stream.format);
@@ -275,6 +342,25 @@
 
       input_stream_count++;
     } else {
+      if (stream.is_physical_camera_stream &&
+          physical_map.find(stream.physical_camera_id) == physical_map.end()) {
+        ALOGE("%s: Invalid physical camera id %d", __FUNCTION__,
+              stream.physical_camera_id);
+        return false;
+      }
+
+      if (is_dynamic_output) {
+        auto dynamic_physical_output_formats =
+            physical_map.at(stream.physical_camera_id)
+                ->GetDynamicPhysicalStreamOutputFormats();
+        if (dynamic_physical_output_formats.find(stream.format) ==
+            dynamic_physical_output_formats.end()) {
+          ALOGE("%s: Unsupported physical stream format %d", __FUNCTION__,
+                stream.format);
+          return false;
+        }
+      }
+
       switch (stream.format) {
         case HAL_PIXEL_FORMAT_BLOB:
           if ((stream.data_space != HAL_DATASPACE_V0_JFIF) &&
@@ -283,53 +369,105 @@
                   stream.data_space);
             return false;
           }
-          stalling_stream_count++;
+          if (stream.is_physical_camera_stream) {
+            stalling_stream_count[stream.physical_camera_id]++;
+          } else {
+            for (const auto& p : physical_map) {
+              stalling_stream_count[p.first]++;
+            }
+          }
           break;
-        case HAL_PIXEL_FORMAT_RAW16:
-          raw_stream_count++;
-          break;
+        case HAL_PIXEL_FORMAT_RAW16: {
+          const SensorCharacteristics& sensor_char =
+              stream.is_physical_camera_stream
+                  ? sensor_chars.at(stream.physical_camera_id)
+                  : sensor_chars.at(logical_id);
+          auto sensor_height =
+              is_max_res ? sensor_char.full_res_height : sensor_char.height;
+          auto sensor_width =
+              is_max_res ? sensor_char.full_res_width : sensor_char.width;
+          if (stream.height != sensor_height || stream.width != sensor_width) {
+            ALOGE(
+                "%s, RAW16 buffer height %d and width %d must match sensor "
+                "height: %zu"
+                " and width: %zu",
+                __FUNCTION__, stream.height, stream.width, sensor_height,
+                sensor_width);
+            return false;
+          }
+          if (stream.is_physical_camera_stream) {
+            raw_stream_count[stream.physical_camera_id]++;
+          } else {
+            for (const auto& p : physical_map) {
+              raw_stream_count[p.first]++;
+            }
+          }
+        } break;
         default:
-          processed_stream_count++;
+          if (stream.is_physical_camera_stream) {
+            processed_stream_count[stream.physical_camera_id]++;
+          } else {
+            for (const auto& p : physical_map) {
+              processed_stream_count[p.first]++;
+            }
+          }
+      }
+
+      auto output_sizes =
+          is_dynamic_output
+              ? physical_map.at(stream.physical_camera_id)
+                    ->GetDynamicPhysicalStreamOutputSizes(stream.format)
+              : stream.is_physical_camera_stream
+                    ? physical_map.at(stream.physical_camera_id)
+                          ->GetOutputSizes(stream.format)
+                    : config_map.GetOutputSizes(stream.format);
+
+      auto stream_size = std::make_pair(stream.width, stream.height);
+      if (output_sizes.find(stream_size) == output_sizes.end()) {
+        ALOGE("%s: Stream with size %dx%d and format 0x%x is not supported!",
+              __FUNCTION__, stream.width, stream.height, stream.format);
+        return false;
       }
     }
+  }
 
-    auto output_sizes = map.GetOutputSizes(stream.format);
-    if (output_sizes.empty()) {
-      ALOGE("%s: Unsupported format: 0x%x", __FUNCTION__, stream.format);
-      return false;
-    }
-
-    auto stream_size = std::make_pair(stream.width, stream.height);
-    if (output_sizes.find(stream_size) == output_sizes.end()) {
-      ALOGE("%s: Stream with size %dx%d and format 0x%x is not supported!",
-            __FUNCTION__, stream.width, stream.height, stream.format);
+  for (const auto& raw_count : raw_stream_count) {
+    unsigned int max_raw_streams =
+        sensor_chars.at(raw_count.first).max_raw_streams +
+        (is_max_res
+             ? 1
+             : 0);  // The extra raw stream is allowed for remosaic reprocessing.
+    if (raw_count.second > max_raw_streams) {
+      ALOGE("%s: RAW streams maximum %u exceeds supported maximum %u",
+            __FUNCTION__, raw_count.second, max_raw_streams);
       return false;
     }
   }
 
-  if (raw_stream_count > sensor_chars.max_raw_streams) {
-    ALOGE("%s: RAW streams maximum %u exceeds supported maximum %u",
-          __FUNCTION__, raw_stream_count, sensor_chars.max_raw_streams);
-    return false;
+  for (const auto& stalling_count : stalling_stream_count) {
+    if (stalling_count.second >
+        sensor_chars.at(stalling_count.first).max_stalling_streams) {
+      ALOGE("%s: Stalling streams maximum %u exceeds supported maximum %u",
+            __FUNCTION__, stalling_count.second,
+            sensor_chars.at(stalling_count.first).max_stalling_streams);
+      return false;
+    }
   }
 
-  if (processed_stream_count > sensor_chars.max_processed_streams) {
-    ALOGE("%s: Processed streams maximum %u exceeds supported maximum %u",
-          __FUNCTION__, processed_stream_count,
-          sensor_chars.max_processed_streams);
-    return false;
+  for (const auto& processed_count : processed_stream_count) {
+    if (processed_count.second >
+        sensor_chars.at(processed_count.first).max_processed_streams) {
+      ALOGE("%s: Processed streams maximum %u exceeds supported maximum %u",
+            __FUNCTION__, processed_count.second,
+            sensor_chars.at(processed_count.first).max_processed_streams);
+      return false;
+    }
   }
 
-  if (stalling_stream_count > sensor_chars.max_stalling_streams) {
-    ALOGE("%s: Stalling streams maximum %u exceeds supported maximum %u",
-          __FUNCTION__, stalling_stream_count,
-          sensor_chars.max_stalling_streams);
-    return false;
-  }
-
-  if (input_stream_count > sensor_chars.max_input_streams) {
+  if (input_stream_count > sensor_chars.at(logical_id).max_input_streams) {
     ALOGE("%s: Input stream maximum %u exceeds supported maximum %u",
-          __FUNCTION__, input_stream_count, sensor_chars.max_input_streams);
+          __FUNCTION__, input_stream_count,
+          sensor_chars.at(logical_id).max_input_streams);
     return false;
   }
 
@@ -366,7 +504,7 @@
 
   logical_camera_id_ = logical_camera_id;
   scene_ = new EmulatedScene(
-      device_chars->second.width, device_chars->second.height,
+      device_chars->second.full_res_width, device_chars->second.full_res_height,
       kElectronsPerLuxSecond, device_chars->second.orientation,
       device_chars->second.is_front_facing);
   scene_->InitializeSensorQueue();
@@ -505,16 +643,14 @@
    */
   next_capture_time_ = frame_end_real_time;
 
+  sensor_binning_factor_info_.clear();
+
   bool reprocess_request = false;
   if ((next_input_buffer.get() != nullptr) && (!next_input_buffer->empty())) {
     if (next_input_buffer->size() > 1) {
       ALOGW("%s: Reprocess supports only single input!", __FUNCTION__);
     }
-    if (next_input_buffer->at(0)->format != HAL_PIXEL_FORMAT_YCBCR_420_888) {
-      ALOGE(
-          "%s: Reprocess input format: 0x%x not supported! Skipping reprocess!",
-          __FUNCTION__, next_input_buffer->at(0)->format);
-    } else {
+
       camera_metadata_ro_entry_t entry;
       auto ret =
           next_result->result_metadata->Get(ANDROID_SENSOR_TIMESTAMP, &entry);
@@ -525,7 +661,6 @@
       }
 
       reprocess_request = true;
-    }
   }
 
   if ((next_buffers != nullptr) && (settings != nullptr)) {
@@ -556,12 +691,16 @@
         continue;
       }
 
+      sensor_binning_factor_info_[(*b)->camera_id].quad_bayer_sensor =
+          device_chars->second.quad_bayer_sensor;
+
       ALOGVV("Starting next capture: Exposure: %" PRIu64 " ms, gain: %d",
              ns2ms(device_settings->second.exposure_time),
              device_settings->second.gain);
 
-      scene_->Initialize(device_chars->second.width,
-                         device_chars->second.height, kElectronsPerLuxSecond);
+      scene_->Initialize(device_chars->second.full_res_width,
+                         device_chars->second.full_res_height,
+                         kElectronsPerLuxSecond);
       scene_->SetExposureDuration((float)device_settings->second.exposure_time /
                                   1e9);
       scene_->SetColorFilterXYZ(device_chars->second.color_filter.rX,
@@ -576,27 +715,107 @@
                                 device_chars->second.color_filter.bX,
                                 device_chars->second.color_filter.bY,
                                 device_chars->second.color_filter.bZ);
+      scene_->SetTestPattern(device_settings->second.test_pattern_mode ==
+                             ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
+      scene_->SetTestPatternData(device_settings->second.test_pattern_data);
+
       uint32_t handshake_divider =
         (device_settings->second.video_stab == ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON) ?
         kReducedSceneHandshake : kRegularSceneHandshake;
       scene_->CalculateScene(next_capture_time_, handshake_divider);
 
       (*b)->stream_buffer.status = BufferStatus::kOk;
+      bool max_res_mode = device_settings->second.sensor_pixel_mode;
+      sensor_binning_factor_info_[(*b)->camera_id].max_res_request =
+          max_res_mode;
       switch ((*b)->format) {
-        case HAL_PIXEL_FORMAT_RAW16:
+        case PixelFormat::RAW16:
+          sensor_binning_factor_info_[(*b)->camera_id].has_raw_stream = true;
+          break;
+        default:
+          sensor_binning_factor_info_[(*b)->camera_id].has_non_raw_stream = true;
+      }
+
+      // TODO: remove hack. Implement RAW -> YUV / JPEG reprocessing http://b/192382904
+      bool treat_as_reprocess =
+          (device_chars->second.quad_bayer_sensor && reprocess_request &&
+           (*next_input_buffer->begin())->format == PixelFormat::RAW16)
+              ? false
+              : reprocess_request;
+
+      switch ((*b)->format) {
+        case PixelFormat::RAW16:
           if (!reprocess_request) {
-            CaptureRaw((*b)->plane.img.img, device_settings->second.gain,
-                       (*b)->width, device_chars->second);
+            uint64_t min_full_res_raw_size =
+                2 * device_chars->second.full_res_width *
+                device_chars->second.full_res_height;
+            uint64_t min_default_raw_size =
+                2 * device_chars->second.width * device_chars->second.height;
+            bool default_mode_for_qb =
+                device_chars->second.quad_bayer_sensor && !max_res_mode;
+            size_t buffer_size = (*b)->plane.img.buffer_size;
+            if (default_mode_for_qb) {
+              if (buffer_size < min_default_raw_size) {
+                ALOGE(
+                    "%s: Output buffer size too small for RAW capture in "
+                    "default "
+                    "mode, "
+                    "expected %" PRIu64 ", got %zu, for camera id %d",
+                    __FUNCTION__, min_default_raw_size, buffer_size,
+                    (*b)->camera_id);
+                (*b)->stream_buffer.status = BufferStatus::kError;
+                break;
+              }
+            } else if (buffer_size < min_full_res_raw_size) {
+              ALOGE(
+                  "%s: Output buffer size too small for RAW capture in max res "
+                  "mode, "
+                  "expected %" PRIu64 ", got %zu, for camera id %d",
+                  __FUNCTION__, min_full_res_raw_size, buffer_size,
+                  (*b)->camera_id);
+              (*b)->stream_buffer.status = BufferStatus::kError;
+              break;
+            }
+            if (default_mode_for_qb) {
+              CaptureRawBinned(
+                  (*b)->plane.img.img, (*b)->plane.img.stride_in_bytes,
+                  device_settings->second.gain, device_chars->second);
+            } else {
+              CaptureRawFullRes(
+                  (*b)->plane.img.img, (*b)->plane.img.stride_in_bytes,
+                  device_settings->second.gain, device_chars->second);
+            }
           } else {
-            ALOGE("%s: Reprocess requests with output format %x no supported!",
+            if (!device_chars->second.quad_bayer_sensor) {
+              ALOGE(
+                  "%s: Reprocess requests with output format %x no supported!",
                   __FUNCTION__, (*b)->format);
-            (*b)->stream_buffer.status = BufferStatus::kError;
+              (*b)->stream_buffer.status = BufferStatus::kError;
+              break;
+            }
+            // Remosaic the RAW input buffer
+            if ((*next_input_buffer->begin())->width != (*b)->width ||
+                (*next_input_buffer->begin())->height != (*b)->height) {
+              ALOGE(
+                  "%s: RAW16 input dimensions %dx%d don't match output buffer "
+                  "dimensions %dx%d",
+                  __FUNCTION__, (*next_input_buffer->begin())->width,
+                  (*next_input_buffer->begin())->height, (*b)->width,
+                  (*b)->height);
+              (*b)->stream_buffer.status = BufferStatus::kError;
+              break;
+            }
+            ALOGV("%s remosaic Raw16 Image", __FUNCTION__);
+            RemosaicRAW16Image(
+                (uint16_t*)(*next_input_buffer->begin())->plane.img.img,
+                (uint16_t*)(*b)->plane.img.img, (*b)->plane.img.stride_in_bytes,
+                device_chars->second);
           }
           break;
-        case HAL_PIXEL_FORMAT_RGB_888:
+        case PixelFormat::RGB_888:
           if (!reprocess_request) {
             CaptureRGB((*b)->plane.img.img, (*b)->width, (*b)->height,
-                       (*b)->plane.img.stride, RGBLayout::RGB,
+                       (*b)->plane.img.stride_in_bytes, RGBLayout::RGB,
                        device_settings->second.gain, device_chars->second);
           } else {
             ALOGE("%s: Reprocess requests with output format %x no supported!",
@@ -604,10 +823,10 @@
             (*b)->stream_buffer.status = BufferStatus::kError;
           }
           break;
-        case HAL_PIXEL_FORMAT_RGBA_8888:
+        case PixelFormat::RGBA_8888:
           if (!reprocess_request) {
             CaptureRGB((*b)->plane.img.img, (*b)->width, (*b)->height,
-                       (*b)->plane.img.stride, RGBLayout::RGBA,
+                       (*b)->plane.img.stride_in_bytes, RGBLayout::RGBA,
                        device_settings->second.gain, device_chars->second);
           } else {
             ALOGE("%s: Reprocess requests with output format %x no supported!",
@@ -615,15 +834,16 @@
             (*b)->stream_buffer.status = BufferStatus::kError;
           }
           break;
-        case HAL_PIXEL_FORMAT_BLOB:
+        case PixelFormat::BLOB:
           if ((*b)->dataSpace == HAL_DATASPACE_V0_JFIF) {
             YUV420Frame yuv_input{
-                .width =
-                    reprocess_request ? (*next_input_buffer->begin())->width : 0,
-                .height = reprocess_request
+                .width = treat_as_reprocess
+                             ? (*next_input_buffer->begin())->width
+                             : 0,
+                .height = treat_as_reprocess
                               ? (*next_input_buffer->begin())->height
                               : 0,
-                .planes = reprocess_request
+                .planes = treat_as_reprocess
                               ? (*next_input_buffer->begin())->plane.img_y_crcb
                               : YCbCrPlanes{}};
             auto jpeg_input = std::make_unique<JpegYUV420Input>();
@@ -645,9 +865,12 @@
 
             bool rotate =
                 device_settings->second.rotate_and_crop == ANDROID_SCALER_ROTATE_AND_CROP_90;
-            ProcessType process_type = reprocess_request ? REPROCESS :
-              (device_settings->second.edge_mode == ANDROID_EDGE_MODE_HIGH_QUALITY) ?
-              HIGH_QUALITY : REGULAR;
+            ProcessType process_type =
+                treat_as_reprocess ? REPROCESS
+                                   : (device_settings->second.edge_mode ==
+                                      ANDROID_EDGE_MODE_HIGH_QUALITY)
+                                         ? HIGH_QUALITY
+                                         : REGULAR;
             auto ret = ProcessYUV420(
                 yuv_input, yuv_output, device_settings->second.gain,
                 process_type, device_settings->second.zoom_ratio,
@@ -676,14 +899,14 @@
             (*b)->stream_buffer.status = BufferStatus::kError;
           }
           break;
-        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
-        case HAL_PIXEL_FORMAT_YCbCr_420_888: {
+        case PixelFormat::YCRCB_420_SP:
+        case PixelFormat::YCBCR_420_888: {
           YUV420Frame yuv_input{
               .width =
-                  reprocess_request ? (*next_input_buffer->begin())->width : 0,
+                  treat_as_reprocess ? (*next_input_buffer->begin())->width : 0,
               .height =
-                  reprocess_request ? (*next_input_buffer->begin())->height : 0,
-              .planes = reprocess_request
+                  treat_as_reprocess ? (*next_input_buffer->begin())->height : 0,
+              .planes = treat_as_reprocess
                             ? (*next_input_buffer->begin())->plane.img_y_crcb
                             : YCbCrPlanes{}};
           YUV420Frame yuv_output{.width = (*b)->width,
@@ -691,9 +914,12 @@
                                  .planes = (*b)->plane.img_y_crcb};
           bool rotate =
               device_settings->second.rotate_and_crop == ANDROID_SCALER_ROTATE_AND_CROP_90;
-          ProcessType process_type = reprocess_request ? REPROCESS :
-            (device_settings->second.edge_mode == ANDROID_EDGE_MODE_HIGH_QUALITY) ?
-            HIGH_QUALITY : REGULAR;
+          ProcessType process_type = treat_as_reprocess
+                                         ? REPROCESS
+                                         : (device_settings->second.edge_mode ==
+                                            ANDROID_EDGE_MODE_HIGH_QUALITY)
+                                               ? HIGH_QUALITY
+                                               : REGULAR;
           auto ret = ProcessYUV420(
               yuv_input, yuv_output, device_settings->second.gain,
               process_type, device_settings->second.zoom_ratio,
@@ -702,11 +928,12 @@
             (*b)->stream_buffer.status = BufferStatus::kError;
           }
         } break;
-        case HAL_PIXEL_FORMAT_Y16:
+        case PixelFormat::Y16:
           if (!reprocess_request) {
             if ((*b)->dataSpace == HAL_DATASPACE_DEPTH) {
               CaptureDepth((*b)->plane.img.img, device_settings->second.gain,
-                           (*b)->width, (*b)->height, (*b)->plane.img.stride,
+                           (*b)->width, (*b)->height,
+                           (*b)->plane.img.stride_in_bytes,
                            device_chars->second);
             } else {
               ALOGE("%s: Format %x with dataspace %x is TODO", __FUNCTION__,
@@ -719,6 +946,21 @@
             (*b)->stream_buffer.status = BufferStatus::kError;
           }
           break;
+        case PixelFormat::YCBCR_P010:
+            if (!reprocess_request) {
+              bool rotate = device_settings->second.rotate_and_crop ==
+                            ANDROID_SCALER_ROTATE_AND_CROP_90;
+              CaptureYUV420((*b)->plane.img_y_crcb, (*b)->width, (*b)->height,
+                            device_settings->second.gain,
+                            device_settings->second.zoom_ratio, rotate,
+                            device_chars->second);
+            } else {
+              ALOGE(
+                  "%s: Reprocess requests with output format %x no supported!",
+                  __FUNCTION__, (*b)->format);
+              (*b)->stream_buffer.status = BufferStatus::kError;
+            }
+            break;
         default:
           ALOGE("%s: Unknown format %x, no output", __FUNCTION__, (*b)->format);
           (*b)->stream_buffer.status = BufferStatus::kError;
@@ -751,7 +993,8 @@
   // the occasional bump during 'ReturnResults' should not have any
   // noticeable effect.
   if ((work_done_real_time + kReturnResultThreshod) > frame_end_real_time) {
-    ReturnResults(callback, std::move(settings), std::move(next_result));
+    ReturnResults(callback, std::move(settings), std::move(next_result),
+                  reprocess_request);
   }
 
   work_done_real_time = systemTime();
@@ -771,7 +1014,8 @@
   ALOGVV("Frame cycle took %" PRIu64 "  ms, target %" PRIu64 " ms",
          ns2ms(end_real_time - start_real_time), ns2ms(frame_duration));
 
-  ReturnResults(callback, std::move(settings), std::move(next_result));
+  ReturnResults(callback, std::move(settings), std::move(next_result),
+                reprocess_request);
 
   return true;
 };
@@ -779,7 +1023,7 @@
 void EmulatedSensor::ReturnResults(
     HwlPipelineCallback callback,
     std::unique_ptr<LogicalCameraSettings> settings,
-    std::unique_ptr<HwlPipelineResult> result) {
+    std::unique_ptr<HwlPipelineResult> result, bool reprocess_request) {
   if ((callback.process_pipeline_result != nullptr) &&
       (result.get() != nullptr) && (result->result_metadata.get() != nullptr)) {
     auto logical_settings = settings->find(logical_camera_id_);
@@ -794,9 +1038,20 @@
             logical_camera_id_);
       return;
     }
-
     result->result_metadata->Set(ANDROID_SENSOR_TIMESTAMP, &next_capture_time_,
                                  1);
+    uint8_t raw_binned_factor_used = false;
+    if (sensor_binning_factor_info_.find(logical_camera_id_) !=
+        sensor_binning_factor_info_.end()) {
+      auto& info = sensor_binning_factor_info_[logical_camera_id_];
+      // Logical stream was included in the request
+      if (!reprocess_request && info.quad_bayer_sensor && info.max_res_request &&
+          info.has_raw_stream && !info.has_non_raw_stream) {
+        raw_binned_factor_used = true;
+      }
+      result->result_metadata->Set(ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
+                                   &raw_binned_factor_used, 1);
+    }
     if (logical_settings->second.lens_shading_map_mode ==
         ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON) {
       if ((device_chars->second.lens_shading_map_size[0] > 0) &&
@@ -847,7 +1102,19 @@
                 __FUNCTION__, it.first);
           continue;
         }
-
+        uint8_t raw_binned_factor_used = false;
+        if (sensor_binning_factor_info_.find(it.first) !=
+            sensor_binning_factor_info_.end()) {
+          auto& info = sensor_binning_factor_info_[it.first];
+          // physical stream was included in the request
+          if (!reprocess_request && info.quad_bayer_sensor &&
+              info.max_res_request && info.has_raw_stream &&
+              !info.has_non_raw_stream) {
+            raw_binned_factor_used = true;
+          }
+          it.second->Set(ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
+                         &raw_binned_factor_used, 1);
+        }
         // Sensor timestamp for all physical devices must be the same.
         it.second->Set(ANDROID_SENSOR_TIMESTAMP, &next_capture_time_, 1);
         if (physical_settings->second.report_neutral_color_point) {
@@ -892,24 +1159,151 @@
   }
 }
 
-void EmulatedSensor::CaptureRaw(uint8_t* img, uint32_t gain, uint32_t width,
-                                const SensorCharacteristics& chars) {
+EmulatedScene::ColorChannels EmulatedSensor::GetQuadBayerColor(uint32_t x,
+                                                               uint32_t y) {
+  // Row within larger set of quad bayer filter
+  uint32_t row_mod = y % 4;
+  // Column within larger set of quad bayer filter
+  uint32_t col_mod = x % 4;
+
+  // Row is within the left quadrants of a quad bayer sensor
+  if (row_mod < 2) {
+    if (col_mod < 2) {
+      return EmulatedScene::ColorChannels::R;
+    }
+    return EmulatedScene::ColorChannels::Gr;
+  } else {
+    if (col_mod < 2) {
+      return EmulatedScene::ColorChannels::Gb;
+    }
+    return EmulatedScene::ColorChannels::B;
+  }
+}
+
+void EmulatedSensor::RemosaicQuadBayerBlock(uint16_t* img_in, uint16_t* img_out,
+                                            int xstart, int ystart,
+                                            int row_stride_in_bytes) {
+  uint32_t quad_block_copy_idx_map[16] = {0, 2, 1, 3, 8,  10, 6,  11,
+                                          4, 9, 5, 7, 12, 14, 13, 15};
+  uint16_t quad_block_copy[16];
+  uint32_t i = 0;
+  for (uint32_t row = 0; row < 4; row++) {
+    uint16_t* quad_bayer_row =
+        img_in + (ystart + row) * (row_stride_in_bytes / 2) + xstart;
+    for (uint32_t j = 0; j < 4; j++, i++) {
+      quad_block_copy[i] = quad_bayer_row[j];
+    }
+  }
+
+  for (uint32_t row = 0; row < 4; row++) {
+    uint16_t* regular_bayer_row =
+        img_out + (ystart + row) * (row_stride_in_bytes / 2) + xstart;
+    for (uint32_t j = 0; j < 4; j++, i++) {
+      uint32_t idx = quad_block_copy_idx_map[row + 4 * j];
+      regular_bayer_row[j] = quad_block_copy[idx];
+    }
+  }
+}
+
+status_t EmulatedSensor::RemosaicRAW16Image(uint16_t* img_in, uint16_t* img_out,
+                                            size_t row_stride_in_bytes,
+                                            const SensorCharacteristics& chars) {
+  // Remosaic operates on 4x4 quad-bayer blocks, so both dimensions must be
+  // multiples of 4 (not just 2) or RemosaicQuadBayerBlock overruns the buffer.
+  if (chars.full_res_width % 4 != 0 || chars.full_res_height % 4 != 0) {
+    ALOGE(
+        "%s RAW16 Image with quad CFA, height %zu and width %zu, not multiples "
+        "of 4",
+        __FUNCTION__, chars.full_res_height, chars.full_res_width);
+    return BAD_VALUE;
+  }
+  for (uint32_t i = 0; i < chars.full_res_width; i += 4) {
+    for (uint32_t j = 0; j < chars.full_res_height; j += 4) {
+      RemosaicQuadBayerBlock(img_in, img_out, i, j, row_stride_in_bytes);
+    }
+  }
+  return OK;
+}
+
+void EmulatedSensor::CaptureRawBinned(uint8_t* img, size_t row_stride_in_bytes,
+                                      uint32_t gain,
+                                      const SensorCharacteristics& chars) {
+  ATRACE_CALL();
+  // inc = how many pixels to skip while reading every next pixel
+  float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
+  float noise_var_gain = total_gain * total_gain;
+  float read_noise_var =
+      kReadNoiseVarBeforeGain * noise_var_gain + kReadNoiseVarAfterGain;
+  int bayer_select[4] = {EmulatedScene::R, EmulatedScene::Gr, EmulatedScene::Gb,
+                         EmulatedScene::B};
+  scene_->SetReadoutPixel(0, 0);
+  for (unsigned int out_y = 0; out_y < chars.height; out_y++) {
+    // Stride still stays width since the buffer is binned size.
+    int* bayer_row = bayer_select + (out_y & 0x1) * 2;
+    uint16_t* px = (uint16_t*)img + out_y * (row_stride_in_bytes / 2);
+    for (unsigned int out_x = 0; out_x < chars.width; out_x++) {
+      int color_idx = bayer_row[out_x & 0x1];
+      uint16_t raw_count = 0;
+      // Color  filter will be the same for each quad.
+      uint32_t electron_count = 0;
+      int x, y;
+      float norm_x = (float)out_x / chars.width;
+      float norm_y = (float)out_y / chars.height;
+      x = static_cast<int>(chars.full_res_width * norm_x);
+      y = static_cast<int>(chars.full_res_height * norm_y);
+
+      x = std::min(std::max(x, 0), (int)chars.full_res_width - 1);
+      y = std::min(std::max(y, 0), (int)chars.full_res_height - 1);
+
+      scene_->SetReadoutPixel(x, y);
+
+      const uint32_t* pixel = scene_->GetPixelElectrons();
+      electron_count = pixel[color_idx];
+      // TODO: Better pixel saturation curve?
+      electron_count = (electron_count < kSaturationElectrons)
+                           ? electron_count
+                           : kSaturationElectrons;
+
+      // TODO: Better A/D saturation curve?
+      raw_count = electron_count * total_gain;
+      raw_count =
+          (raw_count < chars.max_raw_value) ? raw_count : chars.max_raw_value;
+
+      // Calculate noise value
+      // TODO: Use more-correct Gaussian instead of uniform noise
+      float photon_noise_var = electron_count * noise_var_gain;
+      float noise_stddev = sqrtf_approx(read_noise_var + photon_noise_var);
+      // Scaled to roughly match gaussian/uniform noise stddev
+      float noise_sample = rand_r(&rand_seed_) * (2.5 / (1.0 + RAND_MAX)) - 1.25;
+
+      raw_count += chars.black_level_pattern[color_idx];
+      raw_count += noise_stddev * noise_sample;
+      *px++ = raw_count;
+    }
+  }
+  ALOGVV("Binned RAW sensor image captured");
+}
+
+void EmulatedSensor::CaptureRawFullRes(uint8_t* img, size_t row_stride_in_bytes,
+                                       uint32_t gain,
+                                       const SensorCharacteristics& chars) {
   ATRACE_CALL();
   float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
   float noise_var_gain = total_gain * total_gain;
   float read_noise_var =
       kReadNoiseVarBeforeGain * noise_var_gain + kReadNoiseVarAfterGain;
-  //
+
+  scene_->SetReadoutPixel(0, 0);
   // RGGB
   int bayer_select[4] = {EmulatedScene::R, EmulatedScene::Gr, EmulatedScene::Gb,
                          EmulatedScene::B};
-  scene_->SetReadoutPixel(0, 0);
-  for (unsigned int y = 0; y < chars.height; y++) {
+
+  for (unsigned int y = 0; y < chars.full_res_height; y++) {
     int* bayer_row = bayer_select + (y & 0x1) * 2;
-    uint16_t* px = (uint16_t*)img + y * width;
-    for (unsigned int x = 0; x < chars.width; x++) {
+    uint16_t* px = (uint16_t*)img + y * (row_stride_in_bytes / 2);
+    for (unsigned int x = 0; x < chars.full_res_width; x++) {
+      int color_idx = chars.quad_bayer_sensor ? GetQuadBayerColor(x, y)
+                                              : bayer_row[x & 0x1];
       uint32_t electron_count;
-      electron_count = scene_->GetPixelElectrons()[bayer_row[x & 0x1]];
+      electron_count = scene_->GetPixelElectrons()[color_idx];
 
       // TODO: Better pixel saturation curve?
       electron_count = (electron_count < kSaturationElectrons)
@@ -928,7 +1322,7 @@
       // Scaled to roughly match gaussian/uniform noise stddev
       float noise_sample = rand_r(&rand_seed_) * (2.5 / (1.0 + RAND_MAX)) - 1.25;
 
-      raw_count += chars.black_level_pattern[bayer_row[x & 0x1]];
+      raw_count += chars.black_level_pattern[color_idx];
       raw_count += noise_stddev * noise_sample;
 
       *px++ = raw_count;
@@ -946,13 +1340,14 @@
   float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
   // In fixed-point math, calculate total scaling from electrons to 8bpp
   int scale64x = 64 * total_gain * 255 / chars.max_raw_value;
-  uint32_t inc_h = ceil((float)chars.width / width);
-  uint32_t inc_v = ceil((float)chars.height / height);
+  uint32_t inc_h = ceil((float)chars.full_res_width / width);
+  uint32_t inc_v = ceil((float)chars.full_res_height / height);
 
-  for (unsigned int y = 0, outy = 0; y < chars.height; y += inc_v, outy++) {
+  for (unsigned int y = 0, outy = 0; y < chars.full_res_height;
+       y += inc_v, outy++) {
     scene_->SetReadoutPixel(0, y);
     uint8_t* px = img + outy * stride;
-    for (unsigned int x = 0; x < chars.width; x += inc_h) {
+    for (unsigned int x = 0; x < chars.full_res_width; x += inc_h) {
       uint32_t r_count, g_count, b_count;
       // TODO: Perfect demosaicing is a cheat
       const uint32_t* pixel = scene_->GetPixelElectrons();
@@ -1033,16 +1428,16 @@
       float norm_x = out_x / (width * zoom_ratio);
       float norm_y = out_y / (height * zoom_ratio);
       if (rotate) {
-        x = static_cast<int>(chars.width *
+        x = static_cast<int>(chars.full_res_width *
                              (norm_rot_left - norm_y * norm_rot_width));
-        y = static_cast<int>(chars.height *
+        y = static_cast<int>(chars.full_res_height *
                              (norm_rot_top + norm_x * norm_rot_height));
       } else {
-        x = static_cast<int>(chars.width * (norm_left_top + norm_x));
-        y = static_cast<int>(chars.height * (norm_left_top + norm_y));
+        x = static_cast<int>(chars.full_res_width * (norm_left_top + norm_x));
+        y = static_cast<int>(chars.full_res_height * (norm_left_top + norm_y));
       }
-      x = std::min(std::max(x, 0), (int)chars.width - 1);
-      y = std::min(std::max(y, 0), (int)chars.height - 1);
+      x = std::min(std::max(x, 0), (int)chars.full_res_width - 1);
+      y = std::min(std::max(y, 0), (int)chars.full_res_height - 1);
       scene_->SetReadoutPixel(x, y);
 
       int32_t r_count, g_count, b_count;
@@ -1061,16 +1456,38 @@
       g_count = gamma_table_[g_count];
       b_count = gamma_table_[b_count];
 
-      *px_y++ = (rgb_to_y[0] * r_count + rgb_to_y[1] * g_count +
-                 rgb_to_y[2] * b_count) /
-                scale_out_sq;
+      uint8_t y8 = (rgb_to_y[0] * r_count + rgb_to_y[1] * g_count +
+                    rgb_to_y[2] * b_count) /
+                   scale_out_sq;
+      if (yuv_layout.bytesPerPixel == 1) {
+        *px_y = y8;
+      } else if (yuv_layout.bytesPerPixel == 2) {
+        *(reinterpret_cast<uint16_t*>(px_y)) = htole16(y8 << 8);
+      } else {
+        ALOGE("%s: Unsupported bytes per pixel value: %zu", __func__,
+              yuv_layout.bytesPerPixel);
+        return;
+      }
+      px_y += yuv_layout.bytesPerPixel;
+
       if (out_y % 2 == 0 && out_x % 2 == 0) {
-        *px_cb = (rgb_to_cb[0] * r_count + rgb_to_cb[1] * g_count +
-                  rgb_to_cb[2] * b_count + rgb_to_cb[3]) /
-                 scale_out_sq;
-        *px_cr = (rgb_to_cr[0] * r_count + rgb_to_cr[1] * g_count +
-                  rgb_to_cr[2] * b_count + rgb_to_cr[3]) /
-                 scale_out_sq;
+        uint8_t cb8 = (rgb_to_cb[0] * r_count + rgb_to_cb[1] * g_count +
+                       rgb_to_cb[2] * b_count + rgb_to_cb[3]) /
+                      scale_out_sq;
+        uint8_t cr8 = (rgb_to_cr[0] * r_count + rgb_to_cr[1] * g_count +
+                       rgb_to_cr[2] * b_count + rgb_to_cr[3]) /
+                      scale_out_sq;
+        if (yuv_layout.bytesPerPixel == 1) {
+          *px_cb = cb8;
+          *px_cr = cr8;
+        } else if (yuv_layout.bytesPerPixel == 2) {
+          *(reinterpret_cast<uint16_t*>(px_cb)) = htole16(cb8 << 8);
+          *(reinterpret_cast<uint16_t*>(px_cr)) = htole16(cr8 << 8);
+        } else {
+          ALOGE("%s: Unsupported bytes per pixel value: %zu", __func__,
+                yuv_layout.bytesPerPixel);
+          return;
+        }
         px_cr += yuv_layout.cbcr_step;
         px_cb += yuv_layout.cbcr_step;
       }
@@ -1086,13 +1503,14 @@
   float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
   // In fixed-point math, calculate scaling factor to 13bpp millimeters
   int scale64x = 64 * total_gain * 8191 / chars.max_raw_value;
-  uint32_t inc_h = ceil((float)chars.width / width);
-  uint32_t inc_v = ceil((float)chars.height / height);
+  uint32_t inc_h = ceil((float)chars.full_res_width / width);
+  uint32_t inc_v = ceil((float)chars.full_res_height / height);
 
-  for (unsigned int y = 0, out_y = 0; y < chars.height; y += inc_v, out_y++) {
+  for (unsigned int y = 0, out_y = 0; y < chars.full_res_height;
+       y += inc_v, out_y++) {
     scene_->SetReadoutPixel(0, y);
     uint16_t* px = (uint16_t*)(img + (out_y * stride));
-    for (unsigned int x = 0; x < chars.width; x += inc_h) {
+    for (unsigned int x = 0; x < chars.full_res_width; x += inc_h) {
       uint32_t depth_count;
       // TODO: Make up real depth scene instead of using green channel
       // as depth
diff --git a/devices/EmulatedCamera/hwl/EmulatedSensor.h b/devices/EmulatedCamera/hwl/EmulatedSensor.h
index c49164d..3141b8b 100644
--- a/devices/EmulatedCamera/hwl/EmulatedSensor.h
+++ b/devices/EmulatedCamera/hwl/EmulatedSensor.h
@@ -116,6 +116,8 @@
 struct SensorCharacteristics {
   size_t width = 0;
   size_t height = 0;
+  size_t full_res_width = 0;
+  size_t full_res_height = 0;
   nsecs_t exposure_time_range[2] = {0};
   nsecs_t frame_duration_range[2] = {0};
   int32_t sensitivity_range[2] = {0};
@@ -134,6 +136,7 @@
   uint32_t max_pipeline_depth = 0;
   uint32_t orientation = 0;
   bool is_front_facing = false;
+  bool quad_bayer_sensor = false;
 };
 
 // Maps logical/physical camera ids to sensor characteristics
@@ -160,14 +163,29 @@
       return true;
     }
 
+    if (HAL_PIXEL_FORMAT_RAW16 == input_format &&
+        HAL_PIXEL_FORMAT_RAW16 == output_format) {
+      return true;
+    }
+
     return false;
   }
 
   static bool AreCharacteristicsSupported(
       const SensorCharacteristics& characteristics);
+
   static bool IsStreamCombinationSupported(
-      const StreamConfiguration& config, StreamConfigurationMap& map,
-      const SensorCharacteristics& sensor_chars);
+      uint32_t logical_id, const StreamConfiguration& config,
+      StreamConfigurationMap& map, StreamConfigurationMap& max_resolution_map,
+      const PhysicalStreamConfigurationMap& physical_map,
+      const PhysicalStreamConfigurationMap& physical_map_max_resolution,
+      const LogicalCharacteristics& sensor_chars);
+
+  static bool IsStreamCombinationSupported(
+      uint32_t logical_id, const StreamConfiguration& config,
+      StreamConfigurationMap& map,
+      const PhysicalStreamConfigurationMap& physical_map,
+      const LogicalCharacteristics& sensor_chars, bool is_max_res = false);
 
   /*
    * Power control
@@ -195,6 +213,9 @@
     uint8_t video_stab = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
     bool report_edge_mode = false;
     uint8_t edge_mode = ANDROID_EDGE_MODE_OFF;
+    uint8_t sensor_pixel_mode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
+    uint8_t test_pattern_mode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
+    uint32_t test_pattern_data[4] = {0, 0, 0, 0};
   };
 
   // Maps physical and logical camera ids to individual device settings
@@ -293,10 +314,33 @@
 
   nsecs_t next_capture_time_;
 
+  struct SensorBinningFactorInfo {
+    bool has_raw_stream = false;
+    bool has_non_raw_stream = false;
+    bool quad_bayer_sensor = false;
+    bool max_res_request = false;
+  };
+
+  std::map<uint32_t, SensorBinningFactorInfo> sensor_binning_factor_info_;
+
   sp<EmulatedScene> scene_;
 
-  void CaptureRaw(uint8_t* img, uint32_t gain, uint32_t width,
-                  const SensorCharacteristics& chars);
+  static EmulatedScene::ColorChannels GetQuadBayerColor(uint32_t x, uint32_t y);
+
+  static void RemosaicQuadBayerBlock(uint16_t* img_in, uint16_t* img_out,
+                                     int xstart, int ystart,
+                                     int row_stride_in_bytes);
+
+  static status_t RemosaicRAW16Image(uint16_t* img_in, uint16_t* img_out,
+                                     size_t row_stride_in_bytes,
+                                     const SensorCharacteristics& chars);
+
+  void CaptureRawBinned(uint8_t* img, size_t row_stride_in_bytes, uint32_t gain,
+                        const SensorCharacteristics& chars);
+
+  void CaptureRawFullRes(uint8_t* img, size_t row_stride_in_bytes,
+                         uint32_t gain, const SensorCharacteristics& chars);
+
   enum RGBLayout { RGB, RGBA, ARGB };
   void CaptureRGB(uint8_t* img, uint32_t width, uint32_t height,
                   uint32_t stride, RGBLayout layout, uint32_t gain,
@@ -328,7 +372,8 @@
 
   void ReturnResults(HwlPipelineCallback callback,
                      std::unique_ptr<LogicalCameraSettings> settings,
-                     std::unique_ptr<HwlPipelineResult> result);
+                     std::unique_ptr<HwlPipelineResult> result,
+                     bool reprocess_request);
 
   static float GetBaseGainFactor(float max_raw_value) {
     return max_raw_value / EmulatedSensor::kSaturationElectrons;
diff --git a/devices/EmulatedCamera/hwl/JpegCompressor.cpp b/devices/EmulatedCamera/hwl/JpegCompressor.cpp
index a92b4b0..57530ad 100644
--- a/devices/EmulatedCamera/hwl/JpegCompressor.cpp
+++ b/devices/EmulatedCamera/hwl/JpegCompressor.cpp
@@ -65,7 +65,7 @@
   ATRACE_CALL();
 
   if ((job->input.get() == nullptr) || (job->output.get() == nullptr) ||
-      (job->output->format != HAL_PIXEL_FORMAT_BLOB) ||
+      (job->output->format != PixelFormat::BLOB) ||
       (job->output->dataSpace != HAL_DATASPACE_V0_JFIF)) {
     ALOGE("%s: Unable to find buffers for JPEG source/destination",
           __FUNCTION__);
diff --git a/devices/EmulatedCamera/hwl/configs/emu_camera_back.json b/devices/EmulatedCamera/hwl/configs/emu_camera_back.json
index 87bd847..3f1e61e 100644
--- a/devices/EmulatedCamera/hwl/configs/emu_camera_back.json
+++ b/devices/EmulatedCamera/hwl/configs/emu_camera_back.json
@@ -48,11 +48,16 @@
    "0", 
    "1", 
    "1856", 
+   "1392", 
+   "64", 
+   "1856", 
    "1392" 
  ], 
  "android.control.availableExtendedSceneModeZoomRatioRanges": [
     "1.0", 
-    "4.0"
+    "4.0", 
+    "1.0", 
+    "4.0" 
  ], 
  "android.control.availableEffects": [
   "0"
@@ -506,6 +511,34 @@
   "1", 
   "1600", 
   "1200", 
+  "33331760", 
+  "34", 
+  "176", 
+  "144", 
+  "33331760", 
+  "35", 
+  "176", 
+  "144", 
+  "33331760", 
+  "33", 
+  "176", 
+  "144", 
+  "33331760", 
+  "34", 
+  "1024", 
+  "768", 
+  "33331760", 
+  "35", 
+  "1024", 
+  "768", 
+  "33331760", 
+  "33", 
+  "1024", 
+  "768", 
+  "33331760", 
+  "54", 
+  "1024", 
+  "768", 
   "33331760"
  ], 
  "android.scaler.availableStallDurations": [
@@ -521,6 +554,10 @@
   "1280", 
   "720", 
   "33331760", 
+  "33", 
+  "1024", 
+  "768", 
+  "33331760", 
   "34", 
   "160", 
   "120", 
@@ -572,6 +609,10 @@
   "1", 
   "1600", 
   "1200", 
+  "0", 
+  "54", 
+  "1024", 
+  "768", 
   "0"
  ], 
  "android.scaler.availableStreamConfigurations": [
@@ -646,6 +687,34 @@
   "INPUT", 
   "1600", 
   "1200", 
+  "OUTPUT", 
+  "34", 
+  "176", 
+  "144", 
+  "OUTPUT", 
+  "35", 
+  "176", 
+  "144", 
+  "OUTPUT", 
+  "33", 
+  "176", 
+  "144", 
+  "OUTPUT", 
+  "34", 
+  "1024", 
+  "768", 
+  "OUTPUT", 
+  "35", 
+  "1024", 
+  "768", 
+  "OUTPUT", 
+  "33", 
+  "1024", 
+  "768", 
+  "OUTPUT", 
+  "54", 
+  "1024", 
+  "768", 
   "OUTPUT"
  ], 
  "android.scaler.croppingType": [
@@ -712,7 +781,9 @@
   "1024"
  ], 
  "android.sensor.availableTestPatternModes": [
-  "0"
+  "0", 
+  "1", 
+  "5"
  ], 
  "android.sensor.blackLevelPattern": [
   "64", 
diff --git a/devices/EmulatedCamera/hwl/configs/emu_camera_depth.json b/devices/EmulatedCamera/hwl/configs/emu_camera_depth.json
index 8a53d72..e5909a7 100644
--- a/devices/EmulatedCamera/hwl/configs/emu_camera_depth.json
+++ b/devices/EmulatedCamera/hwl/configs/emu_camera_depth.json
@@ -468,7 +468,7 @@
   "TRUE"
  ], 
  "android.sensor.info.maxFrameDuration": [
-  "199985184"
+  "200000000"
  ], 
  "android.sensor.info.physicalSize": [
   "0.67199999", 
diff --git a/devices/EmulatedCamera/hwl/configs/emu_camera_front.json b/devices/EmulatedCamera/hwl/configs/emu_camera_front.json
index 1e8d69b..e406561 100644
--- a/devices/EmulatedCamera/hwl/configs/emu_camera_front.json
+++ b/devices/EmulatedCamera/hwl/configs/emu_camera_front.json
@@ -176,7 +176,8 @@
   "android.edge.availableEdgeModes": [
    "1", 
    "2", 
-   "0"
+   "0",
+   "3"
   ], 
   "android.flash.info.available": [
    "TRUE"
@@ -210,7 +211,7 @@
    "240"
   ], 
   "android.jpeg.maxSize": [
-   "300000"
+   "3000000"
   ], 
   "android.lens.distortion": [
    "0.08807813", 
@@ -272,8 +273,6 @@
    "51", 
    "0", 
    "52", 
-   "0", 
-   "53", 
    "0"
   ], 
   "android.logicalMultiCamera.sensorSyncType": [
@@ -283,10 +282,8 @@
    "0", 
    "1", 
    "2", 
-   "3"
-  ], 
-  "android.reprocess.maxCaptureStall": [
-   "2"
+   "3",
+   "4"
   ], 
   "android.request.availableCapabilities": [
    "BACKWARD_COMPATIBLE", 
@@ -294,7 +291,35 @@
    "MANUAL_SENSOR", 
    "BURST_CAPTURE", 
    "MANUAL_POST_PROCESSING", 
-   "LOGICAL_MULTI_CAMERA"
+   "LOGICAL_MULTI_CAMERA", 
+   "PRIVATE_REPROCESSING",
+   "ULTRA_HIGH_RESOLUTION_SENSOR",
+   "YUV_REPROCESSING", 
+   "RAW",
+   "REMOSAIC_REPROCESSING"
+  ], 
+  "android.request.maxNumInputStreams": [
+   "1"
+  ], 
+  "android.scaler.availableInputOutputFormatsMap": [
+   "34", 
+   "2", 
+   "33", 
+   "35", 
+   "35", 
+   "2", 
+   "33", 
+   "35"
+  ], 
+  "android.scaler.availableInputOutputFormatsMapMaximumResolution" : [
+   "32",
+   "3",
+   "32",
+   "33",
+   "35"
+  ], 
+  "android.reprocess.maxCaptureStall": [
+   "2"
   ], 
   "android.request.availableCharacteristicsKeys": [
    "4", 
@@ -460,7 +485,8 @@
    "1245186", 
    "1245187", 
    "1441792", 
-   "851985"
+   "851985",
+   "917536"
   ], 
   "android.request.availableResultKeys": [
    "0", 
@@ -535,10 +561,8 @@
    "1114123",
    "1703938", 
    "917530", 
-   "851985"
-  ], 
-  "android.request.maxNumInputStreams": [
-   "0"
+   "851985",
+   "917536"
   ], 
   "android.request.maxNumOutputStreams": [
    "1", 
@@ -558,6 +582,10 @@
    "8.00000000"
   ], 
   "android.scaler.availableMinFrameDurations": [
+   "32",
+   "1920",
+   "1440",
+   "33331760",
    "34", 
    "1920", 
    "1440", 
@@ -739,7 +767,25 @@
    "144", 
    "33331760"
   ], 
+"android.scaler.availableMinFrameDurationsMaximumResolution": [
+   "32",
+   "6048",
+   "4024",
+   "33331760",
+   "33",
+   "6048",
+   "4024",
+   "33331760",
+   "35",
+   "6048",
+   "4024",
+   "33331760"
+  ],
   "android.scaler.availableStallDurations": [
+   "32",
+   "1920",
+   "1440",
+   "17971200",
    "33", 
    "1920", 
    "1440", 
@@ -801,7 +847,25 @@
    "144", 
    "164736"
   ], 
+  "android.scaler.availableStallDurationsMaximumResolution": [
+   "32",
+   "6048",
+   "4024",
+   "287539200",
+   "33",
+   "6048",
+   "4024",
+   "287539200"
+   ],
   "android.scaler.availableStreamConfigurations": [
+   "32",
+   "1920",
+   "1440",
+   "OUTPUT",
+   "35", 
+   "1920", 
+   "1440", 
+   "INPUT", 
    "35", 
    "1920", 
    "1440", 
@@ -865,6 +929,10 @@
    "34", 
    "1920", 
    "1440", 
+   "INPUT", 
+   "34", 
+   "1920", 
+   "1440", 
    "OUTPUT", 
    "34", 
    "1920", 
@@ -979,15 +1047,33 @@
    "240", 
    "OUTPUT"
   ], 
+  "android.scaler.availableStreamConfigurationsMaximumResolution": [
+   "32",
+   "6048",
+   "4024",
+   "INPUT",
+   "32",
+   "6048",
+   "4024",
+   "OUTPUT",
+   "33", 
+   "6048", 
+   "4024", 
+   "OUTPUT",
+   "35", 
+   "6048", 
+   "4024", 
+   "OUTPUT"
+  ],
   "android.scaler.croppingType": [
    "CENTER_ONLY"
   ], 
+  "android.scaler.multiResolutionStreamSupported": [
+   "TRUE"
+  ],
   "android.sensor.availableTestPatternModes": [
    "0", 
-   "1", 
-   "2", 
-   "3", 
-   "4"
+   "5"
   ], 
   "android.sensor.blackLevelPattern": [
    "64", 
@@ -1121,6 +1207,12 @@
    "1920", 
    "1440"
   ], 
+  "android.sensor.info.activeArraySizeMaximumResolution": [
+   "0", 
+   "0", 
+   "6048", 
+   "4024"
+  ], 
   "android.sensor.info.colorFilterArrangement": [
    "RGGB"
   ], 
@@ -1148,6 +1240,20 @@
    "1920", 
    "1440"
   ], 
+  "android.sensor.info.pixelArraySizeMaximumResolution": [
+   "6048", 
+   "4024"
+  ], 
+  "android.sensor.info.binningFactor": [
+   "2",
+   "2"
+  ],
+  "android.sensor.info.preCorrectionActiveArraySizeMaximumResolution": [
+   "0", 
+   "0", 
+   "6048", 
+   "4024"
+  ], 
   "android.sensor.info.sensitivityRange": [
    "100", 
    "1000"
@@ -1189,11 +1295,15 @@
    "0"
   ], 
   "android.statistics.info.availableLensShadingMapModes": [
-   "0"
+   "0",
+   "1"
   ], 
   "android.statistics.info.maxFaceCount": [
    "10"
   ], 
+  "android.info.supportedBufferManagementVersion" : [
+    "HIDL_DEVICE_3_5" 
+  ], 
   "android.sync.maxLatency": [
    "PER_FRAME_CONTROL"
   ], 
@@ -1383,7 +1493,8 @@
   "android.edge.availableEdgeModes": [
    "1", 
    "2", 
-   "0"
+   "0", 
+   "3" 
   ], 
   "android.flash.info.available": [
    "TRUE"
@@ -1417,7 +1528,7 @@
    "240"
   ], 
   "android.jpeg.maxSize": [
-   "300000"
+   "3000000"
   ], 
   "android.lens.distortion": [
    "0.08807813", 
@@ -1480,17 +1591,41 @@
    "0", 
    "1", 
    "2", 
-   "3"
-  ], 
-  "android.reprocess.maxCaptureStall": [
-   "2"
+   "3", 
+   "4"
   ], 
   "android.request.availableCapabilities": [
    "BACKWARD_COMPATIBLE", 
    "READ_SENSOR_SETTINGS", 
    "MANUAL_SENSOR", 
    "BURST_CAPTURE", 
-   "MANUAL_POST_PROCESSING"
+   "MANUAL_POST_PROCESSING",
+   "PRIVATE_REPROCESSING",
+   "ULTRA_HIGH_RESOLUTION_SENSOR",
+   "YUV_REPROCESSING", 
+   "RAW",
+   "REMOSAIC_REPROCESSING"
+  ], 
+  "android.request.maxNumInputStreams": [
+   "1"
+  ], 
+  "android.scaler.availableInputOutputFormatsMap": [
+   "34", 
+   "2", 
+   "33", 
+   "35", 
+   "35", 
+   "2", 
+   "33", 
+   "35"
+  ], 
+  "android.scaler.availableInputOutputFormatsMapMaximumResolution" : [
+   "32",
+   "1",
+   "32"
+  ],
+  "android.reprocess.maxCaptureStall": [
+   "2"
   ], 
   "android.request.availableCharacteristicsKeys": [
    "4", 
@@ -1596,7 +1731,8 @@
    "524295", 
    "524294", 
    "524298", 
-   "851984"
+   "851984", 
+   "851986"
   ], 
   "android.request.availableRequestKeys": [
    "0", 
@@ -1656,7 +1792,8 @@
    "1245186", 
    "1245187", 
    "1441792", 
-   "851985"
+   "851985",
+   "917536"
   ], 
   "android.request.availableResultKeys": [
    "0", 
@@ -1730,10 +1867,8 @@
    "65552",
    "1114123",
    "917530", 
-   "851985"
-  ], 
-  "android.request.maxNumInputStreams": [
-   "0"
+   "851985",
+   "917536"
   ], 
   "android.request.maxNumOutputStreams": [
    "1", 
@@ -1753,6 +1888,10 @@
    "8.00000000"
   ], 
   "android.scaler.availableMinFrameDurations": [
+   "32",
+   "1920",
+   "1440",
+   "33331760",
    "34", 
    "1920", 
    "1440", 
@@ -1934,7 +2073,25 @@
    "144", 
    "33331760"
   ], 
+  "android.scaler.availableMinFrameDurationsMaximumResolution": [
+   "32",
+   "6048",
+   "4024",
+   "33331760",
+   "33",
+   "6048",
+   "4024",
+   "33331760",
+   "35",
+   "6048",
+   "4024",
+   "33331760"
+  ],
   "android.scaler.availableStallDurations": [
+   "32",
+   "1920",
+   "1440",
+   "17971200",
    "33", 
    "1920", 
    "1440", 
@@ -1996,7 +2153,29 @@
    "144", 
    "164736"
   ], 
+  "android.scaler.availableStallDurationsMaximumResolution": [
+   "32",
+   "6048",
+   "4024",
+   "287539200",
+   "33",
+   "6048",
+   "4024",
+   "287539200"
+   ],
   "android.scaler.availableStreamConfigurations": [
+   "32",
+   "1920",
+   "1440",
+   "OUTPUT",
+   "35", 
+   "1920", 
+   "1440", 
+   "INPUT", 
+   "34", 
+   "1920", 
+   "1440", 
+   "INPUT", 
    "34", 
    "1920", 
    "1440", 
@@ -2178,15 +2357,52 @@
    "144", 
    "OUTPUT"
   ], 
+  "android.scaler.availableStreamConfigurationsMaximumResolution": [
+   "32",
+   "6048",
+   "4024",
+   "INPUT",
+   "32",
+   "6048",
+   "4024",
+   "OUTPUT",
+   "33", 
+   "6048", 
+   "4024", 
+   "OUTPUT",
+   "35", 
+   "6048", 
+   "4024", 
+   "OUTPUT"
+  ],
   "android.scaler.croppingType": [
    "CENTER_ONLY"
   ], 
+  "android.scaler.physicalCameraMultiResolutionStreamConfigurations": [
+   "34", 
+   "1920", 
+   "1440", 
+   "INPUT", 
+   "35", 
+   "1920", 
+   "1440", 
+   "INPUT",
+   "33", 
+   "1920", 
+   "1440", 
+   "OUTPUT", 
+   "34", 
+   "1920", 
+   "1440", 
+   "OUTPUT", 
+   "35", 
+   "1920", 
+   "1440", 
+   "OUTPUT"
+  ], 
   "android.sensor.availableTestPatternModes": [
    "0", 
-   "1", 
-   "2", 
-   "3", 
-   "4"
+   "5"
   ], 
   "android.sensor.blackLevelPattern": [
    "64", 
@@ -2320,6 +2536,12 @@
    "1920", 
    "1440"
   ], 
+  "android.sensor.info.activeArraySizeMaximumResolution": [
+   "0", 
+   "0", 
+   "6048", 
+   "4024"
+  ], 
   "android.sensor.info.colorFilterArrangement": [
    "RGGB"
   ], 
@@ -2347,6 +2569,20 @@
    "1920", 
    "1440"
   ], 
+  "android.sensor.info.pixelArraySizeMaximumResolution": [
+   "6048", 
+   "4024"
+  ], 
+  "android.sensor.info.preCorrectionActiveArraySizeMaximumResolution": [
+   "0", 
+   "0", 
+   "6048", 
+   "4024"
+  ], 
+  "android.sensor.info.binningFactor": [
+   "2",
+   "2"
+  ],
   "android.sensor.info.sensitivityRange": [
    "100", 
    "1000"
@@ -2388,11 +2624,15 @@
    "0"
   ], 
   "android.statistics.info.availableLensShadingMapModes": [
-   "0"
+   "0",
+   "1"
   ], 
   "android.statistics.info.maxFaceCount": [
    "10"
   ], 
+  "android.info.supportedBufferManagementVersion": [
+    "HIDL_DEVICE_3_5" 
+  ], 
   "android.sync.maxLatency": [
    "PER_FRAME_CONTROL"
   ], 
@@ -2582,7 +2822,8 @@
   "android.edge.availableEdgeModes": [
    "1", 
    "2", 
-   "0"
+   "0",
+   "3"
   ], 
   "android.flash.info.available": [
    "TRUE"
@@ -2616,7 +2857,7 @@
    "240"
   ], 
   "android.jpeg.maxSize": [
-   "300000"
+   "3000000"
   ], 
   "android.lens.distortion": [
    "0.27679554", 
@@ -2679,17 +2920,41 @@
    "0", 
    "1", 
    "2", 
-   "3"
-  ], 
-  "android.reprocess.maxCaptureStall": [
-   "2"
+   "3",
+   "4"
   ], 
   "android.request.availableCapabilities": [
    "BACKWARD_COMPATIBLE", 
    "READ_SENSOR_SETTINGS", 
    "MANUAL_SENSOR", 
    "BURST_CAPTURE", 
-   "MANUAL_POST_PROCESSING"
+   "MANUAL_POST_PROCESSING",
+   "PRIVATE_REPROCESSING", 
+   "ULTRA_HIGH_RESOLUTION_SENSOR",
+   "YUV_REPROCESSING", 
+   "RAW",
+   "REMOSAIC_REPROCESSING"
+  ], 
+  "android.request.maxNumInputStreams": [
+   "1"
+  ], 
+  "android.scaler.availableInputOutputFormatsMap": [
+   "34", 
+   "2", 
+   "33", 
+   "35", 
+   "35", 
+   "2", 
+   "33", 
+   "35"
+  ], 
+  "android.scaler.availableInputOutputFormatsMapMaximumResolution": [
+   "32",
+   "1",
+   "32"
+  ],
+  "android.reprocess.maxCaptureStall": [
+   "2"
   ], 
   "android.request.availableCharacteristicsKeys": [
    "4", 
@@ -2795,7 +3060,8 @@
    "524295", 
    "524294", 
    "524298", 
-   "851984"
+   "851984", 
+   "851986"
   ], 
   "android.request.availableRequestKeys": [
    "0", 
@@ -2855,7 +3121,8 @@
    "1245186", 
    "1245187", 
    "1441792", 
-   "851985"
+   "851985",
+   "917536"
   ], 
   "android.request.availableResultKeys": [
    "0", 
@@ -2929,10 +3196,8 @@
    "65552",
    "1114123",
    "917530", 
-   "851985"
-  ], 
-  "android.request.maxNumInputStreams": [
-   "0"
+   "851985",
+   "917536"
   ], 
   "android.request.maxNumOutputStreams": [
    "1", 
@@ -2952,6 +3217,26 @@
    "8.00000000"
   ], 
   "android.scaler.availableMinFrameDurations": [
+   "32",
+   "2048",
+   "1536",
+   "33331760",
+   "34", 
+   "2048", 
+   "1536", 
+   "33331760", 
+   "35", 
+   "2048", 
+   "1536", 
+   "33331760", 
+   "33", 
+   "2048", 
+   "1536", 
+   "33331760", 
+   "32",
+   "1920",
+   "1440",
+   "33331760",
    "34", 
    "1920", 
    "1440", 
@@ -3133,7 +3418,33 @@
    "144", 
    "33331760"
   ], 
+  "android.scaler.availableMinFrameDurationsMaximumResolution": [
+   "32",
+   "6048",
+   "4024",
+   "33331760",
+   "33",
+   "6048",
+   "4024",
+   "33331760",
+   "35",
+   "6048",
+   "4024",
+   "33331760"
+  ],
   "android.scaler.availableStallDurations": [
+   "32",
+   "2048",
+   "1536",
+   "17971200",
+   "33", 
+   "2048", 
+   "1536", 
+   "17971200", 
+   "32",
+   "1920",
+   "1440",
+   "17971200",
    "33", 
    "1920", 
    "1440", 
@@ -3195,8 +3506,46 @@
    "144", 
    "164736"
   ], 
+  "android.scaler.availableStallDurationsMaximumResolution": [
+   "32",
+   "6048",
+   "4024",
+   "287539200",
+   "33",
+   "6048",
+   "4024",
+   "287539200"
+  ],
   "android.scaler.availableStreamConfigurations": [
    "34", 
+   "2048", 
+   "1536", 
+   "INPUT", 
+   "35", 
+   "2048", 
+   "1536", 
+   "INPUT", 
+   "32",
+   "2048",
+   "1536",
+   "OUTPUT",
+   "34", 
+   "2048", 
+   "1536", 
+   "OUTPUT", 
+   "35", 
+   "2048", 
+   "1536", 
+   "OUTPUT", 
+   "33", 
+   "2048", 
+   "1536", 
+   "OUTPUT", 
+   "32",
+   "1920",
+   "1440",
+   "OUTPUT",
+   "34", 
    "1920", 
    "1440", 
    "OUTPUT", 
@@ -3373,15 +3722,52 @@
    "144", 
    "OUTPUT"
   ], 
+  "android.scaler.availableStreamConfigurationsMaximumResolution": [
+   "32",
+   "6048",
+   "4024",
+   "INPUT",
+   "32",
+   "6048",
+   "4024",
+   "OUTPUT",
+   "33", 
+   "6048", 
+   "4024", 
+   "OUTPUT",
+   "35", 
+   "6048", 
+   "4024", 
+   "OUTPUT"
+  ],
   "android.scaler.croppingType": [
    "CENTER_ONLY"
   ], 
+  "android.scaler.physicalCameraMultiResolutionStreamConfigurations": [
+   "34", 
+   "2048", 
+   "1536", 
+   "INPUT", 
+   "35", 
+   "2048", 
+   "1536", 
+   "INPUT",
+   "33", 
+   "2048", 
+   "1536", 
+   "OUTPUT", 
+   "34", 
+   "2048", 
+   "1536", 
+   "OUTPUT", 
+   "35", 
+   "2048", 
+   "1536", 
+   "OUTPUT"
+  ], 
   "android.sensor.availableTestPatternModes": [
    "0", 
-   "1", 
-   "2", 
-   "3", 
-   "4"
+   "5"
   ], 
   "android.sensor.blackLevelPattern": [
    "64", 
@@ -3512,8 +3898,14 @@
   "android.sensor.info.activeArraySize": [
    "0", 
    "0", 
-   "1920", 
-   "1440"
+   "2048", 
+   "1536"
+  ], 
+  "android.sensor.info.activeArraySizeMaximumResolution": [
+   "0", 
+   "0", 
+   "6048", 
+   "4024"
   ], 
   "android.sensor.info.colorFilterArrangement": [
    "RGGB"
@@ -3533,1210 +3925,29 @@
    "3.49600005"
   ], 
   "android.sensor.info.pixelArraySize": [
-   "1920", 
-   "1440"
+   "2048", 
+   "1536"
   ], 
   "android.sensor.info.preCorrectionActiveArraySize": [
    "0", 
    "0", 
-   "1920", 
-   "1440"
+   "2048", 
+   "1536"
   ], 
-  "android.sensor.info.sensitivityRange": [
-   "100", 
-   "1000"
+  "android.sensor.info.pixelArraySizeMaximumResolution": [
+   "6048", 
+   "4024"
   ], 
-  "android.sensor.info.timestampSource": [
-   "REALTIME"
-  ], 
-  "android.sensor.info.whiteLevel": [
-   "1023"
-  ], 
-  "android.sensor.maxAnalogSensitivity": [
-   "640"
-  ], 
-  "android.sensor.orientation": [
-   "270"
-  ], 
-  "android.sensor.profileHueSatMapDimensions": [
+  "android.sensor.info.preCorrectionActiveArraySizeMaximumResolution": [
    "0", 
    "0", 
-   "0"
+   "6048", 
+   "4024"
   ], 
-  "android.sensor.referenceIlluminant1": [
-   "D65"
-  ], 
-  "android.sensor.referenceIlluminant2": [
-   "17"
-  ], 
-  "android.shading.availableModes": [
-   "0", 
-   "1", 
-   "2"
-  ], 
-  "android.statistics.info.availableFaceDetectModes": [
-   "0", 
-   "1", 
-   "2"
-  ], 
-  "android.statistics.info.availableHotPixelMapModes": [
-   "0"
-  ], 
-  "android.statistics.info.availableLensShadingMapModes": [
-   "0"
-  ], 
-  "android.statistics.info.maxFaceCount": [
-   "10"
-  ], 
-  "android.sync.maxLatency": [
-   "PER_FRAME_CONTROL"
-  ], 
-  "android.tonemap.availableToneMapModes": [
-   "0", 
-   "1", 
-   "2"
-  ], 
-  "android.tonemap.maxCurvePoints": [
-   "64"
-  ]
- }, 
- {
-  "android.colorCorrection.availableAberrationModes": [
-   "0", 
-   "1", 
-   "2"
-  ], 
-  "android.control.aeAvailableAntibandingModes": [
-   "0", 
-   "1", 
-   "2", 
-   "3"
-  ], 
-  "android.control.aeAvailableModes": [
-   "0", 
-   "1", 
-   "2", 
-   "3", 
-   "4"
-  ], 
-  "android.control.aeAvailableTargetFpsRanges": [
-   "15", 
-   "15", 
-   "15", 
-   "30", 
-   "7", 
-   "30", 
-   "30", 
-   "30"
-  ], 
-  "android.control.aeCompensationRange": [
-   "-24", 
-   "24"
-  ], 
-  "android.control.aeCompensationStep": [
-   "1", 
-   "6"
-  ], 
-  "android.control.aeLockAvailable": [
-   "TRUE"
-  ], 
-  "android.control.afAvailableModes": [
-   "0", 
-   "1", 
-   "2", 
-   "3", 
-   "4"
-  ], 
-  "android.control.availableExtendedSceneModeMaxSizes": [
-   "0", 
-   "0", 
-   "0", 
-   "1", 
-   "1920", 
-   "1080", 
-   "2", 
-   "1920", 
-   "1440" 
-  ], 
-  "android.control.availableExtendedSceneModeZoomRatioRanges": [
-    "2.0", 
-    "2.0", 
-    "1.0", 
-    "1.0"
-  ], 
-  "android.control.availableEffects": [
-   "0"
-  ], 
-  "android.control.availableModes": [
-   "0", 
-   "1", 
+  "android.sensor.info.binningFactor": [
    "2",
-   "4"
-  ], 
-  "android.control.availableSceneModes": [
-   "0", 
-   "1", 
-   "2", 
-   "3", 
-   "4", 
-   "5", 
-   "6", 
-   "7", 
-   "8", 
-   "9", 
-   "10", 
-   "12", 
-   "13", 
-   "14", 
-   "15", 
-   "18"
-  ], 
-  "android.control.availableVideoStabilizationModes": [
-   "0"
-  ], 
-  "android.control.awbAvailableModes": [
-   "1", 
-   "2", 
-   "3", 
-   "4", 
-   "5", 
-   "6", 
-   "7", 
-   "8", 
-   "0"
-  ], 
-  "android.control.awbLockAvailable": [
-   "TRUE"
-  ], 
-  "android.control.maxRegions": [
-   "1", 
-   "0", 
-   "1"
-  ], 
-  "android.control.postRawSensitivityBoostRange": [
-   "100", 
-   "100"
-  ], 
-  "android.control.sceneModeOverrides": [
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0", 
-   "1", 
-   "1", 
-   "0"
-  ], 
-  "android.control.zoomRatioRange": [
-   "1.0", 
-   "8.0" 
-  ], 
-  "android.distortionCorrection.availableModes": [
-   "0"
-  ], 
-  "android.edge.availableEdgeModes": [
-   "1", 
-   "2", 
-   "0"
-  ], 
-  "android.flash.info.available": [
-   "TRUE"
-  ], 
-  "android.hotPixel.availableHotPixelModes": [
-   "0", 
-   "1", 
    "2"
-  ], 
-  "android.info.supportedHardwareLevel": [
-   "FULL"
-  ], 
-  "android.jpeg.availableThumbnailSizes": [
-   "0", 
-   "0", 
-   "176", 
-   "144", 
-   "240", 
-   "144", 
-   "256", 
-   "144", 
-   "240", 
-   "160", 
-   "256", 
-   "154", 
-   "246", 
-   "184", 
-   "240", 
-   "240", 
-   "320", 
-   "240"
-  ], 
-  "android.jpeg.maxSize": [
-   "300000"
-  ], 
-  "android.lens.distortion": [
-   "0.27679554", 
-   "-1.56508207", 
-   "3.02522445", 
-   "0.00000000", 
-   "0.00000000"
-  ], 
-  "android.lens.facing": [
-   "FRONT"
-  ], 
-  "android.lens.info.availableApertures": [
-   "2.40000010"
-  ], 
-  "android.lens.info.availableFilterDensities": [
-   "0.00000000"
-  ], 
-  "android.lens.info.availableFocalLengths": [
-   "5.84000015"
-  ], 
-  "android.lens.info.availableOpticalStabilization": [
-   "0", 
-   "1"
-  ], 
-  "android.lens.info.focusDistanceCalibration": [
-   "APPROXIMATE"
-  ], 
-  "android.lens.info.hyperfocalDistance": [
-   "0.14073935"
-  ], 
-  "android.lens.info.minimumFocusDistance": [
-   "10.10101032"
-  ], 
-  "android.lens.info.shadingMapSize": [
-   "17", 
-   "13"
-  ], 
-  "android.lens.intrinsicCalibration": [
-   "5941.24902344", 
-   "5941.24902344", 
-   "960.14233398", 
-   "540.47375488", 
-   "0.00000000"
-  ], 
-  "android.lens.poseReference": [
-   "PRIMARY_CAMERA"
-  ], 
-  "android.lens.poseRotation": [
-   "-0.00032215", 
-   "0.00118852", 
-   "-0.00006529", 
-   "0.99999928"
-  ], 
-  "android.lens.poseTranslation": [
-   "0.00034755", 
-   "0.01267981", 
-   "-0.00284645"
-  ], 
-  "android.noiseReduction.availableNoiseReductionModes": [
-   "0", 
-   "1", 
-   "2", 
-   "3"
-  ], 
-  "android.reprocess.maxCaptureStall": [
-   "2"
-  ], 
-  "android.request.availableCapabilities": [
-   "BACKWARD_COMPATIBLE", 
-   "READ_SENSOR_SETTINGS", 
-   "MANUAL_SENSOR", 
-   "BURST_CAPTURE", 
-   "MANUAL_POST_PROCESSING"
-  ], 
-  "android.request.availableCharacteristicsKeys": [
-   "4", 
-   "65554", 
-   "65555", 
-   "65556", 
-   "65557", 
-   "65558", 
-   "65559", 
-   "65560", 
-   "65561", 
-   "65562", 
-   "65563", 
-   "65564", 
-   "65565", 
-   "65571", 
-   "65572", 
-   "65573", 
-   "65574", 
-   "65575", 
-   "65579", 
-   "65580", 
-   "65582", 
-   "1638402", 
-   "1638401", 
-   "1638403", 
-   "1638404", 
-   "1769473", 
-   "196610", 
-   "327680", 
-   "393217", 
-   "458759", 
-   "458760", 
-   "589824", 
-   "589825", 
-   "589826", 
-   "589827", 
-   "589828", 
-   "589829", 
-   "589830", 
-   "589831", 
-   "524293", 
-   "1703936", 
-   "1703937", 
-   "1703938", 
-   "655362", 
-   "786438", 
-   "786440", 
-   "786442", 
-   "786443", 
-   "786444", 
-   "786445", 
-   "786446", 
-   "786447", 
-   "786448", 
-   "851972", 
-   "851977", 
-   "851978", 
-   "851979", 
-   "851976", 
-   "851980", 
-   "851981", 
-   "983040", 
-   "917526", 
-   "917523", 
-   "917534", 
-   "983041", 
-   "983042", 
-   "983043", 
-   "983044", 
-   "983045", 
-   "983046", 
-   "983047", 
-   "983048", 
-   "983049", 
-   "983050", 
-   "917507", 
-   "917508", 
-   "917509", 
-   "917510", 
-   "917511", 
-   "917512", 
-   "917513", 
-   "917514", 
-   "917516", 
-   "917517", 
-   "917518", 
-   "917519", 
-   "917529", 
-   "1048578", 
-   "1179648", 
-   "1179650", 
-   "1179654", 
-   "1179655", 
-   "1179656", 
-   "1245188", 
-   "1245189", 
-   "1376256", 
-   "1507329", 
-   "1572865", 
-   "524300", 
-   "524301", 
-   "524295", 
-   "524294", 
-   "524298", 
-   "851984"
-  ], 
-  "android.request.availableRequestKeys": [
-   "0", 
-   "1", 
-   "2", 
-   "3", 
-   "65536", 
-   "65537", 
-   "65538", 
-   "65539", 
-   "65540", 
-   "65541", 
-   "65542", 
-   "65543", 
-   "65544", 
-   "65545", 
-   "65546", 
-   "65547", 
-   "65549", 
-   "65550", 
-   "65551", 
-   "65552", 
-   "65553", 
-   "65576", 
-   "65581", 
-   "65583", 
-   "1769472", 
-   "196608", 
-   "262146", 
-   "393216", 
-   "458752", 
-   "458753", 
-   "458754", 
-   "458755", 
-   "458756", 
-   "458757", 
-   "458758", 
-   "524288", 
-   "524289", 
-   "524290", 
-   "524291", 
-   "524292", 
-   "655360", 
-   "786433", 
-   "851968", 
-   "917504", 
-   "917505", 
-   "917506", 
-   "917520", 
-   "1048576", 
-   "1114112", 
-   "1114115", 
-   "1114128", 
-   "1114129", 
-   "1245184", 
-   "1245185", 
-   "1245186", 
-   "1245187", 
-   "1441792", 
-   "851985"
-  ], 
-  "android.request.availableResultKeys": [
-   "0", 
-   "1", 
-   "2", 
-   "65537", 
-   "65539", 
-   "65540", 
-   "65567", 
-   "65543", 
-   "65544", 
-   "65568", 
-   "65547", 
-   "65570", 
-   "65551", 
-   "65576", 
-   "65581", 
-   "65583", 
-   "196608", 
-   "262146", 
-   "262149", 
-   "458752", 
-   "458753", 
-   "458754", 
-   "458755", 
-   "458756", 
-   "458757", 
-   "458758", 
-   "524288", 
-   "524289", 
-   "524290", 
-   "524291", 
-   "524296", 
-   "524297", 
-   "524294", 
-   "524295", 
-   "524298", 
-   "524301", 
-   "524292", 
-   "655360", 
-   "786433", 
-   "851968", 
-   "917526", 
-   "917523", 
-   "917504", 
-   "917505", 
-   "917506", 
-   "917520", 
-   "917522", 
-   "917525", 
-   "1048576", 
-   "1114112", 
-   "1114121", 
-   "1114124", 
-   "1114125", 
-   "1114126", 
-   "1114118", 
-   "1114119", 
-   "1114129", 
-   "1114130", 
-   "1114131", 
-   "1114132", 
-   "1245184", 
-   "1245185", 
-   "1245186", 
-   "1245187", 
-   "1441792", 
-   "65541",
-   "65542",
-   "65545",
-   "65552",
-   "1114123",
-   "917530", 
-   "851985"
-  ], 
-  "android.request.maxNumInputStreams": [
-   "0"
-  ], 
-  "android.request.maxNumOutputStreams": [
-   "1", 
-   "3", 
-   "2"
-  ], 
- "android.scaler.availableRotateAndCropModes": [
-   "0"
-  ], 
-  "android.request.partialResultCount": [
-   "1"
-  ], 
-  "android.request.pipelineMaxDepth": [
-   "8"
-  ], 
-  "android.scaler.availableMaxDigitalZoom": [
-   "8.00000000"
-  ], 
-  "android.scaler.availableMinFrameDurations": [
-   "34", 
-   "1920", 
-   "1440", 
-   "33331760", 
-   "35", 
-   "1920", 
-   "1440", 
-   "33331760", 
-   "33", 
-   "1920", 
-   "1440", 
-   "33331760", 
-   "34", 
-   "1920", 
-   "1080", 
-   "33331760", 
-   "35", 
-   "1920", 
-   "1080", 
-   "33331760", 
-   "33", 
-   "1920", 
-   "1080", 
-   "33331760", 
-   "34", 
-   "1920", 
-   "960", 
-   "33331760", 
-   "35", 
-   "1920", 
-   "960", 
-   "33331760", 
-   "33", 
-   "1920", 
-   "960", 
-   "33331760", 
-   "34", 
-   "1600", 
-   "1200", 
-   "33331760", 
-   "35", 
-   "1600", 
-   "1200", 
-   "33331760", 
-   "33", 
-   "1600", 
-   "1200", 
-   "33331760", 
-   "34", 
-   "1440", 
-   "1080", 
-   "33331760", 
-   "35", 
-   "1440", 
-   "1080", 
-   "33331760", 
-   "33", 
-   "1440", 
-   "1080", 
-   "33331760", 
-   "34", 
-   "1280", 
-   "960", 
-   "33331760", 
-   "35", 
-   "1280", 
-   "960", 
-   "33331760", 
-   "33", 
-   "1280", 
-   "960", 
-   "33331760", 
-   "34", 
-   "1280", 
-   "720", 
-   "33331760", 
-   "35", 
-   "1280", 
-   "720", 
-   "33331760", 
-   "33", 
-   "1280", 
-   "720", 
-   "33331760", 
-   "34", 
-   "1024", 
-   "768", 
-   "33331760", 
-   "35", 
-   "1024", 
-   "768", 
-   "33331760", 
-   "33", 
-   "1024", 
-   "768", 
-   "33331760", 
-   "34", 
-   "800", 
-   "600", 
-   "33331760", 
-   "35", 
-   "800", 
-   "600", 
-   "33331760", 
-   "33", 
-   "800", 
-   "600", 
-   "33331760", 
-   "34", 
-   "720", 
-   "480", 
-   "33331760", 
-   "35", 
-   "720", 
-   "480", 
-   "33331760", 
-   "33", 
-   "720", 
-   "480", 
-   "33331760", 
-   "34", 
-   "640", 
-   "480", 
-   "33331760", 
-   "35", 
-   "640", 
-   "480", 
-   "33331760", 
-   "33", 
-   "640", 
-   "480", 
-   "33331760", 
-   "34", 
-   "640", 
-   "360", 
-   "33331760", 
-   "35", 
-   "640", 
-   "360", 
-   "33331760", 
-   "33", 
-   "640", 
-   "360", 
-   "33331760", 
-   "34", 
-   "352", 
-   "288", 
-   "33331760", 
-   "35", 
-   "352", 
-   "288", 
-   "33331760", 
-   "33", 
-   "352", 
-   "288", 
-   "33331760", 
-   "34", 
-   "320", 
-   "240", 
-   "33331760", 
-   "35", 
-   "320", 
-   "240", 
-   "33331760", 
-   "33", 
-   "320", 
-   "240", 
-   "33331760", 
-   "34", 
-   "176", 
-   "144", 
-   "33331760", 
-   "35", 
-   "176", 
-   "144", 
-   "33331760", 
-   "33", 
-   "176", 
-   "144", 
-   "33331760"
-  ], 
-  "android.scaler.availableStallDurations": [
-   "33", 
-   "1920", 
-   "1440", 
-   "17971200", 
-   "33", 
-   "1920", 
-   "1080", 
-   "13478400", 
-   "33", 
-   "1920", 
-   "960", 
-   "11980800", 
-   "33", 
-   "1600", 
-   "1200", 
-   "12480000", 
-   "33", 
-   "1440", 
-   "1080", 
-   "10108800", 
-   "33", 
-   "1280", 
-   "960", 
-   "7987200", 
-   "33", 
-   "1280", 
-   "720", 
-   "5990400", 
-   "33", 
-   "1024", 
-   "768", 
-   "5111808", 
-   "33", 
-   "800", 
-   "600", 
-   "3120000", 
-   "33", 
-   "720", 
-   "480", 
-   "2246400", 
-   "33", 
-   "640", 
-   "480", 
-   "1996800", 
-   "33", 
-   "640", 
-   "360", 
-   "1497600", 
-   "33", 
-   "352", 
-   "288", 
-   "658944", 
-   "33", 
-   "320", 
-   "240", 
-   "499200", 
-   "33", 
-   "176", 
-   "144", 
-   "164736"
-  ], 
-  "android.scaler.availableStreamConfigurations": [
-   "34", 
-   "1920", 
-   "1440", 
-   "OUTPUT", 
-   "35", 
-   "1920", 
-   "1440", 
-   "OUTPUT", 
-   "33", 
-   "1920", 
-   "1440", 
-   "OUTPUT", 
-   "34", 
-   "1920", 
-   "1080", 
-   "OUTPUT", 
-   "35", 
-   "1920", 
-   "1080", 
-   "OUTPUT", 
-   "33", 
-   "1920", 
-   "1080", 
-   "OUTPUT", 
-   "34", 
-   "1920", 
-   "960", 
-   "OUTPUT", 
-   "35", 
-   "1920", 
-   "960", 
-   "OUTPUT", 
-   "33", 
-   "1920", 
-   "960", 
-   "OUTPUT", 
-   "34", 
-   "1600", 
-   "1200", 
-   "OUTPUT", 
-   "35", 
-   "1600", 
-   "1200", 
-   "OUTPUT", 
-   "33", 
-   "1600", 
-   "1200", 
-   "OUTPUT", 
-   "34", 
-   "1440", 
-   "1080", 
-   "OUTPUT", 
-   "35", 
-   "1440", 
-   "1080", 
-   "OUTPUT", 
-   "33", 
-   "1440", 
-   "1080", 
-   "OUTPUT", 
-   "34", 
-   "1280", 
-   "960", 
-   "OUTPUT", 
-   "35", 
-   "1280", 
-   "960", 
-   "OUTPUT", 
-   "33", 
-   "1280", 
-   "960", 
-   "OUTPUT", 
-   "34", 
-   "1280", 
-   "720", 
-   "OUTPUT", 
-   "35", 
-   "1280", 
-   "720", 
-   "OUTPUT", 
-   "33", 
-   "1280", 
-   "720", 
-   "OUTPUT", 
-   "34", 
-   "1024", 
-   "768", 
-   "OUTPUT", 
-   "35", 
-   "1024", 
-   "768", 
-   "OUTPUT", 
-   "33", 
-   "1024", 
-   "768", 
-   "OUTPUT", 
-   "34", 
-   "800", 
-   "600", 
-   "OUTPUT", 
-   "35", 
-   "800", 
-   "600", 
-   "OUTPUT", 
-   "33", 
-   "800", 
-   "600", 
-   "OUTPUT", 
-   "34", 
-   "720", 
-   "480", 
-   "OUTPUT", 
-   "35", 
-   "720", 
-   "480", 
-   "OUTPUT", 
-   "33", 
-   "720", 
-   "480", 
-   "OUTPUT", 
-   "34", 
-   "640", 
-   "480", 
-   "OUTPUT", 
-   "35", 
-   "640", 
-   "480", 
-   "OUTPUT", 
-   "33", 
-   "640", 
-   "480", 
-   "OUTPUT", 
-   "34", 
-   "640", 
-   "360", 
-   "OUTPUT", 
-   "35", 
-   "640", 
-   "360", 
-   "OUTPUT", 
-   "33", 
-   "640", 
-   "360", 
-   "OUTPUT", 
-   "34", 
-   "352", 
-   "288", 
-   "OUTPUT", 
-   "35", 
-   "352", 
-   "288", 
-   "OUTPUT", 
-   "33", 
-   "352", 
-   "288", 
-   "OUTPUT", 
-   "34", 
-   "320", 
-   "240", 
-   "OUTPUT", 
-   "35", 
-   "320", 
-   "240", 
-   "OUTPUT", 
-   "33", 
-   "320", 
-   "240", 
-   "OUTPUT", 
-   "34", 
-   "176", 
-   "144", 
-   "OUTPUT", 
-   "35", 
-   "176", 
-   "144", 
-   "OUTPUT"
-  ], 
-  "android.scaler.croppingType": [
-   "CENTER_ONLY"
-  ], 
-  "android.sensor.availableTestPatternModes": [
-   "0", 
-   "1", 
-   "2", 
-   "3", 
-   "4"
-  ], 
-  "android.sensor.blackLevelPattern": [
-   "64", 
-   "64", 
-   "64", 
-   "64"
-  ], 
-  "android.sensor.calibrationTransform1": [
-   "68", 
-   "128", 
-   "0", 
-   "128", 
-   "0", 
-   "128", 
-   "0", 
-   "128", 
-   "128", 
-   "128", 
-   "0", 
-   "128", 
-   "0", 
-   "128", 
-   "0", 
-   "128", 
-   "78", 
-   "128"
-  ], 
-  "android.sensor.calibrationTransform2": [
-   "97", 
-   "128", 
-   "0", 
-   "128", 
-   "0", 
-   "128", 
-   "0", 
-   "128", 
-   "128", 
-   "128", 
-   "0", 
-   "128", 
-   "0", 
-   "128", 
-   "0", 
-   "128", 
-   "41", 
-   "128"
-  ], 
-  "android.sensor.colorTransform1": [
-   "221", 
-   "128", 
-   "-105", 
-   "128", 
-   "-34", 
-   "128", 
-   "-124", 
-   "128", 
-   "240", 
-   "128", 
-   "5", 
-   "128", 
-   "5", 
-   "128", 
-   "-19", 
-   "128", 
-   "99", 
-   "128"
-  ], 
-  "android.sensor.colorTransform2": [
-   "360", 
-   "128", 
-   "-249", 
-   "128", 
-   "-63", 
-   "128", 
-   "-137", 
-   "128", 
-   "279", 
-   "128", 
-   "-1", 
-   "128", 
-   "7", 
-   "128", 
-   "-18", 
-   "128", 
-   "159", 
-   "128"
-  ], 
-  "android.sensor.forwardMatrix1": [
-   "56", 
-   "128", 
-   "49", 
-   "128", 
-   "18", 
-   "128", 
-   "28", 
-   "128", 
-   "92", 
-   "128", 
-   "8", 
-   "128", 
-   "2", 
-   "128", 
-   "12", 
-   "128", 
-   "91", 
-   "128"
-  ], 
-  "android.sensor.forwardMatrix2": [
-   "56", 
-   "128", 
-   "49", 
-   "128", 
-   "18", 
-   "128", 
-   "28", 
-   "128", 
-   "92", 
-   "128", 
-   "8", 
-   "128", 
-   "2", 
-   "128", 
-   "12", 
-   "128", 
-   "91", 
-   "128"
-  ], 
-  "android.sensor.info.activeArraySize": [
-   "0", 
-   "0", 
-   "1920", 
-   "1440"
-  ], 
-  "android.sensor.info.colorFilterArrangement": [
-   "RGGB"
-  ], 
-  "android.sensor.info.exposureTimeRange": [
-   "9377", 
-   "13388615424"
-  ], 
-  "android.sensor.info.lensShadingApplied": [
-   "TRUE"
-  ], 
-  "android.sensor.info.maxFrameDuration": [
-   "1319055264"
-  ], 
-  "android.sensor.info.physicalSize": [
-   "4.65600014", 
-   "3.49600005"
-  ], 
-  "android.sensor.info.pixelArraySize": [
-   "1920", 
-   "1440"
-  ], 
-  "android.sensor.info.preCorrectionActiveArraySize": [
-   "0", 
-   "0", 
-   "1920", 
-   "1440"
-  ], 
+  ],
   "android.sensor.info.sensitivityRange": [
    "100", 
    "1000"
@@ -4778,11 +3989,15 @@
    "0"
   ], 
   "android.statistics.info.availableLensShadingMapModes": [
-   "0"
+   "0", 
+   "1"
   ], 
   "android.statistics.info.maxFaceCount": [
    "10"
   ], 
+  "android.info.supportedBufferManagementVersion": [
+    "HIDL_DEVICE_3_5" 
+  ], 
   "android.sync.maxLatency": [
    "PER_FRAME_CONTROL"
   ], 
diff --git a/devices/EmulatedCamera/hwl/utils/ExifUtils.cpp b/devices/EmulatedCamera/hwl/utils/ExifUtils.cpp
index 90f769d..c310bbb 100644
--- a/devices/EmulatedCamera/hwl/utils/ExifUtils.cpp
+++ b/devices/EmulatedCamera/hwl/utils/ExifUtils.cpp
@@ -948,7 +948,8 @@
   ret = metadata.Get(ANDROID_SCALER_CROP_REGION, &entry);
   if (ret == OK) {
     if (!SetDigitalZoomRatio(entry.data.i32[2], entry.data.i32[3],
-                             sensor_chars_.width, sensor_chars_.height)) {
+                             sensor_chars_.full_res_width,
+                             sensor_chars_.full_res_height)) {
       ALOGE("%s: setting digital zoom ratio failed.", __FUNCTION__);
       return false;
     }
diff --git a/devices/EmulatedCamera/hwl/utils/HWLUtils.cpp b/devices/EmulatedCamera/hwl/utils/HWLUtils.cpp
index c2add6b..c50297e 100644
--- a/devices/EmulatedCamera/hwl/utils/HWLUtils.cpp
+++ b/devices/EmulatedCamera/hwl/utils/HWLUtils.cpp
@@ -16,31 +16,14 @@
 
 #define LOG_TAG "HWLUtils"
 #include "HWLUtils.h"
-
 #include <log/log.h>
+#include "utils.h"
 
 #include <map>
 
 namespace android {
 
-bool HasCapability(const HalCameraMetadata* metadata, uint8_t capability) {
-  if (metadata == nullptr) {
-    return false;
-  }
-
-  camera_metadata_ro_entry_t entry;
-  auto ret = metadata->Get(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
-  if (ret != OK) {
-    return false;
-  }
-  for (size_t i = 0; i < entry.count; i++) {
-    if (entry.data.u8[i] == capability) {
-      return true;
-    }
-  }
-
-  return false;
-}
+using google_camera_hal::utils::HasCapability;
 
 status_t GetSensorCharacteristics(const HalCameraMetadata* metadata,
                                   SensorCharacteristics* sensor_chars /*out*/) {
@@ -57,6 +40,16 @@
   }
   sensor_chars->width = entry.data.i32[0];
   sensor_chars->height = entry.data.i32[1];
+  sensor_chars->full_res_width = sensor_chars->width;
+  sensor_chars->full_res_height = sensor_chars->height;
+
+  ret = metadata->Get(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+                      &entry);
+  if ((ret == OK) && (entry.count == 2)) {
+    sensor_chars->full_res_width = entry.data.i32[0];
+    sensor_chars->full_res_height = entry.data.i32[1];
+    sensor_chars->quad_bayer_sensor = true;
+  }
 
   ret = metadata->Get(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, &entry);
   if ((ret != OK) || (entry.count != 3)) {
diff --git a/devices/EmulatedCamera/hwl/utils/HWLUtils.h b/devices/EmulatedCamera/hwl/utils/HWLUtils.h
index cd023c9..4ee0054 100644
--- a/devices/EmulatedCamera/hwl/utils/HWLUtils.h
+++ b/devices/EmulatedCamera/hwl/utils/HWLUtils.h
@@ -43,7 +43,6 @@
 typedef std::unique_ptr<PhysicalDeviceMap> PhysicalDeviceMapPtr;
 
 // Metadata utility functions start
-bool HasCapability(const HalCameraMetadata* metadata, uint8_t capability);
 status_t GetSensorCharacteristics(const HalCameraMetadata* metadata,
                                   SensorCharacteristics* sensor_chars /*out*/);
 PhysicalDeviceMapPtr ClonePhysicalDeviceMap(const PhysicalDeviceMapPtr& src);
diff --git a/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.cpp b/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.cpp
index 6e2121c..ce840a4 100644
--- a/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.cpp
+++ b/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.cpp
@@ -20,6 +20,40 @@
 #include <log/log.h>
 
 namespace android {
+const uint32_t kScalerStreamConfigurations =
+    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
+const uint32_t kScalerStreamConfigurationsMaxRes =
+    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+
+const uint32_t kDepthStreamConfigurations =
+    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS;
+const uint32_t kDepthStreamConfigurationsMaxRes =
+    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+
+const uint32_t kScalerMinFrameDurations =
+    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
+const uint32_t kScalerMinFrameDurationsMaxRes =
+    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+
+const uint32_t kDepthMinFrameDurations =
+    ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS;
+const uint32_t kDepthMinFrameDurationsMaxRes =
+    ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+
+const uint32_t kScalerStallDurations = ANDROID_SCALER_AVAILABLE_STALL_DURATIONS;
+const uint32_t kScalerStallDurationsMaxRes =  // must pair with STALL_DURATIONS
+    ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+
+const uint32_t kScalerInputOutputFormatsMap =
+    ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP;
+const uint32_t kScalerInputOutputFormatsMapMaxRes =
+    ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION;
+
+const uint32_t kDepthStallDurations =
+    ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS;
+const uint32_t kDepthStallDurationsMaxRes =
+    ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+
 void StreamConfigurationMap::AppendAvailableStreamConfigurations(
     const camera_metadata_ro_entry& entry) {
   for (size_t i = 0; i < entry.count; i += kStreamConfigurationSize) {
@@ -35,6 +69,22 @@
   }
 }
 
+void StreamConfigurationMap::AppendAvailableDynamicPhysicalStreamConfigurations(
+    const camera_metadata_ro_entry& entry) {
+  for (size_t i = 0; i < entry.count; i += kStreamConfigurationSize) {
+    int32_t width = entry.data.i32[i + kStreamWidthOffset];
+    int32_t height = entry.data.i32[i + kStreamHeightOffset];
+    auto format = static_cast<android_pixel_format_t>(
+        entry.data.i32[i + kStreamFormatOffset]);
+
+    // Both input and output dynamic stream sizes need to be supported as an
+    // output stream.
+    dynamic_physical_stream_output_formats_.insert(format);
+    dynamic_physical_stream_output_size_map_[format].insert(
+        std::make_pair(width, height));
+  }
+}
+
 void StreamConfigurationMap::AppendAvailableStreamMinDurations(
     const camera_metadata_ro_entry_t& entry) {
   for (size_t i = 0; i < entry.count; i += kStreamConfigurationSize) {
@@ -63,47 +113,70 @@
   }
 }
 
-StreamConfigurationMap::StreamConfigurationMap(const HalCameraMetadata& chars) {
+StreamConfigurationMap::StreamConfigurationMap(const HalCameraMetadata& chars,
+                                               bool maxResolution) {
   camera_metadata_ro_entry_t entry;
-  auto ret = chars.Get(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &entry);
+  const char* maxResolutionStr = maxResolution ? "true" : "false";
+  auto ret = chars.Get(maxResolution ? kScalerStreamConfigurationsMaxRes
+                                     : kScalerStreamConfigurations,
+                       &entry);
   if (ret != OK) {
-    ALOGW("%s: ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS missing!",
-          __FUNCTION__);
+    ALOGW(
+        "%s: ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS missing, "
+        "maxResolution ? %s!",
+        __FUNCTION__, maxResolutionStr);
     entry.count = 0;
   }
   AppendAvailableStreamConfigurations(entry);
 
-  ret = chars.Get(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, &entry);
+  ret = chars.Get(maxResolution ? kDepthStreamConfigurationsMaxRes
+                                : kDepthStreamConfigurations,
+                  &entry);
+
   if (ret == OK) {
     AppendAvailableStreamConfigurations(entry);
   }
 
-  ret = chars.Get(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, &entry);
+  ret = chars.Get(
+      maxResolution ? kScalerMinFrameDurationsMaxRes : kScalerMinFrameDurations,
+      &entry);
   if (ret != OK) {
-    ALOGW("%s: ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS missing!",
-          __FUNCTION__);
+    ALOGW(
+        "%s: ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS missing!, max "
+        "resolution ? %s",
+        __FUNCTION__, maxResolutionStr);
     entry.count = 0;
   }
   AppendAvailableStreamMinDurations(entry);
 
-  ret = chars.Get(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS, &entry);
+  ret = chars.Get(
+      maxResolution ? kDepthMinFrameDurationsMaxRes : kDepthMinFrameDurations,
+      &entry);
   if (ret == OK) {
     AppendAvailableStreamMinDurations(entry);
   }
 
-  ret = chars.Get(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, &entry);
+  ret = chars.Get(
+      maxResolution ? kScalerStallDurationsMaxRes : kScalerStallDurations,
+      &entry);
   if (ret != OK) {
-    ALOGW("%s: ANDROID_SCALER_AVAILABLE_STALL_DURATIONS missing!", __FUNCTION__);
+    ALOGW(
+        "%s: ANDROID_SCALER_AVAILABLE_STALL_DURATIONS missing! maxResolution ? "
+        "%s",
+        __FUNCTION__, maxResolutionStr);
     entry.count = 0;
   }
   AppendAvailableStreamStallDurations(entry);
 
-  ret = chars.Get(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS, &entry);
+  ret = chars.Get(
+      maxResolution ? kDepthStallDurationsMaxRes : kDepthStallDurations, &entry);
   if (ret == OK) {
     AppendAvailableStreamStallDurations(entry);
   }
 
-  ret = chars.Get(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, &entry);
+  ret = chars.Get(maxResolution ? kScalerInputOutputFormatsMapMaxRes
+                                : kScalerInputOutputFormatsMap,
+                  &entry);
   if (ret == OK) {
     size_t i = 0;
     while (i < entry.count) {
@@ -124,6 +197,13 @@
       stream_input_formats_.insert(input_format);
     }
   }
+
+  ret = chars.Get(
+      ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
+      &entry);
+  if (ret == OK) {
+    AppendAvailableDynamicPhysicalStreamConfigurations(entry);
+  }
 }
 
 }  // namespace android
diff --git a/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.h b/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.h
index 1c576b2..f359f6a 100644
--- a/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.h
+++ b/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.h
@@ -17,6 +17,7 @@
 #ifndef EMULATOR_STREAM_CONFIGURATION_MAP_H_
 #define EMULATOR_STREAM_CONFIGURATION_MAP_H_
 
+#include <memory>
 #include <set>
 #include <unordered_map>
 
@@ -53,7 +54,8 @@
 
 class StreamConfigurationMap {
  public:
-  StreamConfigurationMap(const HalCameraMetadata& chars);
+  StreamConfigurationMap(const HalCameraMetadata& chars,
+                         bool maxResolution = false);
 
   const std::set<android_pixel_format_t>& GetOutputFormats() const {
     return stream_output_formats_;
@@ -63,6 +65,16 @@
     return stream_output_size_map_[format];
   }
 
+  const std::set<android_pixel_format_t>& GetDynamicPhysicalStreamOutputFormats()
+      const {
+    return dynamic_physical_stream_output_formats_;
+  }
+
+  const std::set<StreamSize>& GetDynamicPhysicalStreamOutputSizes(
+      android_pixel_format_t format) {
+    return dynamic_physical_stream_output_size_map_[format];
+  }
+
   nsecs_t GetOutputMinFrameDuration(StreamConfig configuration) const {
     auto ret = stream_min_duration_map_.find(configuration);
     return (ret == stream_min_duration_map_.end()) ? 0 : ret->second;
@@ -88,6 +100,8 @@
 
  private:
   void AppendAvailableStreamConfigurations(const camera_metadata_ro_entry& entry);
+  void AppendAvailableDynamicPhysicalStreamConfigurations(
+      const camera_metadata_ro_entry& entry);
   void AppendAvailableStreamMinDurations(const camera_metadata_ro_entry_t& entry);
   void AppendAvailableStreamStallDurations(const camera_metadata_ro_entry& entry);
 
@@ -109,8 +123,15 @@
   std::set<android_pixel_format_t> stream_input_formats_;
   std::unordered_map<android_pixel_format_t, std::set<android_pixel_format_t>>
       stream_input_output_map_;
+
+  std::set<android_pixel_format_t> dynamic_physical_stream_output_formats_;
+  std::unordered_map<android_pixel_format_t, std::set<StreamSize>>
+      dynamic_physical_stream_output_size_map_;
 };
 
+typedef std::unordered_map<uint32_t, std::unique_ptr<StreamConfigurationMap>>
+    PhysicalStreamConfigurationMap;
+
 }  // namespace android
 
 #endif  // EMULATOR_STREAM_CONFIGURATION_MAP_H_