Clean up visibility modifiers and reorganize some classes.

This change does the following things:

* Remove redundant `public` modifiers from classes and fields.
* Move classes related to Frame metadata to `Frames.kt`
* Move classes related to Requests to `Requests.kt`

This change should have no behavioral or API-facing impact.
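
For context, Kotlin declarations are public by default, so dropping an explicit
`public` keyword does not change the effective visibility. A minimal before/after
sketch, simplified from the `AfMode` change in `CameraControls.kt` below:

    // Before: explicit modifiers that are redundant under Kotlin's defaults
    // (enum entries simplified to a literal for brevity).
    public enum class AfMode(public val value: Int) { OFF(0) }

    // After: identical public visibility, relying on the language default.
    enum class AfMode(val value: Int) { OFF(0) }

Similarly, because `Frames.kt` and `Requests.kt` live in the same
`androidx.camera.camera2.pipe` package as the declarations they absorb, existing
references such as `androidx.camera.camera2.pipe.FrameMetadata` continue to resolve
unchanged.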

Test: ./gradlew\
 :camera:camera-camera2-pipe:testDebugUnitTest\
 :camera:camera-camera2-pipe-testing:testDebugUnitTest\
 :camera:camera-camera2-pipe-integration:testDebugUnitTest\
 :camera:integration-tests:camera-testapp-camera2-pipe:testDebugUnitTest

Change-Id: Ie46189fe6ef50acb0d869f45ad5ecfdc76ab13a1
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraBackend.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraBackend.kt
index ba21764..4f9485d 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraBackend.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraBackend.kt
@@ -20,7 +20,7 @@
 
 /** This is used to uniquely identify a specific backend implementation. */
 @JvmInline
-value class CameraBackendId(public val value: String)
+value class CameraBackendId(val value: String)
 
 /**
  * A CameraBackend is used by [CameraPipe] to abstract out the lifecycle, state, and interactions
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraControls.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraControls.kt
index 7c79688..9e94da7 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraControls.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraControls.kt
@@ -27,7 +27,7 @@
 // Public controls and enums used to interact with a CameraGraph.
 
 /** An enum to match the CameraMetadata.CONTROL_AF_MODE_* constants. */
-public enum class AfMode(public val value: Int) {
+enum class AfMode(val value: Int) {
     OFF(CameraMetadata.CONTROL_AF_MODE_OFF),
     AUTO(CameraMetadata.CONTROL_AF_MODE_AUTO),
     MACRO(CameraMetadata.CONTROL_AF_MODE_MACRO),
@@ -35,49 +35,49 @@
     CONTINUOUS_PICTURE(CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE),
     EDOF(CameraMetadata.CONTROL_AF_MODE_EDOF);
 
-    public companion object {
+    companion object {
         @JvmStatic
-        public fun fromIntOrNull(value: Int): AfMode? = values().firstOrNull { it.value == value }
+        fun fromIntOrNull(value: Int): AfMode? = values().firstOrNull { it.value == value }
     }
 }
 
 /** An enum to match the CameraMetadata.CONTROL_AE_MODE_* constants. */
-public enum class AeMode(public val value: Int) {
+enum class AeMode(val value: Int) {
     OFF(CameraMetadata.CONTROL_AE_MODE_OFF),
     ON(CameraMetadata.CONTROL_AE_MODE_ON),
     ON_AUTO_FLASH(CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH),
     ON_ALWAYS_FLASH(CameraMetadata.CONTROL_AE_MODE_ON_ALWAYS_FLASH),
     ON_AUTO_FLASH_REDEYE(CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
 
-    public companion object {
+    companion object {
         @JvmStatic
-        public fun fromIntOrNull(value: Int): AeMode? = values().firstOrNull { it.value == value }
+        fun fromIntOrNull(value: Int): AeMode? = values().firstOrNull { it.value == value }
     }
 }
 
 /** An enum to match the CameraMetadata.CONTROL_AWB_MODE_* constants. */
-public enum class AwbMode(public val value: Int) {
+enum class AwbMode(val value: Int) {
     AUTO(CameraMetadata.CONTROL_AWB_MODE_AUTO),
     CLOUDY_DAYLIGHT(CameraMetadata.CONTROL_AWB_MODE_CLOUDY_DAYLIGHT),
     DAYLIGHT(CameraMetadata.CONTROL_AWB_MODE_DAYLIGHT),
     INCANDESCENT(CameraMetadata.CONTROL_AWB_MODE_INCANDESCENT),
     FLUORESCENT(CameraMetadata.CONTROL_AWB_MODE_FLUORESCENT);
 
-    public companion object {
+    companion object {
         @JvmStatic
-        public fun fromIntOrNull(value: Int): AwbMode? = values().firstOrNull { it.value == value }
+        fun fromIntOrNull(value: Int): AwbMode? = values().firstOrNull { it.value == value }
     }
 }
 
 /** An enum to match the CameraMetadata.FLASH_MODE_* constants. */
-public enum class FlashMode(public val value: Int) {
+enum class FlashMode(val value: Int) {
     OFF(CameraMetadata.FLASH_MODE_OFF),
     SINGLE(CameraMetadata.FLASH_MODE_SINGLE),
     TORCH(CameraMetadata.FLASH_MODE_TORCH);
 
-    public companion object {
+    companion object {
         @JvmStatic
-        public fun fromIntOrNull(value: Int): FlashMode? =
+        fun fromIntOrNull(value: Int): FlashMode? =
             values().firstOrNull { it.value == value }
     }
 }
@@ -92,13 +92,13 @@
  *
  * #CONTROL_AE_MODE_ON
  */
-public enum class TorchState {
+enum class TorchState {
     ON,
     OFF
 }
 
 /** Requirement to consider prior to locking auto-exposure, auto-focus and auto-whitebalance. */
-public enum class Lock3ABehavior {
+enum class Lock3ABehavior {
     /**
      * This requirement means that we want to lock the values for 3A immediately.
      *
@@ -132,14 +132,14 @@
  *   completion of the method; in that case this frameMetadata may not contain all the key-value
  *   pairs associated with the final result, i.e. the [TotalCaptureResult] of this frame.
  */
-public data class Result3A(val status: Status, val frameMetadata: FrameMetadata? = null) {
+data class Result3A(val status: Status, val frameMetadata: FrameMetadata? = null) {
     /**
      * Enum to know the status of 3A operation in case the method returns before the desired
      * operation is complete. The reason could be that the operation was taking a lot longer and an
      * enforced frame or time limit was reached, submitting the desired request to camera failed
      * etc.
      */
-    public enum class Status {
+    enum class Status {
         OK,
         FRAME_LIMIT_REACHED,
         TIME_LIMIT_REACHED,
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraDevices.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraDevices.kt
index 8aa550c..9a687b9 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraDevices.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraDevices.kt
@@ -24,7 +24,7 @@
 import kotlinx.coroutines.flow.flow
 
 /** Methods for querying, iterating, and selecting the Cameras that are available on the device. */
-public interface CameraDevices {
+interface CameraDevices {
     /**
      * Read the list of currently openable CameraIds from the provided CameraBackend, suspending if
      * needed. By default this will load the list of openable CameraIds from the default backend.
@@ -66,7 +66,7 @@
         replaceWith = ReplaceWith("awaitCameraIds"),
         level = DeprecationLevel.WARNING
     )
-    public fun findAll(): List<CameraId>
+    fun findAll(): List<CameraId>
 
     /**
      * Load the list of CameraIds from the Camera2 CameraManager, suspending if the list of
@@ -77,7 +77,7 @@
         replaceWith = ReplaceWith("getCameraIds"),
         level = DeprecationLevel.WARNING
     )
-    public suspend fun ids(): List<CameraId>
+    suspend fun ids(): List<CameraId>
 
     /**
      * Load CameraMetadata for a specific CameraId. Loading CameraMetadata can take a non-zero
@@ -89,7 +89,7 @@
         replaceWith = ReplaceWith("getCameraMetadata"),
         level = DeprecationLevel.WARNING
     )
-    public suspend fun getMetadata(camera: CameraId): CameraMetadata
+    suspend fun getMetadata(camera: CameraId): CameraMetadata
 
     /**
      * Load CameraMetadata for a specific CameraId and block the calling thread until the result is
@@ -100,14 +100,14 @@
         replaceWith = ReplaceWith("awaitCameraMetadata"),
         level = DeprecationLevel.WARNING
     )
-    public fun awaitMetadata(camera: CameraId): CameraMetadata
+    fun awaitMetadata(camera: CameraId): CameraMetadata
 }
 
 @JvmInline
-public value class CameraId(public val value: String) {
-    public companion object {
-        public inline fun fromCamera2Id(value: String): CameraId = CameraId(value)
-        public inline fun fromCamera1Id(value: Int): CameraId = CameraId("$value")
+value class CameraId(val value: String) {
+    companion object {
+        inline fun fromCamera2Id(value: String): CameraId = CameraId(value)
+        inline fun fromCamera1Id(value: Int): CameraId = CameraId("$value")
     }
 
     /**
@@ -115,8 +115,8 @@
      *
      * @return The parsed Camera1 id, or null if the value cannot be parsed as a Camera1 id.
      */
-    public inline fun toCamera1Id(): Int? = value.toIntOrNull()
-    public override fun toString(): String = "Camera $value"
+    inline fun toCamera1Id(): Int? = value.toIntOrNull()
+    override fun toString(): String = "Camera $value"
 }
 
 /**
@@ -124,7 +124,7 @@
  * metadata of cameras that are otherwise hidden. Metadata for hidden cameras are always returned
  * last.
  */
-public fun CameraDevices.find(
+fun CameraDevices.find(
     cameraBackendId: CameraBackendId? = null,
     includePhysicalCameraMetadata: Boolean = false
 ): Flow<CameraMetadata> = flow {
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraGraph.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraGraph.kt
index 2f647e8..7522c6a 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraGraph.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraGraph.kt
@@ -26,9 +26,324 @@
 import androidx.annotation.RequiresApi
 import androidx.camera.camera2.pipe.CameraGraph.Constants3A.DEFAULT_FRAME_LIMIT
 import androidx.camera.camera2.pipe.CameraGraph.Constants3A.DEFAULT_TIME_LIMIT_NS
+import androidx.camera.camera2.pipe.GraphState.GraphStateStarting
+import androidx.camera.camera2.pipe.GraphState.GraphStateStopped
+import androidx.camera.camera2.pipe.GraphState.GraphStateStopping
 import kotlinx.coroutines.Deferred
 import kotlinx.coroutines.flow.StateFlow
 
+/** A [CameraGraph] represents the combined configuration and state of a camera. */
+@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+interface CameraGraph : AutoCloseable {
+    val streams: StreamGraph
+
+    /**
+     * Returns the state flow of [GraphState], which emits the current state of the [CameraGraph],
+     * including when a [CameraGraph] is stopped, starting or started.
+     */
+    val graphState: StateFlow<GraphState>
+
+    /**
+     * This will cause the [CameraGraph] to start opening the [CameraDevice] and configuring a
+     * [CameraCaptureSession]. While the CameraGraph is alive it will attempt to keep the camera
+     * open, active, and in a configured running state.
+     */
+    fun start()
+
+    /**
+     * This will cause the [CameraGraph] to stop executing requests and close the current Camera2
+     * [CameraCaptureSession] (if one is active). The most recent repeating request will be
+     * preserved, and any calls to submit a request to a session will be enqueued. To stop requests
+     * from being enqueued, close the [CameraGraph].
+     */
+    fun stop()
+
+    /** Acquire exclusive access to the [CameraGraph] in a suspending fashion. */
+    suspend fun acquireSession(): Session
+
+    /**
+     * Try acquiring exclusive access to the [CameraGraph]. Returns null if it can't be acquired
+     * immediately.
+     */
+    fun acquireSessionOrNull(): Session?
+
+    /**
+     * This configures the camera graph to use a specific Surface for the given stream.
+     *
+     * Changing a surface may cause the camera to stall and/or reconfigure.
+     */
+    fun setSurface(stream: StreamId, surface: Surface?)
+
+    /**
+     * This defines the configuration, flags, and pre-defined structure of a [CameraGraph] instance.
+     * Note that for parameters, null is considered a valid value, and unset keys are ignored.
+     *
+     * @param camera The Camera2 [CameraId] that this [CameraGraph] represents.
+     * @param streams A list of [CameraStream]s to use when building the configuration.
+     * @param streamSharingGroups A list of [CameraStream]s to apply buffer sharing to.
+     * @param input An input configuration to support Camera2 Reprocessing.
+     * @param sessionTemplate The template id to use when creating the [CaptureRequest] to supply
+     *   the default parameters for a [SessionConfiguration] object.
+     * @param sessionParameters the extra parameters to apply to the [CaptureRequest] used to supply
+     *   the default parameters for a [SessionConfiguration] object. These parameters are *only*
+     *   used to create the [CaptureRequest] for session configuration. Use [defaultParameters] or
+     *   [requiredParameters] to enforce that the key is set for every request.
+     * @param sessionMode defines the [OperatingMode] of the session. May be used to configure a
+     *   [CameraConstrainedHighSpeedCaptureSession] for slow motion capture (If available)
+     * @param defaultTemplate The default template to be used if a [Request] does not specify one.
+     * @param defaultParameters The default parameters to be used for a [Request].
+     * @param defaultListeners A default set of listeners that will be added to every [Request].
+     * @param requiredParameters Will override any other configured parameter, and can be used to
+     *   enforce that specific keys are always set to specific value for every [CaptureRequest].
+     * @param cameraBackendId If defined, this tells the [CameraGraph] to use a specific
+     *   [CameraBackend] to open and operate the camera. The defined [camera] parameter must be a
+     *   camera that can be opened by this [CameraBackend]. If this value is null it will use the
+     *   default backend that has been configured by [CameraPipe].
+     * @param customCameraBackend If defined, this [customCameraBackend] will be created and used for
+     *   _only_ this [CameraGraph]. This cannot be defined if [cameraBackendId] is defined.
+     */
+    data class Config(
+        val camera: CameraId,
+        val streams: List<CameraStream.Config>,
+        val streamSharingGroups: List<List<CameraStream.Config>> = listOf(),
+        val input: InputStream.Config? = null,
+        val sessionTemplate: RequestTemplate = RequestTemplate(1),
+        val sessionParameters: Map<*, Any?> = emptyMap<Any, Any?>(),
+        val sessionMode: OperatingMode = OperatingMode.NORMAL,
+        val defaultTemplate: RequestTemplate = RequestTemplate(1),
+        val defaultParameters: Map<*, Any?> = emptyMap<Any, Any?>(),
+        val defaultListeners: List<Request.Listener> = listOf(),
+        val requiredParameters: Map<*, Any?> = emptyMap<Any, Any?>(),
+        val cameraBackendId: CameraBackendId? = null,
+        val customCameraBackend: CameraBackendFactory? = null,
+        val metadataTransform: MetadataTransform = MetadataTransform(),
+        val flags: Flags = Flags()
+        // TODO: Internal error handling. May be better at the CameraPipe level.
+    ) {
+        init {
+            check(cameraBackendId == null || customCameraBackend == null) {
+                "Setting both cameraBackendId and customCameraBackend is not supported."
+            }
+        }
+    }
+
+    /**
+     * Flags define boolean values that are used to adjust the behavior and interactions with
+     * camera2. These flags should default to the ideal behavior and should be overridden on
+     * specific devices to be faster or to work around bad behavior.
+     */
+    data class Flags(
+        val configureBlankSessionOnStop: Boolean = false,
+        val abortCapturesOnStop: Boolean = false,
+        val allowMultipleActiveCameras: Boolean = false
+    )
+
+    enum class OperatingMode {
+        NORMAL,
+        HIGH_SPEED,
+    }
+
+    @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+    object Constants3A {
+        // Constants related to controlling the time or frame budget a 3A operation should get.
+        const val DEFAULT_FRAME_LIMIT: Int = 60
+        const val DEFAULT_TIME_LIMIT_MS: Int = 3_000
+        const val DEFAULT_TIME_LIMIT_NS: Long = 3_000_000_000L
+
+        // Constants related to metering regions.
+        /** No metering region is specified. */
+        val METERING_REGIONS_EMPTY: Array<MeteringRectangle> = emptyArray()
+
+        /**
+         * No-op metering regions; this will tell the camera device to pick the right metering
+         * region for us.
+         */
+        val METERING_REGIONS_DEFAULT: Array<MeteringRectangle> =
+            arrayOf(MeteringRectangle(0, 0, 0, 0, 0))
+
+        /** Placeholder frame number for [Result3A] when a 3A method encounters an error. */
+        val FRAME_NUMBER_INVALID: FrameNumber = FrameNumber(-1L)
+    }
+
+    /**
+     * A [Session] is an interactive lock for [CameraGraph] and allows state to be changed.
+     *
+     * Holding this object prevents other systems from acquiring a [Session] until the currently
+     * held session is released. Because of its exclusive nature, [Session]s are intended for fast,
+     * short-lived state updates, or for interactive capture sequences that must not be altered.
+     * (Flash photo sequences, for example).
+     *
+     * While this object is thread-safe, it should not be shared or held for long periods of time.
+     * Example: A [Session] should *not* be held during video recording.
+     */
+    interface Session : AutoCloseable {
+        /**
+         * Causes the CameraGraph to start or update the current repeating request with the provided
+         * [Request] object. The [Request] object may be cached, and may be used for other
+         * interactions with the camera (such as updating 3A, or issuing 3A triggers).
+         */
+        fun startRepeating(request: Request)
+
+        /** Stop the current repeating request. */
+        fun stopRepeating()
+
+        /**
+         * Add the [Request] into an in-flight request queue. Requests will be issued to the Camera
+         * exactly once.
+         */
+        fun submit(request: Request)
+
+        /**
+         * Add the [Request] into an in-flight request queue. Requests will be issued to the Camera
+         * exactly once. The list of [Request]s is guaranteed to be submitted together.
+         */
+        fun submit(requests: List<Request>)
+
+        /**
+         * Abort in-flight requests. This will abort *all* requests in the current
+         * CameraCaptureSession as well as any requests that are enqueued, but that have not yet
+         * been submitted to the camera.
+         */
+        fun abort()
+
+        /**
+         * Applies the given 3A parameters to the camera device.
+         *
+         * @return earliest FrameNumber at which the parameters were successfully applied.
+         */
+        fun update3A(
+            aeMode: AeMode? = null,
+            afMode: AfMode? = null,
+            awbMode: AwbMode? = null,
+            aeRegions: List<MeteringRectangle>? = null,
+            afRegions: List<MeteringRectangle>? = null,
+            awbRegions: List<MeteringRectangle>? = null
+        ): Deferred<Result3A>
+
+        /**
+         * Applies the given 3A parameters to the camera device but for only one frame.
+         *
+         * @return the FrameNumber for which these parameters were applied.
+         */
+        suspend fun submit3A(
+            aeMode: AeMode? = null,
+            afMode: AfMode? = null,
+            awbMode: AwbMode? = null,
+            aeRegions: List<MeteringRectangle>? = null,
+            afRegions: List<MeteringRectangle>? = null,
+            awbRegions: List<MeteringRectangle>? = null
+        ): Deferred<Result3A>
+
+        /**
+         * Turns the torch to ON or OFF.
+         *
+         * This method has a side effect on the currently set AE mode. Ref:
+         * https://developer.android.com/reference/android/hardware/camera2/CaptureRequest#FLASH_MODE
+         * To use the flash control, AE mode must be set to ON or OFF. So if the AE mode is not
+         * already ON or OFF, we will need to update the AE mode to one of those states; here we
+         * will choose ON. It is the responsibility of the application layer above CameraPipe to
+         * restore the AE mode after the torch control has been used. The [update3A] method can be
+         * used to restore the AE state to a previous value.
+         *
+         * @return the FrameNumber at which the torch was fully turned on if the switch was ON, or
+         *   the FrameNumber at which it was completely turned off if the switch was OFF.
+         */
+        fun setTorch(torchState: TorchState): Deferred<Result3A>
+
+        /**
+         * Locks the auto-exposure, auto-focus and auto-whitebalance as per the given desired
+         * behaviors. The given 3A parameters are applied before the lock is obtained. If a 'null'
+         * value is passed for a parameter, that parameter is ignored, and the current value for
+         * that parameter continues to be applied.
+         *
+         * @param afTriggerStartAeMode the AeMode value that should override the current AeMode for
+         *   the AF_TRIGGER_START request; this value should not be retained for subsequent requests
+         * @param frameLimit the maximum number of frames to wait before we give up waiting for this
+         *   operation to complete.
+         * @param timeLimitNs the maximum time limit, in nanoseconds, we wait before we give up
+         *   waiting for this operation to complete.
+         * @return [Result3A], which will contain the latest frame number at which the locks were
+         *   applied or the frame number at which the method returned early because either frame
+         *   limit or time limit was reached.
+         *
+         * TODO(sushilnath@): Add support for specifying the AE, AF and AWB modes as well. The
+         *   update of modes requires special care if the desired lock behavior is immediate. In that
+         *   case we have to submit a combination of repeating and single requests so that the AF
+         *   skips the initial state of the new mode's state machine and stays locked in the new mode
+         *   as well.
+         */
+        suspend fun lock3A(
+            aeMode: AeMode? = null,
+            afMode: AfMode? = null,
+            awbMode: AwbMode? = null,
+            aeRegions: List<MeteringRectangle>? = null,
+            afRegions: List<MeteringRectangle>? = null,
+            awbRegions: List<MeteringRectangle>? = null,
+            aeLockBehavior: Lock3ABehavior? = null,
+            afLockBehavior: Lock3ABehavior? = null,
+            awbLockBehavior: Lock3ABehavior? = null,
+            afTriggerStartAeMode: AeMode? = null,
+            frameLimit: Int = DEFAULT_FRAME_LIMIT,
+            timeLimitNs: Long = DEFAULT_TIME_LIMIT_NS
+        ): Deferred<Result3A>
+
+        /**
+         * Unlocks auto-exposure, auto-focus, auto-whitebalance. Once they are unlocked, they go
+         * back to their initial state or resume their auto scan, depending on the current mode they
+         * are operating in.
+         *
+         * Providing 'true' for a parameter in this method will unlock that component; if 'false'
+         * is provided or the parameter is not specified, it will have no effect on the lock of
+         * that component, i.e. if it was locked earlier it will stay locked, and if it was already
+         * unlocked, it will stay unlocked.
+         *
+         * @return [Result3A], which will contain the latest frame number at which the auto-focus,
+         *   auto-exposure, auto-white balance were unlocked as per the method arguments.
+         */
+        suspend fun unlock3A(
+            ae: Boolean? = null,
+            af: Boolean? = null,
+            awb: Boolean? = null
+        ): Deferred<Result3A>
+
+        /**
+         * This method runs the pre-capture metering sequence and locks auto-focus. Once the operation
+         * completes, we can proceed to take high-quality pictures.
+         *
+         * Note: Flash will be used during pre-capture metering and during image capture if the AE
+         * mode was set to [AeMode.ON_AUTO_FLASH] or [AeMode.ON_ALWAYS_FLASH], thus firing it for
+         * low light captures or for every capture, respectively.
+         *
+         * @param frameLimit the maximum number of frames to wait before we give up waiting for this
+         *   operation to complete.
+         * @param timeLimitNs the maximum time limit, in nanoseconds, we wait before we give up
+         *   waiting for this operation to complete.
+         * @return [Result3A], which will contain the latest frame number at which the locks were
+         *   applied or the frame number at which the method returned early because either frame
+         *   limit or time limit was reached.
+         */
+        suspend fun lock3AForCapture(
+            frameLimit: Int = DEFAULT_FRAME_LIMIT,
+            timeLimitNs: Long = DEFAULT_TIME_LIMIT_NS
+        ): Deferred<Result3A>
+
+        /**
+         * After submitting the pre-capture metering sequence needed by the [lock3AForCapture]
+         * method, the camera system can internally lock the auto-exposure routine for subsequent
+         * still image capture, and if no image capture request is submitted the auto-exposure may
+         * not resume its normal scan. This method brings focus and exposure back to normal after
+         * high-quality image captures using the [lock3AForCapture] method.
+         */
+        suspend fun unlock3APostCapture(): Deferred<Result3A>
+    }
+}
+
+/**
+ * GraphState represents the public-facing state of a [CameraGraph] instance. When created,
+ * a [CameraGraph] starts in [GraphStateStopped]. Calling [CameraGraph.start] puts the graph into
+ * [GraphStateStarting], and [CameraGraph.stop] puts the graph into [GraphStateStopping]. Remaining
+ * states are produced by the underlying camera as a result of these start/stop calls.
+ */
 abstract class GraphState internal constructor() {
     /**
      * When the [CameraGraph] is starting. This means we're in the process of opening a (virtual)
@@ -60,309 +375,3 @@
     class GraphStateError(val cameraError: CameraError, val willAttemptRetry: Boolean) :
         GraphState()
 }
-
-/** A [CameraGraph] represents the combined configuration and state of a camera. */
-@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
-public interface CameraGraph : AutoCloseable {
-    public val streams: StreamGraph
-
-    /**
-     * Returns the state flow of [GraphState], which emits the current state of the [CameraGraph],
-     * including when a [CameraGraph] is stopped, starting or started.
-     */
-    public val graphState: StateFlow<GraphState>
-
-    /**
-     * This will cause the [CameraGraph] to start opening the [CameraDevice] and configuring a
-     * [CameraCaptureSession]. While the CameraGraph is alive it will attempt to keep the camera
-     * open, active, and in a configured running state.
-     */
-    public fun start()
-
-    /**
-     * This will cause the [CameraGraph] to stop executing requests and close the current Camera2
-     * [CameraCaptureSession] (if one is active). The most recent repeating request will be
-     * preserved, and any calls to submit a request to a session will be enqueued. To stop requests
-     * from being enqueued, close the [CameraGraph].
-     */
-    public fun stop()
-
-    /** Acquire and exclusive access to the [CameraGraph] in a suspending fashion. */
-    public suspend fun acquireSession(): Session
-
-    /**
-     * Try acquiring an exclusive access the [CameraGraph]. Returns null if it can't be acquired
-     * immediately.
-     */
-    public fun acquireSessionOrNull(): Session?
-
-    /**
-     * This configures the camera graph to use a specific Surface for the given stream.
-     *
-     * Changing a surface may cause the camera to stall and/or reconfigure.
-     */
-    public fun setSurface(stream: StreamId, surface: Surface?)
-
-    /**
-     * This defines the configuration, flags, and pre-defined structure of a [CameraGraph] instance.
-     * Note that for parameters, null is considered a valid value, and unset keys are ignored.
-     *
-     * @param camera The Camera2 [CameraId] that this [CameraGraph] represents.
-     * @param streams A list of [CameraStream]s to use when building the configuration.
-     * @param streamSharingGroups A list of [CameraStream]s to apply buffer sharing to.
-     * @param input An input configuration to support Camera2 Reprocessing.
-     * @param sessionTemplate The template id to use when creating the [CaptureRequest] to supply
-     *   the default parameters for a [SessionConfiguration] object.
-     * @param sessionParameters the extra parameters to apply to the [CaptureRequest] used to supply
-     *   the default parameters for a [SessionConfiguration] object. These parameters are *only*
-     *   used to create the [CaptureRequest] for session configuration. Use [defaultParameters] or
-     *   [requiredParameters] to enforce that the key is set for every request.
-     * @param sessionMode defines the [OperatingMode] of the session. May be used to configure a
-     *   [CameraConstrainedHighSpeedCaptureSession] for slow motion capture (If available)
-     * @param defaultTemplate The default template to be used if a [Request] does not specify one.
-     * @param defaultParameters The default parameters to be used for a [Request].
-     * @param defaultListeners A default set of listeners that will be added to every [Request].
-     * @param requiredParameters Will override any other configured parameter, and can be used to
-     *   enforce that specific keys are always set to specific value for every [CaptureRequest].
-     * @param cameraBackendId If defined, this tells the [CameraGraph] to use a specific
-     *   [CameraBackend] to open and operate the camera. The defined [camera] parameter must be a
-     *   camera that can be opened by this [CameraBackend]. If this value is null it will use the
-     *   default backend that has been configured by [CameraPipe].
-     * @param customCameraBackend If defined, this [customCameraBackend] will be created an used for
-     *   _only_ this [CameraGraph]. This cannot be defined if [cameraBackendId] is defined.
-     */
-    public data class Config(
-        val camera: CameraId,
-        val streams: List<CameraStream.Config>,
-        val streamSharingGroups: List<List<CameraStream.Config>> = listOf(),
-        val input: InputStream.Config? = null,
-        val sessionTemplate: RequestTemplate = RequestTemplate(1),
-        val sessionParameters: Map<*, Any?> = emptyMap<Any, Any?>(),
-        val sessionMode: OperatingMode = OperatingMode.NORMAL,
-        val defaultTemplate: RequestTemplate = RequestTemplate(1),
-        val defaultParameters: Map<*, Any?> = emptyMap<Any, Any?>(),
-        val defaultListeners: List<Request.Listener> = listOf(),
-        val requiredParameters: Map<*, Any?> = emptyMap<Any, Any?>(),
-        val cameraBackendId: CameraBackendId? = null,
-        val customCameraBackend: CameraBackendFactory? = null,
-        val metadataTransform: MetadataTransform = MetadataTransform(),
-        val flags: Flags = Flags()
-        // TODO: Internal error handling. May be better at the CameraPipe level.
-    ) {
-        init {
-            check(cameraBackendId == null || customCameraBackend == null) {
-                "Setting both cameraBackendId and customCameraBackend is not supported."
-            }
-        }
-    }
-
-    /**
-     * Flags define boolean values that are used to adjust the behavior and interactions with
-     * camera2. These flags should default to the ideal behavior and should be overridden on
-     * specific devices to be faster or to work around bad behavior.
-     */
-    public data class Flags(
-        val configureBlankSessionOnStop: Boolean = false,
-        val abortCapturesOnStop: Boolean = false,
-        val allowMultipleActiveCameras: Boolean = false
-    )
-
-    public enum class OperatingMode {
-        NORMAL,
-        HIGH_SPEED,
-    }
-
-    @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
-    public object Constants3A {
-        // Constants related to controlling the time or frame budget a 3A operation should get.
-        public const val DEFAULT_FRAME_LIMIT: Int = 60
-        public const val DEFAULT_TIME_LIMIT_MS: Int = 3_000
-        public const val DEFAULT_TIME_LIMIT_NS: Long = 3_000_000_000L
-
-        // Constants related to metering regions.
-        /** No metering region is specified. */
-        public val METERING_REGIONS_EMPTY: Array<MeteringRectangle> = emptyArray()
-
-        /**
-         * No-op metering regions, this will tell camera device to pick the right metering region
-         * for us.
-         */
-        public val METERING_REGIONS_DEFAULT: Array<MeteringRectangle> =
-            arrayOf(MeteringRectangle(0, 0, 0, 0, 0))
-
-        /** Placeholder frame number for [Result3A] when a 3A method encounters an error. */
-        public val FRAME_NUMBER_INVALID: FrameNumber = FrameNumber(-1L)
-    }
-
-    /**
-     * A [Session] is an interactive lock for [CameraGraph] and allows state to be changed.
-     *
-     * Holding this object prevents other systems from acquiring a [Session] until the currently
-     * held session is released. Because of its exclusive nature, [Session]s are intended for fast,
-     * short-lived state updates, or for interactive capture sequences that must not be altered.
-     * (Flash photo sequences, for example).
-     *
-     * While this object is thread-safe, it should not shared or held for long periods of time.
-     * Example: A [Session] should *not* be held during video recording.
-     */
-    public interface Session : AutoCloseable {
-        /**
-         * Causes the CameraGraph to start or update the current repeating request with the provided
-         * [Request] object. The [Request] object may be cached, and may be used for other
-         * interactions with the camera (such as updating 3A, or issuing 3A triggers).
-         */
-        public fun startRepeating(request: Request)
-
-        /** Stop the current repeating request. */
-        public fun stopRepeating()
-
-        /**
-         * Add the [Request] into an in-flight request queue. Requests will be issued to the Camera
-         * exactly once.
-         */
-        public fun submit(request: Request)
-
-        /**
-         * Add the [Request] into an in-flight request queue. Requests will be issued to the Camera
-         * exactly once. The list of [Request]s is guaranteed to be submitted together.
-         */
-        public fun submit(requests: List<Request>)
-
-        /**
-         * Abort in-flight requests. This will abort *all* requests in the current
-         * CameraCaptureSession as well as any requests that are enqueued, but that have not yet
-         * been submitted to the camera.
-         */
-        public fun abort()
-
-        /**
-         * Applies the given 3A parameters to the camera device.
-         *
-         * @return earliest FrameNumber at which the parameters were successfully applied.
-         */
-        public fun update3A(
-            aeMode: AeMode? = null,
-            afMode: AfMode? = null,
-            awbMode: AwbMode? = null,
-            aeRegions: List<MeteringRectangle>? = null,
-            afRegions: List<MeteringRectangle>? = null,
-            awbRegions: List<MeteringRectangle>? = null
-        ): Deferred<Result3A>
-
-        /**
-         * Applies the given 3A parameters to the camera device but for only one frame.
-         *
-         * @return the FrameNumber for which these parameters were applied.
-         */
-        public suspend fun submit3A(
-            aeMode: AeMode? = null,
-            afMode: AfMode? = null,
-            awbMode: AwbMode? = null,
-            aeRegions: List<MeteringRectangle>? = null,
-            afRegions: List<MeteringRectangle>? = null,
-            awbRegions: List<MeteringRectangle>? = null
-        ): Deferred<Result3A>
-
-        /**
-         * Turns the torch to ON or OFF.
-         *
-         * This method has a side effect on the currently set AE mode. Ref:
-         * https://developer.android.com/reference/android/hardware/camera2/CaptureRequest#FLASH_MODE
-         * To use the flash control, AE mode must be set to ON or OFF. So if the AE mode is already
-         * not either ON or OFF, we will need to update the AE mode to one of those states, here we
-         * will choose ON. It is the responsibility of the application layer above CameraPipe to
-         * restore the AE mode after the torch control has been used. The [update3A] method can be
-         * used to restore the AE state to a previous value.
-         *
-         * @return the FrameNumber at which the turn was fully turned on if switch was ON, or the
-         *   FrameNumber at which it was completely turned off when the switch was OFF.
-         */
-        public fun setTorch(torchState: TorchState): Deferred<Result3A>
-
-        /**
-         * Locks the auto-exposure, auto-focus and auto-whitebalance as per the given desired
-         * behaviors. This given 3A parameters are applied before the lock is obtained. If 'null'
-         * value is passed for a parameter, that parameter is ignored, and the current value for
-         * that parameter continues to be applied.
-         *
-         * @param afTriggerStartAeMode the AeMode value that should override current AeMode for
-         *   AF_TRIGGER_START request, this value should not be retained for following requests
-         * @param frameLimit the maximum number of frames to wait before we give up waiting for this
-         *   operation to complete.
-         * @param timeLimitNs the maximum time limit in ms we wait before we give up waiting for
-         *   this operation to complete.
-         * @return [Result3A], which will contain the latest frame number at which the locks were
-         *   applied or the frame number at which the method returned early because either frame
-         *   limit or time limit was reached.
-         *
-         * TODO(sushilnath@): Add support for specifying the AE, AF and AWB modes as well. The
-         *   update of modes require special care if the desired lock behavior is immediate. In that
-         *   case we have to submit a combination of repeating and single requests so that the AF
-         *   skips the initial state of the new mode's state machine and stays locks in the new mode
-         *   as well.
-         */
-        public suspend fun lock3A(
-            aeMode: AeMode? = null,
-            afMode: AfMode? = null,
-            awbMode: AwbMode? = null,
-            aeRegions: List<MeteringRectangle>? = null,
-            afRegions: List<MeteringRectangle>? = null,
-            awbRegions: List<MeteringRectangle>? = null,
-            aeLockBehavior: Lock3ABehavior? = null,
-            afLockBehavior: Lock3ABehavior? = null,
-            awbLockBehavior: Lock3ABehavior? = null,
-            afTriggerStartAeMode: AeMode? = null,
-            frameLimit: Int = DEFAULT_FRAME_LIMIT,
-            timeLimitNs: Long = DEFAULT_TIME_LIMIT_NS
-        ): Deferred<Result3A>
-
-        /**
-         * Unlocks auto-exposure, auto-focus, auto-whitebalance. Once they are unlocked they get
-         * back to their initial state or resume their auto scan depending on the current mode they
-         * are operating in.
-         *
-         * Providing 'true' for a parameter in this method will unlock that component and if 'false'
-         * is provided or the parameter is not specified then it will have no effect on the lock of
-         * that component, i.e if it was locked earlier it will stay locked and if it was already
-         * unlocked, it will stay unlocked.
-         *
-         * @return [Result3A], which will contain the latest frame number at which the auto-focus,
-         *   auto-exposure, auto-white balance were unlocked as per the method arguments.
-         */
-        public suspend fun unlock3A(
-            ae: Boolean? = null,
-            af: Boolean? = null,
-            awb: Boolean? = null
-        ): Deferred<Result3A>
-
-        /**
-         * This methods does pre-capture metering sequence and locks auto-focus. Once the operation
-         * completes, we can proceed to take high-quality pictures.
-         *
-         * Note: Flash will be used during pre-capture metering and during image capture if the AE
-         * mode was set to [AeMode.ON_AUTO_FLASH] or [AeMode.ON_ALWAYS_FLASH], thus firing it for
-         * low light captures or for every capture, respectively.
-         *
-         * @param frameLimit the maximum number of frames to wait before we give up waiting for this
-         *   operation to complete.
-         * @param timeLimitNs the maximum time limit in ms we wait before we give up waiting for
-         *   this operation to complete.
-         * @return [Result3A], which will contain the latest frame number at which the locks were
-         *   applied or the frame number at which the method returned early because either frame
-         *   limit or time limit was reached.
-         */
-        public suspend fun lock3AForCapture(
-            frameLimit: Int = DEFAULT_FRAME_LIMIT,
-            timeLimitNs: Long = DEFAULT_TIME_LIMIT_NS
-        ): Deferred<Result3A>
-
-        /**
-         * After submitting pre-capture metering sequence needed by [lock3AForCapture] method, the
-         * camera system can internally lock the auto-exposure routine for subsequent still image
-         * capture, and if not image capture request is submitted the auto-exposure may not resume
-         * it's normal scan. This method brings focus and exposure back to normal after high quality
-         * image captures using [lock3AForCapture] method.
-         */
-        public suspend fun unlock3APostCapture(): Deferred<Result3A>
-    }
-}
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraMetadata.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraMetadata.kt
index 01ad490..24cbc65 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraMetadata.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraMetadata.kt
@@ -32,23 +32,23 @@
  * directly. This allows code to get reasonable behavior for all properties across all OS levels and
  * makes behavior that depends on [CameraMetadata] easier to test and reason about.
  */
-public interface CameraMetadata : Metadata, UnsafeWrapper {
-    public operator fun <T> get(key: CameraCharacteristics.Key<T>): T?
-    public fun <T> getOrDefault(key: CameraCharacteristics.Key<T>, default: T): T
+interface CameraMetadata : Metadata, UnsafeWrapper {
+    operator fun <T> get(key: CameraCharacteristics.Key<T>): T?
+    fun <T> getOrDefault(key: CameraCharacteristics.Key<T>, default: T): T
 
-    public val camera: CameraId
-    public val isRedacted: Boolean
+    val camera: CameraId
+    val isRedacted: Boolean
 
-    public val keys: Set<CameraCharacteristics.Key<*>>
-    public val requestKeys: Set<CaptureRequest.Key<*>>
-    public val resultKeys: Set<CaptureResult.Key<*>>
-    public val sessionKeys: Set<CaptureRequest.Key<*>>
+    val keys: Set<CameraCharacteristics.Key<*>>
+    val requestKeys: Set<CaptureRequest.Key<*>>
+    val resultKeys: Set<CaptureResult.Key<*>>
+    val sessionKeys: Set<CaptureRequest.Key<*>>
 
-    public val physicalCameraIds: Set<CameraId>
-    public val physicalRequestKeys: Set<CaptureRequest.Key<*>>
+    val physicalCameraIds: Set<CameraId>
+    val physicalRequestKeys: Set<CaptureRequest.Key<*>>
 
-    public suspend fun getPhysicalMetadata(cameraId: CameraId): CameraMetadata
-    public fun awaitPhysicalMetadata(cameraId: CameraId): CameraMetadata
+    suspend fun getPhysicalMetadata(cameraId: CameraId): CameraMetadata
+    fun awaitPhysicalMetadata(cameraId: CameraId): CameraMetadata
 }
 
 /**
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraPipe.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraPipe.kt
index d5e8cf2..d90a1bc 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraPipe.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraPipe.kt
@@ -49,7 +49,7 @@
  * [android.hardware.camera2.CameraDevice] and [android.hardware.camera2.CameraCaptureSession] via
  * the [CameraGraph] interface.
  */
-public class CameraPipe(config: Config) {
+class CameraPipe(config: Config) {
     private val debugId = cameraPipeIds.incrementAndGet()
     private val component: CameraPipeComponent =
         DaggerCameraPipeComponent.builder()
@@ -61,7 +61,7 @@
      * This creates a new [CameraGraph] that can be used to interact with a single Camera on the
      * device. Multiple [CameraGraph]s can be created, but only one should be active at a time.
      */
-    public fun create(config: CameraGraph.Config): CameraGraph {
+    fun create(config: CameraGraph.Config): CameraGraph {
         return component
             .cameraGraphComponentBuilder()
             .cameraGraphConfigModule(CameraGraphConfigModule(config))
@@ -70,12 +70,12 @@
     }
 
     /** This provides access to information about the available cameras on the device. */
-    public fun cameras(): CameraDevices {
+    fun cameras(): CameraDevices {
         return component.cameras()
     }
 
     /** This returns [CameraSurfaceManager] which tracks the lifetime of Surfaces in CameraPipe. */
-    public fun cameraSurfaceManager(): CameraSurfaceManager {
+    fun cameraSurfaceManager(): CameraSurfaceManager {
         return component.cameraSurfaceManager()
     }
 
@@ -83,7 +83,7 @@
      * Application level configuration for [CameraPipe]. Nullable values are optional and reasonable
      * defaults will be provided if values are not specified.
      */
-    public data class Config(
+    data class Config(
         val appContext: Context,
         val threadConfig: ThreadConfig = ThreadConfig(),
         val cameraMetadataConfig: CameraMetadataConfig = CameraMetadataConfig(),
@@ -95,7 +95,7 @@
      * Application level configuration for Camera2Interop callbacks. If set, these callbacks will be
      * triggered at the appropriate places in Camera-Pipe.
      */
-    public data class CameraInteropConfig(
+    data class CameraInteropConfig(
         val cameraDeviceStateCallback: CameraDevice.StateCallback? = null,
         val cameraSessionStateCallback: CameraCaptureSession.StateCallback? = null
     )
@@ -114,7 +114,7 @@
      * - [testOnlyScope] is used for testing to overwrite the internal global scope with the test
      *   method scope.
      */
-    public data class ThreadConfig(
+    data class ThreadConfig(
         val defaultLightweightExecutor: Executor? = null,
         val defaultBackgroundExecutor: Executor? = null,
         val defaultBlockingExecutor: Executor? = null,
@@ -132,9 +132,9 @@
      * @param cameraCacheBlocklist is used to prevent the metadata backend from caching the results
      *   of specific keys for specific cameraIds.
      */
-    public class CameraMetadataConfig(
-        public val cacheBlocklist: Set<CameraCharacteristics.Key<*>> = emptySet(),
-        public val cameraCacheBlocklist: Map<CameraId, Set<CameraCharacteristics.Key<*>>> =
+    class CameraMetadataConfig(
+        val cacheBlocklist: Set<CameraCharacteristics.Key<*>> = emptySet(),
+        val cameraCacheBlocklist: Map<CameraId, Set<CameraCharacteristics.Key<*>>> =
             emptyMap()
     )
 
@@ -187,7 +187,7 @@
             "CameraPipe.External is deprecated, use customCameraBackend on " +
                 "GraphConfig instead."
         )
-        public fun create(
+        fun create(
             config: CameraGraph.Config,
             cameraMetadata: CameraMetadata,
             requestProcessor: RequestProcessor
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraSurfaceManager.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraSurfaceManager.kt
index 4278c31..b0fecf3 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraSurfaceManager.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CameraSurfaceManager.kt
@@ -45,7 +45,7 @@
  */
 @Singleton
 @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
-public class CameraSurfaceManager @Inject constructor() {
+class CameraSurfaceManager @Inject constructor() {
 
     private val lock = Any()
 
@@ -95,7 +95,7 @@
      * Adds a [SurfaceListener] to receive [Surface] lifetime updates. When a listener is added, it
      * will receive [SurfaceListener.onSurfaceActive] for all active Surfaces.
      */
-    public fun addListener(listener: SurfaceListener) {
+    fun addListener(listener: SurfaceListener) {
         val activeSurfaces =
             synchronized(lock) {
                 listeners.add(listener)
@@ -106,7 +106,7 @@
     }
 
     /** Removes a [SurfaceListener] to stop receiving [Surface] lifetime updates. */
-    public fun removeListener(listener: SurfaceListener) {
+    fun removeListener(listener: SurfaceListener) {
         synchronized(lock) { listeners.remove(listener) }
     }
 
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CaptureSequence.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CaptureSequence.kt
index a85bc64..144ccc2 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CaptureSequence.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CaptureSequence.kt
@@ -22,7 +22,7 @@
  *
  * A CaptureSequence should be created from a [CaptureSequenceProcessor].
  */
-public interface CaptureSequence<out TCaptureRequest> {
+interface CaptureSequence<out TCaptureRequest> {
     val cameraId: CameraId
     val repeating: Boolean
     val captureRequestList: List<TCaptureRequest>
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CaptureSequenceProcessor.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CaptureSequenceProcessor.kt
index 8e33af1..9c6cc07 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CaptureSequenceProcessor.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/CaptureSequenceProcessor.kt
@@ -17,7 +17,7 @@
 package androidx.camera.camera2.pipe
 
 /** Create and submit [CaptureSequence]s to an active camera instance. */
-public interface CaptureSequenceProcessor<
+interface CaptureSequenceProcessor<
     out TCaptureRequest, TCaptureSequence : CaptureSequence<TCaptureRequest>> {
 
     /**
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Frames.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Frames.kt
new file mode 100644
index 0000000..5409f4f
--- /dev/null
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Frames.kt
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+@file:RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+
+package androidx.camera.camera2.pipe
+
+import android.hardware.camera2.CaptureResult
+import android.hardware.camera2.TotalCaptureResult
+import androidx.annotation.RequiresApi
+
+/**
+ * A [FrameNumber] is the identifier that represents a specific exposure by the Camera. FrameNumbers
+ * increase within a specific CameraCaptureSession, and are not created until the HAL begins
+ * processing a request.
+ */
+@JvmInline
+value class FrameNumber(val value: Long)
+
+/** [FrameInfo] is a wrapper around [TotalCaptureResult]. */
+interface FrameInfo : UnsafeWrapper {
+    val metadata: FrameMetadata
+
+    /**
+     * If this [FrameInfo] was produced from a logical camera there will be metadata associated with
+     * the physical streams that were sent to the camera.
+     */
+    operator fun get(camera: CameraId): FrameMetadata?
+
+    val camera: CameraId
+    val frameNumber: FrameNumber
+    val requestMetadata: RequestMetadata
+}
+
+/** [FrameMetadata] is a wrapper around [CaptureResult]. */
+interface FrameMetadata : Metadata, UnsafeWrapper {
+    operator fun <T> get(key: CaptureResult.Key<T>): T?
+    fun <T> getOrDefault(key: CaptureResult.Key<T>, default: T): T
+
+    val camera: CameraId
+    val frameNumber: FrameNumber
+
+    /**
+     * Extra metadata will override values defined by the wrapped CaptureResult object. This is
+     * exposed separately to allow other systems to know what is altered relative to Camera2.
+     */
+    val extraMetadata: Map<*, Any?>
+}
+
+/**
+ * This defines a metadata transform that will be applied to the data produced by
+ * [Request.Listener.onTotalCaptureResult]. The returned map will override the values returned by
+ * TotalCaptureResult. Setting the offset and window size will cause the
+ * [Request.Listener.onComplete] method to be delayed so that the transform can be run on future
+ * metadata.
+ */
+data class MetadataTransform(
+    /**
+     * This defines the number of historical [TotalCaptureResult] objects this transform is allowed
+     * to look at. Setting this value to > 0 increases the number of [TotalCaptureResult] the
+     * [CameraGraph] will hold on to.
+     */
+    val past: Int = 0,
+
+    /**
+     * This defines the number of future [TotalCaptureResult] objects this transform is allowed to
+     * look at. Setting this value to > 0 will cause [Request.Listener.onComplete] to be delayed by
+     * the number of frames specified here.
+     */
+    val future: Int = 0,
+
+    /**
+     * This transform function will be invoked at high speed, and may be invoked multiple times if
+     * correcting physical camera results.
+     *
+     * The returned values should be limited to values that will override the default values that
+     * are set on the TotalCaptureResult for this frame.
+     */
+    val transformFn: TransformFn = object : TransformFn {}
+) {
+    init {
+        check(past >= 0)
+        check(future >= 0)
+    }
+
+    interface TransformFn {
+        fun computeOverridesFor(
+            result: FrameInfo,
+            camera: CameraId,
+            related: List<FrameInfo?>
+        ): Map<*, Any?> = emptyMap<Any, Any?>()
+    }
+}
\ No newline at end of file
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Metadata.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Metadata.kt
index 19022c4..57d8d16 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Metadata.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Metadata.kt
@@ -18,10 +18,6 @@
 
 package androidx.camera.camera2.pipe
 
-import android.hardware.camera2.CaptureRequest
-import android.hardware.camera2.CaptureResult
-import android.hardware.camera2.TotalCaptureResult
-import android.view.Surface
 import androidx.annotation.RequiresApi
 
 /**
@@ -32,13 +28,13 @@
  *
  * These interfaces are read-only.
  */
-public interface Metadata {
-    public operator fun <T> get(key: Key<T>): T?
-    public fun <T> getOrDefault(key: Key<T>, default: T): T
+interface Metadata {
+    operator fun <T> get(key: Key<T>): T?
+    fun <T> getOrDefault(key: Key<T>, default: T): T
 
     /** Metadata keys provide values or controls that are provided or computed by CameraPipe. */
-    public class Key<T> private constructor(private val name: String) {
-        public companion object {
+    class Key<T> private constructor(private val name: String) {
+        companion object {
             @JvmStatic
             internal val keys: MutableSet<String> = HashSet()
 
@@ -46,7 +42,7 @@
              * This will create a new Key instance, and will check to see that the key has not been
              * previously created somewhere else.
              */
-            public fun <T> create(name: String): Key<T> {
+            fun <T> create(name: String): Key<T> {
                 synchronized(keys) { check(keys.add(name)) { "$name is already defined!" } }
                 return Key(name)
             }
@@ -57,181 +53,3 @@
         }
     }
 }
-
-/**
- * RequestMetadata wraps together all of the information about a specific CaptureRequest that was
- * submitted to Camera2.
- *
- * <p> This class is distinct from [Request] which is used to configure and issue a request to the
- * [CameraGraph]. This class will report the actual keys / values that were sent to camera2 (if
- * different) from the request that was used to create the Camera2 [CaptureRequest].
- */
-public interface RequestMetadata : Metadata, UnsafeWrapper {
-    public operator fun <T> get(key: CaptureRequest.Key<T>): T?
-    public fun <T> getOrDefault(key: CaptureRequest.Key<T>, default: T): T
-
-    /** The actual Camera2 template that was used when creating this [CaptureRequest] */
-    public val template: RequestTemplate
-
-    /**
-     * A Map of StreamId(s) that were submitted with this CaptureRequest and the Surface(s) used for
-     * this request. It's possible that not all of the streamId's specified in the [Request] are
-     * present in the [CaptureRequest].
-     */
-    public val streams: Map<StreamId, Surface>
-
-    /** Returns true if this is used in a repeating request. */
-    public val repeating: Boolean
-
-    /** The request object that was used to create this [CaptureRequest] */
-    public val request: Request
-
-    /** An internal number used to identify a specific [CaptureRequest] */
-    public val requestNumber: RequestNumber
-}
-
-/** [FrameInfo] is a wrapper around [TotalCaptureResult]. */
-public interface FrameInfo : UnsafeWrapper {
-    public val metadata: FrameMetadata
-
-    /**
-     * If this [FrameInfo] was produced from a logical camera there will be metadata associated with
-     * the physical streams that were sent to the camera.
-     */
-    public operator fun get(camera: CameraId): FrameMetadata?
-
-    public val camera: CameraId
-    public val frameNumber: FrameNumber
-    public val requestMetadata: RequestMetadata
-}
-
-/** [FrameMetadata] is a wrapper around [CaptureResult]. */
-public interface FrameMetadata : Metadata, UnsafeWrapper {
-    public operator fun <T> get(key: CaptureResult.Key<T>): T?
-    public fun <T> getOrDefault(key: CaptureResult.Key<T>, default: T): T
-
-    public val camera: CameraId
-    public val frameNumber: FrameNumber
-
-    /**
-     * Extra metadata will override values defined by the wrapped CaptureResult object. This is
-     * exposed separately to allow other systems to know what is altered relative to Camera2.
-     */
-    public val extraMetadata: Map<*, Any?>
-}
-
-/**
- * This defines a metadata transform that will be applied to the data produced by
- * [Request.Listener.onTotalCaptureResult]. The returned map will override the values returned by
- * TotalCaptureResult. Setting the offset and window size will cause the
- * [Request.Listener.onComplete] method to be delayed so that the transform can be run on future
- * metadata.
- */
-public data class MetadataTransform(
-    /**
-     * This defines the number of historical [TotalCaptureResult] objects this transform is allowed
-     * to look at. Setting this value to > 0 increases the number of [TotalCaptureResult] the
-     * [CameraGraph] will hold on to.
-     */
-    val past: Int = 0,
-
-    /**
-     * This defines the number of future [TotalCaptureResult] objects this transform is allowed to
-     * look at. Setting this value to > 0 will cause [Request.Listener.onComplete] to be delayed by
-     * the number of frames specified here.
-     */
-    val future: Int = 0,
-
-    /**
-     * This transform function will be invoked at high speed, and may be invoked multiple times if
-     * correcting physical camera results.
-     *
-     * the returned values should be limited to values that will override the default values that
-     * are set on the TotalCaptureResult for this frame.
-     */
-    val transformFn: TransformFn = object : TransformFn {}
-) {
-    init {
-        check(past >= 0)
-        check(future >= 0)
-    }
-
-    public interface TransformFn {
-        public fun computeOverridesFor(
-            result: FrameInfo,
-            camera: CameraId,
-            related: List<FrameInfo?>
-        ): Map<*, Any?> = emptyMap<Any, Any?>()
-    }
-}
-
-/**
- * A [RequestTemplate] indicates which preset set list of parameters will be applied to a request by
- * default. These values are defined by camera2.
- */
-@JvmInline
-public value class RequestTemplate(public val value: Int) {
-    val name: String
-        get() {
-            return when (value) {
-                1 -> "TEMPLATE_PREVIEW"
-                2 -> "TEMPLATE_STILL_CAPTURE"
-                3 -> "TEMPLATE_RECORD"
-                4 -> "TEMPLATE_VIDEO_SNAPSHOT"
-                5 -> "TEMPLATE_ZERO_SHUTTER_LAG"
-                6 -> "TEMPLATE_MANUAL"
-                else -> "UNKNOWN-$value"
-            }
-        }
-}
-
-/**
- * A [RequestNumber] is an artificial identifier that is created for each request that is submitted
- * to the Camera.
- */
-@JvmInline
-public value class RequestNumber(public val value: Long)
-
-/**
- * A [FrameNumber] is the identifier that represents a specific exposure by the Camera. FrameNumbers
- * increase within a specific CameraCaptureSession, and are not created until the HAL begins
- * processing a request.
- */
-@JvmInline
-public value class FrameNumber(public val value: Long)
-
-/**
- * This is a timestamp from the Camera, and corresponds to the nanosecond exposure time of a Frame.
- * While the value is expressed in nano-seconds, the precision may be much lower. In addition, the
- * time-base of the Camera is undefined, although it's common for it to be in either Monotonic or
- * Realtime.
- *
- * <p> Timestamp may differ from timestamps that are obtained from other parts of the Camera and
- * media systems within the same device. For example, it's common for high frequency sensors to
- * operate based on a real-time clock, while audio/visual systems commonly operate based on a
- * monotonic clock.
- */
-@JvmInline
-public value class CameraTimestamp(public val value: Long)
-
-/** Utility function to help deal with the unsafe nature of the typed Key/Value pairs. */
-public fun CaptureRequest.Builder.writeParameters(parameters: Map<*, Any?>) {
-    for ((key, value) in parameters) {
-        writeParameter(key, value)
-    }
-}
-
-/** Utility function to help deal with the unsafe nature of the typed Key/Value pairs. */
-public fun CaptureRequest.Builder.writeParameter(key: Any?, value: Any?) {
-    if (key != null && key is CaptureRequest.Key<*>) {
-        @Suppress("UNCHECKED_CAST") this.set(key as CaptureRequest.Key<Any>, value)
-    }
-}
-
-/**
- * Utility function to put all metadata in the current map through an unchecked cast. The unchecked
- * cast is necessary since CameraGraph.Config uses Map<*, Any?> as the standard type for parameters.
- */
-fun MutableMap<Any, Any?>.putAllMetadata(metadata: Map<*, Any?>) {
-    @Suppress("UNCHECKED_CAST") this.putAll(metadata as Map<Any, Any?>)
-}
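
As a point of reference for the keys that stay behind in Metadata.kt, a small sketch of declaring and reading a custom Metadata.Key (illustrative only; the key name and value type are not part of this change):

```kotlin
import androidx.camera.camera2.pipe.Metadata
import androidx.camera.camera2.pipe.Request

// Metadata.Key.create checks that each name is registered exactly once,
// so keys are best declared once as top-level vals.
val CROP_HINT: Metadata.Key<Int> = Metadata.Key.create("example.CROP_HINT")

// Request exposes an operator get for Metadata.Key extras, returning null when unset.
fun cropHintOf(request: Request): Int = request[CROP_HINT] ?: 0
```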
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Request.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Requests.kt
similarity index 67%
rename from camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Request.kt
rename to camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Requests.kt
index 69f12de..6781c8d 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Request.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Requests.kt
@@ -22,9 +22,17 @@
 import android.hardware.camera2.CameraDevice
 import android.hardware.camera2.CaptureFailure
 import android.hardware.camera2.CaptureRequest
+import android.view.Surface
 import androidx.annotation.RequiresApi
 
 /**
+ * A [RequestNumber] is an artificial identifier that is created for each request that is submitted
+ * to the Camera.
+ */
+@JvmInline
+value class RequestNumber(val value: Long)
+
+/**
  * A [Request] is an immutable package of outputs and parameters needed to issue a [CaptureRequest]
  * to a Camera2 [CameraCaptureSession].
  *
@@ -41,13 +49,15 @@
  *
  * @param streams The list of streams to submit. Each request *must* have 1 or more valid streams.
  */
-public data class Request(
+data class Request(
     val streams: List<StreamId>,
     val parameters: Map<CaptureRequest.Key<*>, Any> = emptyMap(),
     val extras: Map<Metadata.Key<*>, Any> = emptyMap(),
     val listeners: List<Listener> = emptyList(),
     val template: RequestTemplate? = null
 ) {
+    operator fun <T> get(key: CaptureRequest.Key<T>): T? = getUnchecked(key)
+    operator fun <T> get(key: Metadata.Key<T>): T? = getUnchecked(key)
 
     /**
      * This listener is used to observe the state and progress of a [Request] that has been issued
@@ -56,7 +66,7 @@
      * in a repeating request may be issued multiple times within the same session, and should not
     * rely on [onRequestSequenceSubmitted] being invoked only once.
      */
-    public interface Listener {
+    interface Listener {
         /**
          * This event indicates that the camera sensor has started exposing the frame associated
          * with this Request. The timestamp will either be the beginning or end of the sensors
@@ -68,7 +78,7 @@
          * @param timestamp the android timestamp in nanos for this exposure
          * @see android.hardware.camera2.CameraCaptureSession.CaptureCallback.onCaptureStarted
          */
-        public fun onStarted(
+        fun onStarted(
             requestMetadata: RequestMetadata,
             frameNumber: FrameNumber,
             timestamp: CameraTimestamp
@@ -85,7 +95,7 @@
          * @param captureResult the current android capture result for this exposure
          * @see android.hardware.camera2.CameraCaptureSession.CaptureCallback.onCaptureStarted
          */
-        public fun onPartialCaptureResult(
+        fun onPartialCaptureResult(
             requestMetadata: RequestMetadata,
             frameNumber: FrameNumber,
             captureResult: FrameMetadata
@@ -103,7 +113,7 @@
          * @param totalCaptureResult the final android capture result for this exposure
          * @see android.hardware.camera2.CameraCaptureSession.CaptureCallback.onCaptureStarted
          */
-        public fun onTotalCaptureResult(
+        fun onTotalCaptureResult(
             requestMetadata: RequestMetadata,
             frameNumber: FrameNumber,
             totalCaptureResult: FrameInfo
@@ -121,7 +131,7 @@
          * @param frameNumber the android frame number for this exposure
          * @param result the package of metadata associated with this result.
          */
-        public fun onComplete(
+        fun onComplete(
             requestMetadata: RequestMetadata,
             frameNumber: FrameNumber,
             result: FrameInfo
@@ -139,7 +149,7 @@
          * @param captureFailure the android [CaptureFailure] data
          * @see android.hardware.camera2.CameraCaptureSession.CaptureCallback.onCaptureFailed
          */
-        public fun onFailed(
+        fun onFailed(
             requestMetadata: RequestMetadata,
             frameNumber: FrameNumber,
             captureFailure: CaptureFailure
@@ -156,7 +166,7 @@
          * @param stream the internal stream that will not receive a buffer for this frame.
          * @see android.hardware.camera2.CameraCaptureSession.CaptureCallback.onCaptureBufferLost
          */
-        public fun onBufferLost(
+        fun onBufferLost(
             requestMetadata: RequestMetadata,
             frameNumber: FrameNumber,
             stream: StreamId
@@ -171,7 +181,7 @@
          *
          * @param request information about this specific request.
          */
-        public fun onAborted(request: Request) {}
+        fun onAborted(request: Request) {}
 
         /**
          * Invoked after the CaptureRequest(s) have been created, but before the request is
@@ -180,7 +190,7 @@
          *
          * @param requestMetadata information about this specific request.
          */
-        public fun onRequestSequenceCreated(requestMetadata: RequestMetadata) {}
+        fun onRequestSequenceCreated(requestMetadata: RequestMetadata) {}
 
         /**
          * Invoked after the CaptureRequest(s) has been submitted. This method may be invoked
@@ -188,7 +198,7 @@
          *
          * @param requestMetadata the data about the camera2 request that was sent to the camera.
          */
-        public fun onRequestSequenceSubmitted(requestMetadata: RequestMetadata) {}
+        fun onRequestSequenceSubmitted(requestMetadata: RequestMetadata) {}
 
         /**
          * Invoked by Camera2 if the request was aborted after having been submitted. This method is
@@ -198,7 +208,7 @@
          * @see
          *   android.hardware.camera2.CameraCaptureSession.CaptureCallback.onCaptureSequenceAborted
          */
-        public fun onRequestSequenceAborted(requestMetadata: RequestMetadata) {}
+        fun onRequestSequenceAborted(requestMetadata: RequestMetadata) {}
 
         /**
          * Invoked by Camera2 if the request was completed after having been submitted. This method
@@ -210,27 +220,112 @@
          * @see
          *   android.hardware.camera2.CameraCaptureSession.CaptureCallback.onCaptureSequenceCompleted
          */
-        public fun onRequestSequenceCompleted(
+        fun onRequestSequenceCompleted(
             requestMetadata: RequestMetadata,
             frameNumber: FrameNumber
         ) {
         }
     }
 
-    public operator fun <T> get(key: CaptureRequest.Key<T>): T? = getUnchecked(key)
-    public operator fun <T> get(key: Metadata.Key<T>): T? = getUnchecked(key)
+    @Suppress("UNCHECKED_CAST")
+    private fun <T> getUnchecked(key: Metadata.Key<T>): T? = this.extras[key] as T?
 
     @Suppress("UNCHECKED_CAST")
-    private fun <T> Request.getUnchecked(key: Metadata.Key<T>): T? = this.extras[key] as T?
-
-    @Suppress("UNCHECKED_CAST")
-    private fun <T> Request.getUnchecked(key: CaptureRequest.Key<T>): T? =
+    private fun <T> getUnchecked(key: CaptureRequest.Key<T>): T? =
         this.parameters[key] as T?
 }
 
-public fun <T> Request.getOrDefault(key: Metadata.Key<T>, default: T): T = this[key] ?: default
+/**
+ * A [RequestTemplate] indicates which preset list of parameters will be applied to a request by
+ * default. These values are defined by camera2.
+ */
+@JvmInline
+value class RequestTemplate(val value: Int) {
+    val name: String
+        get() {
+            return when (value) {
+                1 -> "TEMPLATE_PREVIEW"
+                2 -> "TEMPLATE_STILL_CAPTURE"
+                3 -> "TEMPLATE_RECORD"
+                4 -> "TEMPLATE_VIDEO_SNAPSHOT"
+                5 -> "TEMPLATE_ZERO_SHUTTER_LAG"
+                6 -> "TEMPLATE_MANUAL"
+                else -> "UNKNOWN-$value"
+            }
+        }
+}
 
-public fun <T> Request.getOrDefault(key: CaptureRequest.Key<T>, default: T): T =
+/**
+ * RequestMetadata wraps together all of the information about a specific CaptureRequest that was
+ * submitted to Camera2.
+ *
+ * <p> This class is distinct from [Request] which is used to configure and issue a request to the
+ * [CameraGraph]. This class will report the actual keys / values that were sent to camera2 (if
+ * different) from the request that was used to create the Camera2 [CaptureRequest].
+ */
+interface RequestMetadata : Metadata, UnsafeWrapper {
+    operator fun <T> get(key: CaptureRequest.Key<T>): T?
+    fun <T> getOrDefault(key: CaptureRequest.Key<T>, default: T): T
+
+    /** The actual Camera2 template that was used when creating this [CaptureRequest] */
+    val template: RequestTemplate
+
+    /**
+     * A Map of StreamId(s) that were submitted with this CaptureRequest and the Surface(s) used for
+     * this request. It's possible that not all of the StreamIds specified in the [Request] are
+     * present in the [CaptureRequest].
+     */
+    val streams: Map<StreamId, Surface>
+
+    /** Returns true if this is used in a repeating request. */
+    val repeating: Boolean
+
+    /** The request object that was used to create this [CaptureRequest] */
+    val request: Request
+
+    /** An internal number used to identify a specific [CaptureRequest] */
+    val requestNumber: RequestNumber
+}
+
+/**
+ * This is a timestamp from the Camera, and corresponds to the nanosecond exposure time of a Frame.
+ * While the value is expressed in nanoseconds, the precision may be much lower. In addition, the
+ * time-base of the Camera is undefined, although it's common for it to be in either Monotonic or
+ * Realtime.
+ *
+ * <p> Timestamp may differ from timestamps that are obtained from other parts of the Camera and
+ * media systems within the same device. For example, it's common for high frequency sensors to
+ * operate based on a real-time clock, while audio/visual systems commonly operate based on a
+ * monotonic clock.
+ */
+@JvmInline
+value class CameraTimestamp(val value: Long)
+
+fun <T> Request.getOrDefault(key: Metadata.Key<T>, default: T): T = this[key] ?: default
+
+fun <T> Request.getOrDefault(key: CaptureRequest.Key<T>, default: T): T =
     this[key] ?: default
 
-public fun Request.formatForLogs(): String = "Request($streams)@${Integer.toHexString(hashCode())}"
+fun Request.formatForLogs(): String = "Request($streams)@${Integer.toHexString(hashCode())}"
+
+/** Utility function to help deal with the unsafe nature of the typed Key/Value pairs. */
+fun CaptureRequest.Builder.writeParameters(parameters: Map<*, Any?>) {
+    for ((key, value) in parameters) {
+        writeParameter(key, value)
+    }
+}
+
+/** Utility function to help deal with the unsafe nature of the typed Key/Value pairs. */
+fun CaptureRequest.Builder.writeParameter(key: Any?, value: Any?) {
+    if (key != null && key is CaptureRequest.Key<*>) {
+        @Suppress("UNCHECKED_CAST") this.set(key as CaptureRequest.Key<Any>, value)
+    }
+}
+
+/**
+ * Utility function to put all metadata in the current map through an unchecked cast. The unchecked
+ * cast is necessary since CameraGraph.Config uses Map<*, Any?> as the standard type for parameters.
+ */
+fun MutableMap<Any, Any?>.putAllMetadata(metadata: Map<*, Any?>) {
+    @Suppress("UNCHECKED_CAST") this.putAll(metadata as Map<Any, Any?>)
+}
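
A short sketch of the request-building flow that Requests.kt now hosts end to end (not part of the diff; the stream id is assumed to come from a configured StreamGraph, and the AE parameter is illustrative):

```kotlin
import android.hardware.camera2.CameraMetadata
import android.hardware.camera2.CaptureRequest
import androidx.camera.camera2.pipe.Request
import androidx.camera.camera2.pipe.StreamId
import androidx.camera.camera2.pipe.getOrDefault

// Typed Camera2 parameters travel with the Request until the CaptureRequest is built.
fun buildPreviewRequest(previewStream: StreamId): Request =
    Request(
        streams = listOf(previewStream),
        parameters = mapOf(CaptureRequest.CONTROL_AE_MODE to CameraMetadata.CONTROL_AE_MODE_ON)
    )

// The operator get / getOrDefault extension reads a parameter back out in a typed way.
fun aeModeOf(request: Request): Int =
    request.getOrDefault(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON)
```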
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/StreamFormat.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/StreamFormat.kt
index 960357a..77a6337 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/StreamFormat.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/StreamFormat.kt
@@ -27,34 +27,34 @@
  */
 @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
 @JvmInline
-public value class StreamFormat(public val value: Int) {
-    public companion object {
-        public val UNKNOWN: StreamFormat = StreamFormat(0)
-        public val PRIVATE: StreamFormat = StreamFormat(0x22)
+value class StreamFormat(val value: Int) {
+    companion object {
+        val UNKNOWN: StreamFormat = StreamFormat(0)
+        val PRIVATE: StreamFormat = StreamFormat(0x22)
 
-        public val DEPTH16: StreamFormat = StreamFormat(0x44363159)
-        public val DEPTH_JPEG: StreamFormat = StreamFormat(0x69656963)
-        public val DEPTH_POINT_CLOUD: StreamFormat = StreamFormat(0x101)
-        public val FLEX_RGB_888: StreamFormat = StreamFormat(0x29)
-        public val FLEX_RGBA_8888: StreamFormat = StreamFormat(0x2A)
-        public val HEIC: StreamFormat = StreamFormat(0x48454946)
-        public val JPEG: StreamFormat = StreamFormat(0x100)
-        public val NV16: StreamFormat = StreamFormat(0x10)
-        public val NV21: StreamFormat = StreamFormat(0x11)
-        public val RAW10: StreamFormat = StreamFormat(0x25)
-        public val RAW12: StreamFormat = StreamFormat(0x26)
-        public val RAW_DEPTH: StreamFormat = StreamFormat(0x1002)
-        public val RAW_PRIVATE: StreamFormat = StreamFormat(0x24)
-        public val RAW_SENSOR: StreamFormat = StreamFormat(0x20)
-        public val RGB_565: StreamFormat = StreamFormat(4)
-        public val Y12: StreamFormat = StreamFormat(0x32315659)
-        public val Y16: StreamFormat = StreamFormat(0x20363159)
-        public val Y8: StreamFormat = StreamFormat(0x20203859)
-        public val YUV_420_888: StreamFormat = StreamFormat(0x23)
-        public val YUV_422_888: StreamFormat = StreamFormat(0x27)
-        public val YUV_444_888: StreamFormat = StreamFormat(0x28)
-        public val YUY2: StreamFormat = StreamFormat(0x14)
-        public val YV12: StreamFormat = StreamFormat(0x32315659)
+        val DEPTH16: StreamFormat = StreamFormat(0x44363159)
+        val DEPTH_JPEG: StreamFormat = StreamFormat(0x69656963)
+        val DEPTH_POINT_CLOUD: StreamFormat = StreamFormat(0x101)
+        val FLEX_RGB_888: StreamFormat = StreamFormat(0x29)
+        val FLEX_RGBA_8888: StreamFormat = StreamFormat(0x2A)
+        val HEIC: StreamFormat = StreamFormat(0x48454946)
+        val JPEG: StreamFormat = StreamFormat(0x100)
+        val NV16: StreamFormat = StreamFormat(0x10)
+        val NV21: StreamFormat = StreamFormat(0x11)
+        val RAW10: StreamFormat = StreamFormat(0x25)
+        val RAW12: StreamFormat = StreamFormat(0x26)
+        val RAW_DEPTH: StreamFormat = StreamFormat(0x1002)
+        val RAW_PRIVATE: StreamFormat = StreamFormat(0x24)
+        val RAW_SENSOR: StreamFormat = StreamFormat(0x20)
+        val RGB_565: StreamFormat = StreamFormat(4)
+        val Y12: StreamFormat = StreamFormat(0x32315659)
+        val Y16: StreamFormat = StreamFormat(0x20363159)
+        val Y8: StreamFormat = StreamFormat(0x20203859)
+        val YUV_420_888: StreamFormat = StreamFormat(0x23)
+        val YUV_422_888: StreamFormat = StreamFormat(0x27)
+        val YUV_444_888: StreamFormat = StreamFormat(0x28)
+        val YUY2: StreamFormat = StreamFormat(0x14)
+        val YV12: StreamFormat = StreamFormat(0x32315659)
     }
 
     override fun toString(): String {
@@ -67,7 +67,7 @@
      * @return the number of bits per pixel or -1 if the format does not have a well defined number
      *   of bits per pixel.
      */
-    public val bitsPerPixel: Int
+    val bitsPerPixel: Int
         get() {
             when (this) {
                 DEPTH16 -> return 16
@@ -98,7 +98,7 @@
      *
      * @return a human readable string representation of the StreamFormat.
      */
-    public val name: String
+    val name: String
         get() {
             when (this) {
                 UNKNOWN -> return "UNKNOWN"
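
For context, the bitsPerPixel and name helpers shown above make StreamFormat values self-describing; a trivial usage sketch (not part of the change):

```kotlin
import androidx.camera.camera2.pipe.StreamFormat

// bitsPerPixel is -1 for formats without a well defined bit depth.
fun describe(format: StreamFormat): String = "${format.name} (${format.bitsPerPixel} bpp)"

// e.g. describe(StreamFormat.YUV_420_888) or describe(StreamFormat.JPEG)
```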
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/StreamGraph.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/StreamGraph.kt
index 0527026..dbff74d 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/StreamGraph.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/StreamGraph.kt
@@ -24,11 +24,11 @@
  * [CameraStream]s can be used to build [Request]s that are sent to a [CameraGraph].
  */
 @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
-public interface StreamGraph {
-    public val streams: List<CameraStream>
-    public val streamIds: Set<StreamId>
-    public val input: InputStream?
-    public val outputs: List<OutputStream>
+interface StreamGraph {
+    val streams: List<CameraStream>
+    val streamIds: Set<StreamId>
+    val input: InputStream?
+    val outputs: List<OutputStream>
 
-    public operator fun get(config: CameraStream.Config): CameraStream?
+    operator fun get(config: CameraStream.Config): CameraStream?
 }
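
A minimal sketch of the StreamGraph lookup described above (not part of the change; it assumes the config instance is the same one used when the graph was configured):

```kotlin
import androidx.camera.camera2.pipe.CameraStream
import androidx.camera.camera2.pipe.StreamGraph
import androidx.camera.camera2.pipe.StreamId

// Returns null if the config was not part of this graph's configuration.
fun streamIdFor(streamGraph: StreamGraph, config: CameraStream.Config): StreamId? =
    streamGraph[config]?.id
```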
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Streams.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Streams.kt
index d8b286f..d6c9790 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Streams.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/Streams.kt
@@ -66,12 +66,12 @@
  *                 \-> OutputConfig-2 -> OutputStream-2
  *   ```
  */
-public class CameraStream
-internal constructor(public val id: StreamId, public val outputs: List<OutputStream>) {
+class CameraStream
+internal constructor(val id: StreamId, val outputs: List<OutputStream>) {
     override fun toString(): String = id.toString()
 
     /** Configuration that may be used to define a [CameraStream] on a [CameraGraph] */
-    public class Config internal constructor(val outputs: List<OutputStream.Config>) {
+    class Config internal constructor(val outputs: List<OutputStream.Config>) {
         companion object {
             /** Create a simple [CameraStream] to [OutputStream] configuration */
             fun create(
@@ -117,7 +117,7 @@
  * This identifies a single surface that is used to tell the camera to produce one or more outputs.
  */
 @JvmInline
-public value class StreamId(public val value: Int) {
+value class StreamId(val value: Int) {
     override fun toString(): String = "Stream-$value"
 }
 
@@ -127,18 +127,18 @@
  * the underlying HAL on the device may produce different sized images for the same request. This
  * represents one of those potential outputs.
  */
-public interface OutputStream {
+interface OutputStream {
     // Every output comes from one, and exactly one, CameraStream
-    public val stream: CameraStream
+    val stream: CameraStream
 
-    public val id: OutputId
-    public val size: Size
-    public val format: StreamFormat
-    public val camera: CameraId
-    public val mirrorMode: MirrorMode?
-    public val timestampBase: TimestampBase?
-    public val dynamicRangeProfile: DynamicRangeProfile?
-    public val streamUseCase: StreamUseCase?
+    val id: OutputId
+    val size: Size
+    val format: StreamFormat
+    val camera: CameraId
+    val mirrorMode: MirrorMode?
+    val timestampBase: TimestampBase?
+    val dynamicRangeProfile: DynamicRangeProfile?
+    val streamUseCase: StreamUseCase?
     // TODO: Consider adding sensor mode and/or other metadata
 
     /**
@@ -146,13 +146,13 @@
      * Camera.
      */
     sealed class Config(
-        public val size: Size,
-        public val format: StreamFormat,
-        public val camera: CameraId?,
-        public val mirrorMode: MirrorMode?,
-        public val timestampBase: TimestampBase?,
-        public val dynamicRangeProfile: DynamicRangeProfile?,
-        public val streamUseCase: StreamUseCase?,
+        val size: Size,
+        val format: StreamFormat,
+        val camera: CameraId?,
+        val mirrorMode: MirrorMode?,
+        val timestampBase: TimestampBase?,
+        val dynamicRangeProfile: DynamicRangeProfile?,
+        val streamUseCase: StreamUseCase?,
     ) {
         companion object {
             fun create(
@@ -371,21 +371,21 @@
 
 /** This identifies a single output. */
 @JvmInline
-public value class OutputId(public val value: Int) {
+value class OutputId(val value: Int) {
     override fun toString(): String = "Output-$value"
 }
 
 /** Configuration for defining the properties of a Camera2 InputStream for reprocessing requests. */
-public interface InputStream {
-    public val id: InputId
-    public val format: StreamFormat
+interface InputStream {
+    val id: InputId
+    val format: StreamFormat
     // TODO: This may accept
 
-    public class Config(val stream: CameraStream.Config)
+    class Config(val stream: CameraStream.Config)
 }
 
 /** This identifies a single input. */
 @JvmInline
-public value class InputId(public val value: Int) {
+value class InputId(val value: Int) {
     override fun toString(): String = "Input-$value"
 }
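
The simple create() overload is truncated in the hunk above; a hedged sketch of defining a stream config, assuming the overload accepts a Size and a StreamFormat:

```kotlin
import android.util.Size
import androidx.camera.camera2.pipe.CameraStream
import androidx.camera.camera2.pipe.StreamFormat

// Assumed parameter order (size, format); other OutputStream.Config knobs are left at defaults.
val previewConfig: CameraStream.Config =
    CameraStream.Config.create(Size(1920, 1080), StreamFormat.YUV_420_888)
```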
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/UnsafeWrapper.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/UnsafeWrapper.kt
index 15c221d..2fc9a6b 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/UnsafeWrapper.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/UnsafeWrapper.kt
@@ -28,7 +28,7 @@
  * of the object is managed by CameraPipe.
  */
 @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
-public interface UnsafeWrapper {
+interface UnsafeWrapper {
     /**
      * Attempt to unwrap this object into an underlying type.
      *
@@ -39,5 +39,5 @@
      *
      * @return unwrapped object matching T or null
      */
-    public fun <T : Any> unwrapAs(type: KClass<T>): T?
+    fun <T : Any> unwrapAs(type: KClass<T>): T?
 }
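
A small sketch of the unwrapAs contract (not part of the change; whether a given wrapper exposes TotalCaptureResult is implementation-defined, hence the nullable return):

```kotlin
import android.hardware.camera2.TotalCaptureResult
import androidx.camera.camera2.pipe.FrameInfo

// Returns null when the underlying object is not available or not of the requested type.
fun totalCaptureResultOrNull(frameInfo: FrameInfo): TotalCaptureResult? =
    frameInfo.unwrapAs(TotalCaptureResult::class)
```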
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/AndroidThreads.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/AndroidThreads.kt
index 83c29d1..499b9e0 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/AndroidThreads.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/AndroidThreads.kt
@@ -52,7 +52,7 @@
             Process.THREAD_PRIORITY_URGENT_DISPLAY // 10 (Thread.MAX_PRIORITY)
         )
 
-    public val factory: ThreadFactory = Executors.defaultThreadFactory()
+    val factory: ThreadFactory = Executors.defaultThreadFactory()
 
     /** Wraps `delegate` such that the threads created by it are set to `priority`. */
     fun ThreadFactory.withAndroidPriority(androidPriority: Int): ThreadFactory {
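
The wrapping pattern described by that doc comment, re-sketched generically for reviewers (this is not the library's implementation; the names withPriority and backgroundFactory are illustrative):

```kotlin
import android.os.Process
import java.util.concurrent.Executors
import java.util.concurrent.ThreadFactory

// Each thread created by the delegate factory sets its own Android priority before running.
fun ThreadFactory.withPriority(androidPriority: Int): ThreadFactory = ThreadFactory { runnable ->
    newThread {
        Process.setThreadPriority(androidPriority)
        runnable.run()
    }
}

val backgroundFactory: ThreadFactory =
    Executors.defaultThreadFactory().withPriority(Process.THREAD_PRIORITY_BACKGROUND)
```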
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/Debug.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/Debug.kt
index bebd791..a3130f1 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/Debug.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/Debug.kt
@@ -33,9 +33,9 @@
 
 /** Internal debug utilities, constants, and checks. */
 @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
-public object Debug {
-    public const val ENABLE_LOGGING: Boolean = true
-    public const val ENABLE_TRACING: Boolean = true
+object Debug {
+    const val ENABLE_LOGGING: Boolean = true
+    const val ENABLE_TRACING: Boolean = true
 
     /**
      * Wrap the specified [block] in calls to [Trace.beginSection] (with the supplied [label]) and
@@ -44,7 +44,7 @@
      * @param label A name of the code section to appear in the trace.
      * @param block A block of code which is being traced.
      */
-    public inline fun <T> trace(label: String, crossinline block: () -> T): T {
+    inline fun <T> trace(label: String, crossinline block: () -> T): T {
         try {
             traceStart { label }
             return block()
@@ -54,14 +54,14 @@
     }
 
     /** Forwarding call to [Trace.beginSection] that can be statically disabled at compile time. */
-    public inline fun traceStart(crossinline label: () -> String) {
+    inline fun traceStart(crossinline label: () -> String) {
         if (ENABLE_TRACING) {
             Trace.beginSection(label())
         }
     }
 
     /** Forwarding call to [Trace.endSection] that can be statically disabled at compile time. */
-    public inline fun traceStop() {
+    inline fun traceStop() {
         if (ENABLE_TRACING) {
             Trace.endSection()
         }
@@ -89,7 +89,7 @@
         }
     }
 
-    public fun formatCameraGraphProperties(
+    fun formatCameraGraphProperties(
         metadata: CameraMetadata,
         graphConfig: CameraGraph.Config,
         cameraGraph: CameraGraph
@@ -162,32 +162,32 @@
  *
  * Example: checkApi(Build.VERSION_CODES.LOLLIPOP, "createCameraDevice")
  */
-public inline fun checkApi(requiredApi: Int, methodName: String) {
+inline fun checkApi(requiredApi: Int, methodName: String) {
     check(Build.VERSION.SDK_INT >= requiredApi) {
         "$methodName is not supported on API ${Build.VERSION.SDK_INT} (requires API $requiredApi)"
     }
 }
 
 /** Asserts that this method was invoked on Android L (API 21) or higher. */
-public inline fun checkLOrHigher(methodName: String): Unit =
+inline fun checkLOrHigher(methodName: String): Unit =
     checkApi(Build.VERSION_CODES.LOLLIPOP, methodName)
 
 /** Asserts that this method was invoked on Android M (API 23) or higher. */
-public inline fun checkMOrHigher(methodName: String): Unit =
+inline fun checkMOrHigher(methodName: String): Unit =
     checkApi(Build.VERSION_CODES.M, methodName)
 
 /** Asserts that this method was invoked on Android N (API 24) or higher. */
-public inline fun checkNOrHigher(methodName: String): Unit =
+inline fun checkNOrHigher(methodName: String): Unit =
     checkApi(Build.VERSION_CODES.N, methodName)
 
 /** Asserts that this method was invoked on Android O (API 26) or higher. */
-public inline fun checkOOrHigher(methodName: String): Unit =
+inline fun checkOOrHigher(methodName: String): Unit =
     checkApi(Build.VERSION_CODES.O, methodName)
 
 /** Asserts that this method was invoked on Android P (API 28) or higher. */
-public inline fun checkPOrHigher(methodName: String): Unit =
+inline fun checkPOrHigher(methodName: String): Unit =
     checkApi(Build.VERSION_CODES.P, methodName)
 
 /** Asserts that this method was invoked on Android Q (API 29) or higher. */
-public inline fun checkQOrHigher(methodName: String): Unit =
+inline fun checkQOrHigher(methodName: String): Unit =
     checkApi(Build.VERSION_CODES.Q, methodName)
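
For context, a hypothetical call site for the trace helper above (the section label is illustrative):

```kotlin
import androidx.camera.camera2.pipe.core.Debug

// Wraps the block in Trace.beginSection / Trace.endSection; when ENABLE_TRACING is
// false the calls compile away because trace/traceStart/traceStop are inline.
fun <T> createSessionTraced(create: () -> T): T =
    Debug.trace("CXCP#createSession") { create() }
```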
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/Log.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/Log.kt
index 0f301fd..0e1423e 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/Log.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/Log.kt
@@ -29,8 +29,8 @@
  * Log.debug { "This is a log message with a $value" }
  */
 @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
-public object Log {
-    public const val TAG: String = "CXCP"
+object Log {
+    const val TAG: String = "CXCP"
 
     private const val LOG_LEVEL_DEBUG = 1
     private const val LOG_LEVEL_INFO = 2
@@ -40,28 +40,28 @@
     // This indicates the lowest log level that will always log.
     private const val LOG_LEVEL = LOG_LEVEL_DEBUG
 
-    public val DEBUG_LOGGABLE: Boolean =
+    val DEBUG_LOGGABLE: Boolean =
         LOG_LEVEL <= LOG_LEVEL_DEBUG || Log.isLoggable(TAG, Log.DEBUG)
-    public val INFO_LOGGABLE: Boolean = LOG_LEVEL <= LOG_LEVEL_INFO || Log.isLoggable(TAG, Log.INFO)
-    public val WARN_LOGGABLE: Boolean = LOG_LEVEL <= LOG_LEVEL_WARN || Log.isLoggable(TAG, Log.WARN)
-    public val ERROR_LOGGABLE: Boolean =
+    val INFO_LOGGABLE: Boolean = LOG_LEVEL <= LOG_LEVEL_INFO || Log.isLoggable(TAG, Log.INFO)
+    val WARN_LOGGABLE: Boolean = LOG_LEVEL <= LOG_LEVEL_WARN || Log.isLoggable(TAG, Log.WARN)
+    val ERROR_LOGGABLE: Boolean =
         LOG_LEVEL <= LOG_LEVEL_ERROR || Log.isLoggable(TAG, Log.ERROR)
 
     /** Debug functions log noisy information related to the internals of the system. */
-    public inline fun debug(crossinline msg: () -> String) {
+    inline fun debug(crossinline msg: () -> String) {
         if (Debug.ENABLE_LOGGING && DEBUG_LOGGABLE) Log.d(TAG, msg())
     }
 
-    public inline fun debug(throwable: Throwable, crossinline msg: () -> String) {
+    inline fun debug(throwable: Throwable, crossinline msg: () -> String) {
         if (Debug.ENABLE_LOGGING && DEBUG_LOGGABLE) Log.d(TAG, msg(), throwable)
     }
 
     /** Info functions log standard, useful information about the state of the system. */
-    public inline fun info(crossinline msg: () -> String) {
+    inline fun info(crossinline msg: () -> String) {
         if (Debug.ENABLE_LOGGING && INFO_LOGGABLE) Log.i(TAG, msg())
     }
 
-    public inline fun info(throwable: Throwable, crossinline msg: () -> String) {
+    inline fun info(throwable: Throwable, crossinline msg: () -> String) {
         if (Debug.ENABLE_LOGGING && INFO_LOGGABLE) Log.i(TAG, msg(), throwable)
     }
 
@@ -69,27 +69,27 @@
      * Warning functions are used when something unexpected may lead to a crash or fatal exception
     * later on as a result of the unusual circumstances
      */
-    public inline fun warn(crossinline msg: () -> String) {
+    inline fun warn(crossinline msg: () -> String) {
         if (Debug.ENABLE_LOGGING && WARN_LOGGABLE) Log.w(TAG, msg())
     }
 
-    public inline fun warn(throwable: Throwable, crossinline msg: () -> String) {
+    inline fun warn(throwable: Throwable, crossinline msg: () -> String) {
         if (Debug.ENABLE_LOGGING && WARN_LOGGABLE) Log.w(TAG, msg(), throwable)
     }
 
     /**
      * Error functions are reserved for something unexpected that will lead to a crash or data loss.
      */
-    public inline fun error(crossinline msg: () -> String) {
+    inline fun error(crossinline msg: () -> String) {
         if (Debug.ENABLE_LOGGING && ERROR_LOGGABLE) Log.e(TAG, msg())
     }
 
-    public inline fun error(throwable: Throwable, crossinline msg: () -> String) {
+    inline fun error(throwable: Throwable, crossinline msg: () -> String) {
         if (Debug.ENABLE_LOGGING && ERROR_LOGGABLE) Log.e(TAG, msg(), throwable)
     }
 
     /** Read the stack trace of a calling method and join it to a formatted string. */
-    public fun readStackTrace(limit: Int = 4): String {
+    fun readStackTrace(limit: Int = 4): String {
         val elements = Thread.currentThread().stackTrace
         // Ignore the first 3 elements, which ignores:
         // VMStack.getThreadStackTrace
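
A hypothetical call site for the lazy logging helpers above (the message and exception handling are illustrative):

```kotlin
import androidx.camera.camera2.pipe.core.Log

fun onConfigureFailed(exception: Exception) {
    // The message lambda is only evaluated when WARN is loggable, so string
    // interpolation costs nothing on the hot path.
    Log.warn(exception) { "Failed to configure capture session: ${exception.message}" }
}
```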
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/Timestamps.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/Timestamps.kt
index a3c97b7..eae40c0 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/Timestamps.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/core/Timestamps.kt
@@ -26,22 +26,22 @@
 
 /** A nanosecond timestamp */
 @JvmInline
-public value class TimestampNs constructor(public val value: Long) {
-    public inline operator fun minus(other: TimestampNs): DurationNs =
+value class TimestampNs constructor(val value: Long) {
+    inline operator fun minus(other: TimestampNs): DurationNs =
         DurationNs(value - other.value)
 
-    public inline operator fun plus(other: DurationNs): TimestampNs =
+    inline operator fun plus(other: DurationNs): TimestampNs =
         TimestampNs(value + other.value)
 }
 
 @JvmInline
-public value class DurationNs(public val value: Long) {
-    public inline operator fun minus(other: DurationNs): DurationNs =
+value class DurationNs(val value: Long) {
+    inline operator fun minus(other: DurationNs): DurationNs =
         DurationNs(value - other.value)
 
-    public inline operator fun plus(other: DurationNs): DurationNs = DurationNs(value + other.value)
+    inline operator fun plus(other: DurationNs): DurationNs = DurationNs(value + other.value)
 
-    public inline operator fun plus(other: TimestampNs): TimestampNs =
+    inline operator fun plus(other: TimestampNs): TimestampNs =
         TimestampNs(value + other.value)
 
     operator fun compareTo(other: DurationNs): Int {
@@ -55,30 +55,30 @@
     }
 
     companion object {
-        public inline fun fromMs(durationMs: Long) = DurationNs(durationMs * 1_000_000L)
+        inline fun fromMs(durationMs: Long) = DurationNs(durationMs * 1_000_000L)
     }
 }
 
 interface TimeSource {
-    public fun now(): TimestampNs
+    fun now(): TimestampNs
 }
 
 @Singleton
 @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
-public class SystemTimeSource @Inject constructor() : TimeSource {
+class SystemTimeSource @Inject constructor() : TimeSource {
     override fun now() = TimestampNs(SystemClock.elapsedRealtimeNanos())
 }
 
 @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
-public object Timestamps {
-    public inline fun now(timeSource: TimeSource): TimestampNs = timeSource.now()
+object Timestamps {
+    inline fun now(timeSource: TimeSource): TimestampNs = timeSource.now()
 
-    public inline fun DurationNs.formatNs(): String = "$this ns"
-    public inline fun DurationNs.formatMs(decimals: Int = 3): String =
+    inline fun DurationNs.formatNs(): String = "$this ns"
+    inline fun DurationNs.formatMs(decimals: Int = 3): String =
         "%.${decimals}f ms".format(null, this.value / 1_000_000.0)
 
-    public inline fun TimestampNs.formatNs(): String = "$this ns"
-    public inline fun TimestampNs.formatMs(): String = "${this.value / 1_000_000} ms"
-    public inline fun TimestampNs.measureNow(timeSource: TimeSource = SystemTimeSource()) =
+    inline fun TimestampNs.formatNs(): String = "$this ns"
+    inline fun TimestampNs.formatMs(): String = "${this.value / 1_000_000} ms"
+    inline fun TimestampNs.measureNow(timeSource: TimeSource = SystemTimeSource()) =
         now(timeSource) - this
 }
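
A hypothetical helper showing how the TimeSource abstraction is meant to be used (not part of the change; tests can substitute a fake TimeSource for SystemTimeSource):

```kotlin
import androidx.camera.camera2.pipe.core.SystemTimeSource
import androidx.camera.camera2.pipe.core.TimeSource
import androidx.camera.camera2.pipe.core.Timestamps

fun <T> timed(timeSource: TimeSource = SystemTimeSource(), block: () -> T): T {
    val start = Timestamps.now(timeSource)
    val result = block()
    val elapsed = Timestamps.now(timeSource) - start // DurationNs
    println("Took ${elapsed.value / 1_000_000} ms")
    return result
}
```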
diff --git a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/graph/GraphRequestProcessor.kt b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/graph/GraphRequestProcessor.kt
index 9bde838..bde6051 100644
--- a/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/graph/GraphRequestProcessor.kt
+++ b/camera/camera-camera2-pipe/src/main/java/androidx/camera/camera2/pipe/graph/GraphRequestProcessor.kt
@@ -37,7 +37,7 @@
  */
 @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
 @Suppress("NOTHING_TO_INLINE")
-public class GraphRequestProcessor
+class GraphRequestProcessor
 private constructor(
     private val captureSequenceProcessor: CaptureSequenceProcessor<Any, CaptureSequence<Any>>
 ) {