Merge remote-tracking branch 'goog/upstream-main' into sync

Bug: 210138227
Test: presubmits
Change-Id: I0c2ea271a28bb3b7543b99054f1d83fd0abc7faf
diff --git a/PIGWEED_MODULES b/PIGWEED_MODULES
index 9912ec7..dfb7f27 100644
--- a/PIGWEED_MODULES
+++ b/PIGWEED_MODULES
@@ -92,6 +92,7 @@
 pw_sys_io_arduino
 pw_sys_io_baremetal_lm3s6965evb
 pw_sys_io_baremetal_stm32f429
+pw_sys_io_emcraft_sf2
 pw_sys_io_mcuxpresso
 pw_sys_io_stdio
 pw_sys_io_stm32cube
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 77a4bf1..beb43f1 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -373,8 +373,8 @@
 To speed up the review process, consider adding :ref:`module-pw_presubmit` as a
 git push hook using the following command:
 
-**Linux/macOS**
-
+Linux/macOS
+^^^^^^^^^^^
 .. code:: bash
 
   $ pw presubmit --install
@@ -398,6 +398,18 @@
 
   $ git push origin HEAD:refs/for/main --no-verify
 
+Presubmit and branch management
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+When creating new feature branches, make sure to specify the upstream branch to
+track, e.g.
+
+.. code:: bash
+
+  $ git checkout -b myfeature origin/main
+
+When tracking an upstream branch, ``pw presubmit`` will only run checks on the
+modified files, rather than the entire repository.
+
 .. _Sphinx: https://www.sphinx-doc.org/
 
 .. inclusive-language: disable
diff --git a/pw_build/generated_pigweed_modules_lists.gni b/pw_build/generated_pigweed_modules_lists.gni
index e5765ee..f3d3627 100644
--- a/pw_build/generated_pigweed_modules_lists.gni
+++ b/pw_build/generated_pigweed_modules_lists.gni
@@ -126,6 +126,8 @@
       get_path_info("../pw_sys_io_baremetal_lm3s6965evb", "abspath")
   dir_pw_sys_io_baremetal_stm32f429 =
       get_path_info("../pw_sys_io_baremetal_stm32f429", "abspath")
+  dir_pw_sys_io_emcraft_sf2 =
+      get_path_info("../pw_sys_io_emcraft_sf2", "abspath")
   dir_pw_sys_io_mcuxpresso = get_path_info("../pw_sys_io_mcuxpresso", "abspath")
   dir_pw_sys_io_stdio = get_path_info("../pw_sys_io_stdio", "abspath")
   dir_pw_sys_io_stm32cube = get_path_info("../pw_sys_io_stm32cube", "abspath")
@@ -254,6 +256,7 @@
     dir_pw_sys_io_arduino,
     dir_pw_sys_io_baremetal_lm3s6965evb,
     dir_pw_sys_io_baremetal_stm32f429,
+    dir_pw_sys_io_emcraft_sf2,
     dir_pw_sys_io_mcuxpresso,
     dir_pw_sys_io_stdio,
     dir_pw_sys_io_stm32cube,
@@ -439,6 +442,7 @@
     "$dir_pw_sys_io:docs",
     "$dir_pw_sys_io_arduino:docs",
     "$dir_pw_sys_io_baremetal_stm32f429:docs",
+    "$dir_pw_sys_io_emcraft_sf2:docs",
     "$dir_pw_sys_io_mcuxpresso:docs",
     "$dir_pw_sys_io_stdio:docs",
     "$dir_pw_sys_io_stm32cube:docs",
diff --git a/pw_console/py/pw_console/quit_dialog.py b/pw_console/py/pw_console/quit_dialog.py
index e37ebc3..b466580 100644
--- a/pw_console/py/pw_console/quit_dialog.py
+++ b/pw_console/py/pw_console/quit_dialog.py
@@ -16,7 +16,8 @@
 from __future__ import annotations
 import functools
 import logging
-from typing import TYPE_CHECKING
+import sys
+from typing import Optional, Callable, TYPE_CHECKING
 
 from prompt_toolkit.data_structures import Point
 from prompt_toolkit.key_binding import KeyBindings, KeyPressEvent
@@ -44,13 +45,18 @@
 
     DIALOG_HEIGHT = 2
 
-    def __init__(self, application: ConsoleApp):
+    def __init__(self,
+                 application: ConsoleApp,
+                 on_quit: Optional[Callable] = None):
         self.application = application
         self.show_dialog = False
         # Tracks the last focused container, to enable restoring focus after
         # closing the dialog.
         self.last_focused_pane = None
 
+        self.on_quit_function = (on_quit if on_quit else
+                                 self._default_on_quit_function)
+
         # Quit keybindings are active when this dialog is in focus
         key_bindings = KeyBindings()
         register = self.application.prefs.register_keybinding
@@ -114,8 +120,14 @@
         self.focus_self()
         self.application.redraw_ui()
 
+    def _default_on_quit_function(self):
+        if hasattr(self.application, 'application'):
+            self.application.application.exit()
+        else:
+            sys.exit()
+
     def quit_action(self):
-        self.application.application.exit()
+        self.on_quit_function()
 
     def get_action_fragments(self):
         """Return FormattedText with action buttons."""
diff --git a/pw_console/py/pw_console/widgets/border.py b/pw_console/py/pw_console/widgets/border.py
index 64a03a3..0cf1170 100644
--- a/pw_console/py/pw_console/widgets/border.py
+++ b/pw_console/py/pw_console/widgets/border.py
@@ -13,7 +13,7 @@
 # the License.
 """Wrapper fuctions to add borders around prompt_toolkit containers."""
 
-from typing import List, Optional
+from typing import Callable, List, Optional, Union
 
 from prompt_toolkit.layout import (
     AnyContainer,
@@ -29,8 +29,8 @@
     content: AnyContainer,
     content_height: Optional[int] = None,
     title: str = '',
-    border_style: str = '',
-    base_style: str = '',
+    border_style: Union[Callable[[], str], str] = '',
+    base_style: Union[Callable[[], str], str] = '',
     top: bool = True,
     bottom: bool = True,
     left: bool = True,
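Accepting a callable for the style arguments lets the border style be
recomputed on every render instead of being fixed at construction time. A
minimal sketch follows, assuming the wrapper defined in this file is named
``create_border``; the ``focused`` flag is purely illustrative.

.. code-block:: python

  from prompt_toolkit.layout import Window

  from pw_console.widgets.border import create_border

  focused = False

  def dynamic_border_style() -> str:
      # Re-evaluated each time the container is rendered.
      return 'class:border-focused' if focused else 'class:border'

  bordered = create_border(
      Window(),  # Any prompt_toolkit container works here.
      title='Logs',
      border_style=dynamic_border_style,
  )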
diff --git a/pw_crypto/ecdsa_boringssl.cc b/pw_crypto/ecdsa_boringssl.cc
index 4556950..4d361e3 100644
--- a/pw_crypto/ecdsa_boringssl.cc
+++ b/pw_crypto/ecdsa_boringssl.cc
@@ -11,7 +11,7 @@
 // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 // License for the specific language governing permissions and limitations under
 // the License.
-#define PW_LOG_MODULE_NAME "ECDSA"
+#define PW_LOG_MODULE_NAME "ECDSA-BSSL"
 #define PW_LOG_LEVEL PW_LOG_LEVEL_WARN
 
 #include "openssl/bn.h"
diff --git a/pw_crypto/ecdsa_mbedtls.cc b/pw_crypto/ecdsa_mbedtls.cc
index 92e6258..dd68eac 100644
--- a/pw_crypto/ecdsa_mbedtls.cc
+++ b/pw_crypto/ecdsa_mbedtls.cc
@@ -11,7 +11,7 @@
 // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 // License for the specific language governing permissions and limitations under
 // the License.
-#define PW_LOG_MODULE_NAME "ECDSA"
+#define PW_LOG_MODULE_NAME "ECDSA-MTLS"
 #define PW_LOG_LEVEL PW_LOG_LEVEL_WARN
 
 #include "mbedtls/ecdsa.h"
diff --git a/pw_crypto/ecdsa_uecc.cc b/pw_crypto/ecdsa_uecc.cc
index 25e37b1..937d79d 100644
--- a/pw_crypto/ecdsa_uecc.cc
+++ b/pw_crypto/ecdsa_uecc.cc
@@ -11,7 +11,7 @@
 // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 // License for the specific language governing permissions and limitations under
 // the License.
-#define PW_LOG_MODULE_NAME "ECDSA"
+#define PW_LOG_MODULE_NAME "ECDSA-UECC"
 #define PW_LOG_LEVEL PW_LOG_LEVEL_WARN
 
 #include "pw_crypto/ecdsa.h"
diff --git a/pw_crypto/public/pw_crypto/sha256.h b/pw_crypto/public/pw_crypto/sha256.h
index cce159a..1389e28 100644
--- a/pw_crypto/public/pw_crypto/sha256.h
+++ b/pw_crypto/public/pw_crypto/sha256.h
@@ -14,9 +14,6 @@
 
 #pragma once
 
-#define PW_LOG_MODULE_NAME "SHA256"
-#define PW_LOG_LEVEL PW_LOG_LEVEL_WARN
-
 #include <cstdint>
 
 #include "pw_bytes/span.h"
diff --git a/pw_crypto/sha256_boringssl.cc b/pw_crypto/sha256_boringssl.cc
index ba558b1..fb9c783 100644
--- a/pw_crypto/sha256_boringssl.cc
+++ b/pw_crypto/sha256_boringssl.cc
@@ -11,6 +11,8 @@
 // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 // License for the specific language governing permissions and limitations under
 // the License.
+#define PW_LOG_MODULE_NAME "SHA256-BSSL"
+#define PW_LOG_LEVEL PW_LOG_LEVEL_WARN
 
 #include "pw_crypto/sha256.h"
 #include "pw_status/status.h"
diff --git a/pw_crypto/sha256_mbedtls.cc b/pw_crypto/sha256_mbedtls.cc
index 0e9c489..8cb6595 100644
--- a/pw_crypto/sha256_mbedtls.cc
+++ b/pw_crypto/sha256_mbedtls.cc
@@ -11,6 +11,8 @@
 // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 // License for the specific language governing permissions and limitations under
 // the License.
+#define PW_LOG_MODULE_NAME "SHA256-MTLS"
+#define PW_LOG_LEVEL PW_LOG_LEVEL_WARN
 
 #include "pw_crypto/sha256.h"
 #include "pw_status/status.h"
diff --git a/pw_env_setup/py/pw_env_setup/cipd_setup/pigweed.json b/pw_env_setup/py/pw_env_setup/cipd_setup/pigweed.json
index 77a3a71..6d89385 100644
--- a/pw_env_setup/py/pw_env_setup/cipd_setup/pigweed.json
+++ b/pw_env_setup/py/pw_env_setup/cipd_setup/pigweed.json
@@ -36,7 +36,7 @@
         "windows-amd64"
       ],
       "tags": [
-        "version:3.23.20220319-g6657551"
+        "version:3.23.20220402-g6733ad4"
       ],
       "version_file": ".versions/cmake.cipd_version"
     },
@@ -140,7 +140,7 @@
         "windows-amd64"
       ],
       "tags": [
-        "git_revision:194c5e100619fbbd1e3ace7347aaa647c39e143f"
+        "git_revision:3f30a2e1ac848bd3dde1322ee2ace4d4f935c29d"
       ],
       "version_file": ".versions/host_tools.cipd_version"
     },
diff --git a/pw_hdlc/py/pw_hdlc/rpc.py b/pw_hdlc/py/pw_hdlc/rpc.py
index 786f097..b412753 100644
--- a/pw_hdlc/py/pw_hdlc/rpc.py
+++ b/pw_hdlc/py/pw_hdlc/rpc.py
@@ -139,7 +139,8 @@
                  output: Callable[[bytes], Any] = write_to_file,
                  client_impl: pw_rpc.client.ClientImpl = None,
                  *,
-                 _incoming_packet_filter_for_testing: '_PacketFilter' = None):
+                 _incoming_packet_filter_for_testing: pw_rpc.
+                 ChannelManipulator = None):
         """Creates an RPC client configured to communicate using HDLC.
 
         Args:
@@ -159,10 +160,13 @@
         self.client = pw_rpc.Client.from_modules(client_impl, channels,
                                                  self.protos.modules())
 
-        self._test_filter = _incoming_packet_filter_for_testing
+        rpc_output: Callable[[bytes], Any] = self._handle_rpc_packet
+        if _incoming_packet_filter_for_testing is not None:
+            _incoming_packet_filter_for_testing.send_packet = rpc_output
+            rpc_output = _incoming_packet_filter_for_testing
 
         frame_handlers: FrameHandlers = {
-            DEFAULT_ADDRESS: self._handle_rpc_packet,
+            DEFAULT_ADDRESS: lambda frame: rpc_output(frame.data),
             STDOUT_ADDRESS: lambda frame: output(frame.data),
         }
 
@@ -184,12 +188,9 @@
 
         return self.client.channel(channel_id).rpcs
 
-    def _handle_rpc_packet(self, frame: Frame) -> None:
-        if self._test_filter and not self._test_filter.keep_packet(frame.data):
-            return
-
-        if not self.client.process_packet(frame.data):
-            _LOG.error('Packet not handled by RPC client: %s', frame.data)
+    def _handle_rpc_packet(self, packet: bytes) -> None:
+        if not self.client.process_packet(packet):
+            _LOG.error('Packet not handled by RPC client: %s', packet)
 
 
 def _try_connect(port: int, attempts: int = 10) -> socket.socket:
@@ -241,16 +242,21 @@
         self.close()
 
 
-class _PacketFilter:
+class PacketFilter(pw_rpc.ChannelManipulator):
     """Determines if a packet should be kept or dropped for testing purposes."""
     _Action = Callable[[int], Tuple[bool, bool]]
     _KEEP = lambda _: (True, False)
     _DROP = lambda _: (False, False)
 
     def __init__(self, name: str) -> None:
+        super().__init__()
         self.name = name
         self.packet_count = 0
-        self._actions: Deque[_PacketFilter._Action] = collections.deque()
+        self._actions: Deque[PacketFilter._Action] = collections.deque()
+
+    def process_and_send(self, packet: bytes):
+        if self.keep_packet(packet):
+            self.send_packet(packet)
 
     def reset(self) -> None:
         self.packet_count = 0
@@ -258,11 +264,11 @@
 
     def keep(self, count: int) -> None:
         """Keeps the next count packets."""
-        self._actions.extend(_PacketFilter._KEEP for _ in range(count))
+        self._actions.extend(PacketFilter._KEEP for _ in range(count))
 
     def drop(self, count: int) -> None:
         """Drops the next count packets."""
-        self._actions.extend(_PacketFilter._DROP for _ in range(count))
+        self._actions.extend(PacketFilter._DROP for _ in range(count))
 
     def drop_every(self, every: int) -> None:
         """Drops every Nth packet forever."""
@@ -290,33 +296,21 @@
         return keep
 
 
-class _TestChannelOutput:
-    def __init__(self, send: Callable[[bytes], Any]) -> None:
-        self._send = send
-        self.packets = _PacketFilter('outgoing RPC')
-
-    def __call__(self, data: bytes) -> None:
-        if self.packets.keep_packet(data):
-            self._send(data)
-
-
 class HdlcRpcLocalServerAndClient:
     """Runs an RPC server in a subprocess and connects to it over a socket.
 
     This can be used to run a local RPC server in an integration test.
     """
-    def __init__(self,
-                 server_command: Sequence,
-                 port: int,
-                 protos: PathsModulesOrProtoLibrary,
-                 *,
-                 for_testing: bool = False) -> None:
-        """Creates a new HdlcRpcLocalServerAndClient.
-
-        If for_testing=True, the HdlcRpcLocalServerAndClient will have
-        outgoing_packets and incoming_packets _PacketFilter members that can be
-        used to program packet loss for testing purposes.
-        """
+    def __init__(
+        self,
+        server_command: Sequence,
+        port: int,
+        protos: PathsModulesOrProtoLibrary,
+        *,
+        incoming_processor: Optional[pw_rpc.ChannelManipulator] = None,
+        outgoing_processor: Optional[pw_rpc.ChannelManipulator] = None
+    ) -> None:
+        """Creates a new HdlcRpcLocalServerAndClient."""
 
         self.server = SocketSubprocess(server_command, port)
 
@@ -327,20 +321,18 @@
         self.output = io.BytesIO()
 
         self.channel_output: Any = self.server.socket.sendall
-        if for_testing:
-            self.channel_output = _TestChannelOutput(self.channel_output)
-            self.outgoing_packets = self.channel_output.packets
-            self.incoming_packets = _PacketFilter('incoming RPC')
-            incoming_filter: Optional[_PacketFilter] = self.incoming_packets
-        else:
-            incoming_filter = None
+
+        self._incoming_processor = incoming_processor
+        if outgoing_processor is not None:
+            outgoing_processor.send_packet = self.channel_output
+            self.channel_output = outgoing_processor
 
         self.client = HdlcRpcClient(
             self._bytes_queue.get,
             protos,
             default_channels(self.channel_output),
             self.output.write,
-            _incoming_packet_filter_for_testing=incoming_filter).client
+            _incoming_packet_filter_for_testing=incoming_processor).client
 
     def _read_from_socket(self):
         while True:
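The new ``incoming_processor`` and ``outgoing_processor`` parameters accept any
``pw_rpc.ChannelManipulator``, including the ``PacketFilter`` defined above.
The following is a minimal sketch of programming packet loss in an integration
test; the server command, port, and proto path are placeholders, not values
from this change.

.. code-block:: python

  from pathlib import Path

  from pw_hdlc import rpc

  # Drop every 5th server-to-client packet and every 3rd client-to-server
  # packet to exercise recovery behavior.
  incoming = rpc.PacketFilter('incoming RPC')
  incoming.drop_every(5)
  outgoing = rpc.PacketFilter('outgoing RPC')
  outgoing.drop_every(3)

  server_and_client = rpc.HdlcRpcLocalServerAndClient(
      ['path/to/test_rpc_server'],         # Placeholder server command.
      port=33000,                          # Placeholder port.
      protos=[Path('the_service.proto')],  # Placeholder proto source.
      incoming_processor=incoming,
      outgoing_processor=outgoing,
  )

  # RPCs are then invoked through server_and_client.client as usual.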
diff --git a/pw_hdlc/py/pw_hdlc/rpc_console.py b/pw_hdlc/py/pw_hdlc/rpc_console.py
index 27d4028..38bc6e0 100644
--- a/pw_hdlc/py/pw_hdlc/rpc_console.py
+++ b/pw_hdlc/py/pw_hdlc/rpc_console.py
@@ -256,7 +256,11 @@
         serial_impl = SerialWithLogging
 
     if socket_addr is None:
-        serial_device = serial_impl(device, baudrate, timeout=1)
+        serial_device = serial_impl(
+            device,
+            baudrate,
+            timeout=0,  # Non-blocking mode
+        )
         read = lambda: serial_device.read(8192)
         write = serial_device.write
     else:
diff --git a/pw_log/Android.bp b/pw_log/Android.bp
index 64a6020..d9cadd4 100644
--- a/pw_log/Android.bp
+++ b/pw_log/Android.bp
@@ -19,4 +19,11 @@
     export_include_dirs: [
         "public",
     ],
-}
\ No newline at end of file
+}
+
+android_library {
+    name: "pw_log_android_java",
+    srcs: ["java/android_main/dev/pigweed/pw_log/*.java"],
+    visibility: ["//visibility:public"],
+    sdk_version: "current",
+}
diff --git a/pw_log/AndroidManifest.xml b/pw_log/AndroidManifest.xml
new file mode 100644
index 0000000..6b359c2
--- /dev/null
+++ b/pw_log/AndroidManifest.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+   Copyright 2022 The Pigweed Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License"); you may not
+   use this file except in compliance with the License. You may obtain a copy of
+   the License at
+
+   https://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+   WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+   License for the specific language governing permissions and limitations under
+   the License.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="dev.pigweed.pw_log" >
+
+    <uses-sdk
+        android:minSdkVersion="14"
+        android:targetSdkVersion="32" />
+</manifest>
diff --git a/pw_log/docs.rst b/pw_log/docs.rst
index 6c30cd8..b3eac43 100644
--- a/pw_log/docs.rst
+++ b/pw_log/docs.rst
@@ -412,3 +412,14 @@
 something that are typically added on a per-log-statement basis, and is why the
 flags are added on a per-call basis (though hidden through the high-level
 macros).
+
+--------------
+pw_log in Java
+--------------
+``pw_log`` provides a thin Java logging class that uses Google's `Flogger
+<https://google.github.io/flogger/>`_ API. The purpose of this wrapper is to
+support logging on platforms that do not support Flogger. The main
+implementation in ``pw_log/java/main`` simply wraps a
+``com.google.common.flogger.FluentLogger``. An implementation that logs to
+Android's ``android.util.Log`` instead is provided in
+``pw_log/java/android_main``.
diff --git a/pw_log/java/android_main/dev/pigweed/pw_log/Logger.java b/pw_log/java/android_main/dev/pigweed/pw_log/Logger.java
new file mode 100644
index 0000000..3390011
--- /dev/null
+++ b/pw_log/java/android_main/dev/pigweed/pw_log/Logger.java
@@ -0,0 +1,103 @@
+// Copyright 2022 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+package dev.pigweed.pw_log;
+
+import android.util.Log;
+import java.util.logging.Level;
+
+/**
+ * Partial implementation of the com.google.common.flogger.FluentLogger API that
+ * logs to android.util.Log.
+ */
+public final class Logger {
+  private final String tag;
+
+  public final class AndroidLogApi {
+    private final int level;
+
+    private Throwable cause = null;
+
+    private AndroidLogApi(Level level) {
+      if (level == Level.FINEST || level == Level.FINER) {
+        this.level = Log.VERBOSE;
+      } else if (level == Level.FINE || level == Level.CONFIG) {
+        this.level = Log.DEBUG;
+      } else if (level == Level.WARNING) {
+        this.level = Log.WARN;
+      } else if (level == Level.SEVERE) {
+        this.level = Log.ERROR;
+      } else {
+        this.level = Log.INFO;
+      }
+    }
+
+    public AndroidLogApi withCause(Throwable cause) {
+      this.cause = cause;
+      return this;
+    }
+
+    public void log(String message) {
+      if (cause != null) {
+        message = String.format("%s: %s", cause, message);
+      }
+
+      Log.println(level, tag, message);
+    }
+
+    public void log(String message, Object... args) {
+      log(String.format(message, args));
+    }
+  }
+
+  public static Logger forClass(Class<?> enclosingClass) {
+    return new Logger(enclosingClass.getSimpleName());
+  }
+
+  private Logger(String tag) {
+    this.tag = tag;
+  }
+
+  public AndroidLogApi at(Level level) {
+    return new AndroidLogApi(level);
+  }
+
+  public AndroidLogApi atSevere() {
+    return at(Level.SEVERE);
+  }
+
+  public AndroidLogApi atWarning() {
+    return at(Level.WARNING);
+  }
+
+  public AndroidLogApi atInfo() {
+    return at(Level.INFO);
+  }
+
+  public AndroidLogApi atConfig() {
+    return at(Level.CONFIG);
+  }
+
+  public AndroidLogApi atFine() {
+    return at(Level.FINE);
+  }
+
+  public AndroidLogApi atFiner() {
+    return at(Level.FINER);
+  }
+
+  public AndroidLogApi atFinest() {
+    return at(Level.FINEST);
+  }
+}
diff --git a/pw_log/java/main/dev/pigweed/pw_log/BUILD.bazel b/pw_log/java/main/dev/pigweed/pw_log/BUILD.bazel
new file mode 100644
index 0000000..8ebacb0
--- /dev/null
+++ b/pw_log/java/main/dev/pigweed/pw_log/BUILD.bazel
@@ -0,0 +1,25 @@
+# Copyright 2022 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+# Logging API that maps to Google's Flogger (https://google.github.io/flogger/),
+# or an alternate API if Flogger is not supported.
+
+java_library(
+    name = "pw_log",
+    srcs = ["Logger.java"],
+    visibility = ["//visibility:public"],
+    deps = [
+        "@maven//:com_google_flogger_flogger",
+    ],
+)
diff --git a/pw_log/java/main/dev/pigweed/pw_log/Logger.java b/pw_log/java/main/dev/pigweed/pw_log/Logger.java
new file mode 100644
index 0000000..f1ba744
--- /dev/null
+++ b/pw_log/java/main/dev/pigweed/pw_log/Logger.java
@@ -0,0 +1,71 @@
+// Copyright 2022 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+package dev.pigweed.pw_log;
+
+import com.google.common.flogger.FluentLogger;
+import java.util.logging.Level;
+
+/**
+ * Partial implementation of the com.google.common.flogger.FluentLogger API that
+ * wraps a FluentLogger instance.
+ *
+ * This class is used instead of directly logging to FluentLogger to support
+ * swapping the implementation on systems that don't support FluentLogger (i.e.
+ * Android).
+ */
+@SuppressWarnings("FloggerSplitLogStatement")
+public final class Logger {
+  private final FluentLogger wrappedLogger;
+
+  public static Logger forClass(Class<?> enclosingClass) {
+    return new Logger(FluentLogger.forEnclosingClass());
+  }
+
+  private Logger(FluentLogger fluentLogger) {
+    this.wrappedLogger = fluentLogger;
+  }
+
+  public FluentLogger.Api at(Level level) {
+    return wrappedLogger.at(level);
+  }
+
+  public FluentLogger.Api atSevere() {
+    return at(Level.SEVERE);
+  }
+
+  public FluentLogger.Api atWarning() {
+    return at(Level.WARNING);
+  }
+
+  public FluentLogger.Api atInfo() {
+    return at(Level.INFO);
+  }
+
+  public FluentLogger.Api atConfig() {
+    return at(Level.CONFIG);
+  }
+
+  public FluentLogger.Api atFine() {
+    return at(Level.FINE);
+  }
+
+  public FluentLogger.Api atFiner() {
+    return at(Level.FINER);
+  }
+
+  public FluentLogger.Api atFinest() {
+    return at(Level.FINEST);
+  }
+}
diff --git a/pw_package/py/BUILD.gn b/pw_package/py/BUILD.gn
index bfc46a8..1af696e 100644
--- a/pw_package/py/BUILD.gn
+++ b/pw_package/py/BUILD.gn
@@ -38,6 +38,7 @@
     "pw_package/packages/nanopb.py",
     "pw_package/packages/pico_sdk.py",
     "pw_package/packages/protobuf.py",
+    "pw_package/packages/smartfusion_mss.py",
     "pw_package/packages/stm32cube.py",
     "pw_package/pigweed_packages.py",
   ]
diff --git a/pw_package/py/pw_package/packages/smartfusion_mss.py b/pw_package/py/pw_package/packages/smartfusion_mss.py
new file mode 100644
index 0000000..b9dbe49
--- /dev/null
+++ b/pw_package/py/pw_package/packages/smartfusion_mss.py
@@ -0,0 +1,40 @@
+# Copyright 2022 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+"""Install and check status of SmartFusion MSS."""
+
+import pathlib
+from typing import Sequence
+
+import pw_package.git_repo
+import pw_package.package_manager
+
+
+class SmartfusionMss(pw_package.git_repo.GitRepo):
+    """Install and check status of SmartFusion MSS."""
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args,
+                         name='smartfusion_mss',
+                         url='https://github.com/seank/smartfusion_mss',
+                         commit='9f47db73d3df786eab04d082645da5e735e63d28',
+                         **kwargs)
+
+    def info(self, path: pathlib.Path) -> Sequence[str]:
+        return (
+            f'{self.name} installed in: {path}',
+            "Enable by running 'gn args out' and adding this line:",
+            f'  dir_pw_third_party_smartfusion_mss = "{path}"',
+        )
+
+
+pw_package.package_manager.register(SmartfusionMss)
diff --git a/pw_package/py/pw_package/pigweed_packages.py b/pw_package/py/pw_package/pigweed_packages.py
index 59e0919..c60468c 100644
--- a/pw_package/py/pw_package/pigweed_packages.py
+++ b/pw_package/py/pw_package/pigweed_packages.py
@@ -27,6 +27,7 @@
 from pw_package.packages import nanopb
 from pw_package.packages import pico_sdk  # pylint: disable=unused-import
 from pw_package.packages import protobuf  # pylint: disable=unused-import
+from pw_package.packages import smartfusion_mss  # pylint: disable=unused-import
 from pw_package.packages import stm32cube  # pylint: disable=unused-import
 
 
diff --git a/pw_presubmit/py/pw_presubmit/pigweed_presubmit.py b/pw_presubmit/py/pw_presubmit/pigweed_presubmit.py
index a0cbe83..d7b65d3 100755
--- a/pw_presubmit/py/pw_presubmit/pigweed_presubmit.py
+++ b/pw_presubmit/py/pw_presubmit/pigweed_presubmit.py
@@ -532,6 +532,7 @@
     r'\.json$',
     r'\.png$',
     r'\.svg$',
+    r'\.xml$',
     # Documentation
     r'\.md$',
     r'\.rst$',
diff --git a/pw_rpc/Android.bp b/pw_rpc/Android.bp
index aeca94b..3913566 100644
--- a/pw_rpc/Android.bp
+++ b/pw_rpc/Android.bp
@@ -14,7 +14,6 @@
 
 java_library {
     name: "pw_rpc_java_client",
-    host_supported: true,
     srcs: ["java/main/dev/pigweed/pw_rpc/*.java"],
     visibility: ["//visibility:public"],
     static_libs: [
@@ -25,6 +24,7 @@
         "guava",
         "jsr305",
         "libprotobuf-java-lite",
+        "pw_log_android_java",
     ],
     plugins: ["auto_value_plugin"],
     sdk_version: "current",
diff --git a/pw_rpc/channel.cc b/pw_rpc/channel.cc
index 2167cbc..ea74168 100644
--- a/pw_rpc/channel.cc
+++ b/pw_rpc/channel.cc
@@ -18,20 +18,42 @@
 #include "pw_rpc/internal/channel.h"
 // clang-format on
 
+#include "pw_bytes/span.h"
 #include "pw_log/log.h"
+#include "pw_protobuf/decoder.h"
 #include "pw_rpc/internal/config.h"
 
-namespace pw::rpc::internal {
-
+namespace pw::rpc {
 namespace {
 
 // TODO(pwbug/615): Dynamically allocate this buffer if
 //     PW_RPC_DYNAMIC_ALLOCATION is enabled.
 std::array<std::byte, cfg::kEncodingBufferSizeBytes> encoding_buffer
-    PW_GUARDED_BY(rpc_lock());
+    PW_GUARDED_BY(internal::rpc_lock());
 
 }  // namespace
 
+Result<uint32_t> ExtractChannelId(ConstByteSpan packet) {
+  protobuf::Decoder decoder(packet);
+
+  while (decoder.Next().ok()) {
+    switch (static_cast<internal::RpcPacket::Fields>(decoder.FieldNumber())) {
+      case internal::RpcPacket::Fields::CHANNEL_ID: {
+        uint32_t channel_id;
+        PW_TRY(decoder.ReadUint32(&channel_id));
+        return channel_id;
+      }
+
+      default:
+        continue;
+    }
+  }
+
+  return Status::DataLoss();
+}
+
+namespace internal {
+
 ByteSpan GetPayloadBuffer() PW_EXCLUSIVE_LOCKS_REQUIRED(rpc_lock()) {
   return ByteSpan(encoding_buffer)
       .subspan(Packet::kMinEncodedSizeWithoutPayload);
@@ -61,4 +83,5 @@
   return OkStatus();
 }
 
-}  // namespace pw::rpc::internal
+}  // namespace internal
+}  // namespace pw::rpc
diff --git a/pw_rpc/channel_test.cc b/pw_rpc/channel_test.cc
index 185d257..b61fd8d 100644
--- a/pw_rpc/channel_test.cc
+++ b/pw_rpc/channel_test.cc
@@ -14,6 +14,8 @@
 
 #include "pw_rpc/channel.h"
 
+#include <cstddef>
+
 #include "gtest/gtest.h"
 #include "pw_rpc/internal/packet.h"
 #include "pw_rpc/internal/test_utils.h"
@@ -33,7 +35,7 @@
 }
 
 constexpr Packet kTestPacket(
-    PacketType::RESPONSE, 1, 42, 100, 0, {}, Status::NotFound());
+    PacketType::RESPONSE, 23, 42, 100, 0, {}, Status::NotFound());
 const size_t kReservedSize = 2 /* type */ + 2 /* channel */ + 5 /* service */ +
                              5 /* method */ + 2 /* payload key */ +
                              2 /* status (if not OK) */;
@@ -54,5 +56,23 @@
   EXPECT_EQ(kReservedSize, kTestPacket.MinEncodedSizeBytes());
 }
 
+TEST(ExtractChannelId, ValidPacket) {
+  std::byte buffer[64] = {};
+  Result<ConstByteSpan> result = kTestPacket.Encode(buffer);
+  ASSERT_EQ(result.status(), OkStatus());
+
+  Result<uint32_t> channel_id = ExtractChannelId(*result);
+  ASSERT_EQ(channel_id.status(), OkStatus());
+  EXPECT_EQ(*channel_id, 23u);
+}
+
+TEST(ExtractChannelId, InvalidPacket) {
+  constexpr std::byte buffer[64] = {std::byte{1}, std::byte{2}};
+
+  Result<uint32_t> channel_id = ExtractChannelId(buffer);
+
+  EXPECT_EQ(channel_id.status(), Status::DataLoss());
+}
+
 }  // namespace
 }  // namespace pw::rpc::internal
diff --git a/pw_rpc/client.cc b/pw_rpc/client.cc
index 4aa7404..21a950d 100644
--- a/pw_rpc/client.cc
+++ b/pw_rpc/client.cc
@@ -32,9 +32,7 @@
 }  // namespace
 
 Status Client::ProcessPacket(ConstByteSpan data) {
-  PW_TRY_ASSIGN(Result<Packet> result,
-                Endpoint::ProcessPacket(data, Packet::kClient));
-  Packet& packet = *result;
+  PW_TRY_ASSIGN(Packet packet, Endpoint::ProcessPacket(data, Packet::kClient));
 
   // Find an existing call for this RPC, if any.
   internal::rpc_lock().lock();
diff --git a/pw_rpc/java/main/dev/pigweed/pw_rpc/BUILD.bazel b/pw_rpc/java/main/dev/pigweed/pw_rpc/BUILD.bazel
index 5f834a0..4747b7f 100644
--- a/pw_rpc/java/main/dev/pigweed/pw_rpc/BUILD.bazel
+++ b/pw_rpc/java/main/dev/pigweed/pw_rpc/BUILD.bazel
@@ -36,11 +36,11 @@
     ],
     visibility = ["//visibility:public"],
     deps = [
+        "//pw_log/java/main/dev/pigweed/pw_log",
         "//pw_rpc:packet_proto_java_lite",
         "//third_party/google_auto:value",
         "@com_google_protobuf//java/lite",
         "@maven//:com_google_code_findbugs_jsr305",
-        "@maven//:com_google_flogger_flogger",
         "@maven//:com_google_guava_guava",
     ],
 )
diff --git a/pw_rpc/java/main/dev/pigweed/pw_rpc/Client.java b/pw_rpc/java/main/dev/pigweed/pw_rpc/Client.java
index ea3f7df..a62d77d 100644
--- a/pw_rpc/java/main/dev/pigweed/pw_rpc/Client.java
+++ b/pw_rpc/java/main/dev/pigweed/pw_rpc/Client.java
@@ -14,10 +14,10 @@
 
 package dev.pigweed.pw_rpc;
 
-// import com.google.common.flogger.FluentLogger;
 import com.google.protobuf.ExtensionRegistryLite;
 import com.google.protobuf.InvalidProtocolBufferException;
 import com.google.protobuf.MessageLite;
+import dev.pigweed.pw_log.Logger;
 import dev.pigweed.pw_rpc.internal.Packet.PacketType;
 import dev.pigweed.pw_rpc.internal.Packet.RpcPacket;
 import java.nio.ByteBuffer;
@@ -33,8 +33,7 @@
  * through the processPacket function.
  */
 public class Client {
-  // TODO(pwbug/611): Restore logging without a mandatory Flogger dependency.
-  // private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+  private static final Logger logger = Logger.forClass(Client.class);
 
   private final Map<Integer, Channel> channels;
   private final Map<Integer, Service> services;
@@ -81,17 +80,17 @@
     return create(channels, services, (rpc) -> new StreamObserver<MessageLite>() {
       @Override
       public void onNext(MessageLite value) {
-        // logger.atFine().log("%s received response: %s", rpc, value);
+        logger.atFine().log("%s received response: %s", rpc, value);
       }
 
       @Override
       public void onCompleted(Status status) {
-        // logger.atInfo().log("%s completed with status %s", rpc, status);
+        logger.atInfo().log("%s completed with status %s", rpc, status);
       }
 
       @Override
       public void onError(Status status) {
-        // logger.atWarning().log("%s terminated with error %s", rpc, status);
+        logger.atWarning().log("%s terminated with error %s", rpc, status);
       }
     });
   }
@@ -167,31 +166,30 @@
     try {
       packet = RpcPacket.parseFrom(data, ExtensionRegistryLite.getEmptyRegistry());
     } catch (InvalidProtocolBufferException e) {
-      // logger.atWarning().withCause(e).log("Failed to decode packet");
+      logger.atWarning().withCause(e).log("Failed to decode packet");
       return false;
     }
 
     if (packet.getChannelId() == 0 || packet.getServiceId() == 0 || packet.getMethodId() == 0) {
-      // logger.atWarning().log("Received corrupt packet with unset IDs");
+      logger.atWarning().log("Received corrupt packet with unset IDs");
       return false;
     }
 
     // Packets for the server use even type values.
     if (packet.getTypeValue() % 2 == 0) {
-      // logger.atFine().log("Ignoring %s packet for server", packet.getType().name());
+      logger.atFine().log("Ignoring %s packet for server", packet.getType().name());
       return false;
     }
 
     Channel channel = channels.get(packet.getChannelId());
     if (channel == null) {
-      // logger.atWarning().log(
-      //     "Received packet for unrecognized channel %d", packet.getChannelId());
+      logger.atWarning().log("Received packet for unrecognized channel %d", packet.getChannelId());
       return false;
     }
 
     PendingRpc rpc = lookupRpc(channel, packet);
     if (rpc == null) {
-      // logger.atInfo().log("Ignoring packet for unknown service method");
+      logger.atInfo().log("Ignoring packet for unknown service method");
       sendError(channel, packet, Status.NOT_FOUND);
       return true; // true since the packet was handled, even though it was invalid.
     }
@@ -200,31 +198,42 @@
     StreamObserverCall<?, ?> call =
         packet.getType().equals(PacketType.SERVER_STREAM) ? rpcs.getPending(rpc) : rpcs.clear(rpc);
     if (call == null) {
-      // logger.atInfo().log(
-      //    "Ignoring packet for RPC (%s) that isn't pending. Pending RPCs are: %s", rpc, rpcs);
+      logger.atFine().log(
+          "Ignoring packet for %s, which isn't pending. Pending RPCs are %s", rpc, rpcs);
       sendError(channel, packet, Status.FAILED_PRECONDITION);
       return true;
     }
 
     switch (packet.getType()) {
-      case SERVER_ERROR:
+      case SERVER_ERROR: {
         Status status = decodeStatus(packet);
-        // logger.atWarning().log("RPC %s failed with error %s", rpc, status);
+        logger.atWarning().log("%s failed with error %s", rpc, status);
         call.onError(status);
         break;
-      case RESPONSE:
+      }
+      case RESPONSE: {
+        Status status = decodeStatus(packet);
         // Server streaming and unary RPCs include a payload with their response packet.
         if (!rpc.method().isServerStreaming()) {
+          logger.atFiner().log("%s completed with status %s and %d B payload",
+              rpc,
+              status,
+              packet.getPayload().size());
           call.onNext(packet.getPayload());
+        } else {
+          logger.atFiner().log("%s completed with status %s", rpc, status);
         }
-        call.onCompleted(decodeStatus(packet));
+        call.onCompleted(status);
         break;
+      }
       case SERVER_STREAM:
+        logger.atFiner().log(
+            "%s received server stream with %d B payload", rpc, packet.getPayload().size());
         call.onNext(packet.getPayload());
         break;
       default:
-        // logger.atWarning().log(
-        //    "Unexpected PacketType %d for RPC %s", packet.getType().getNumber(), rpc);
+        logger.atWarning().log(
+            "%s received unexpected PacketType %d", rpc, packet.getType().getNumber());
     }
 
     return true;
@@ -234,7 +243,7 @@
     try {
       channel.send(Packets.error(packet, status));
     } catch (ChannelOutputException e) {
-      // logger.atWarning().withCause(e).log("Failed to send error packet");
+      logger.atWarning().withCause(e).log("Failed to send error packet");
     }
   }
 
@@ -254,8 +263,8 @@
   private static Status decodeStatus(RpcPacket packet) {
     Status status = Status.fromCode(packet.getStatus());
     if (status == null) {
-      // logger.atWarning().log(
-      //    "Illegal status code %d in packet; using Status.UNKNOWN ", packet.getStatus());
+      logger.atWarning().log(
+          "Illegal status code %d in packet; using Status.UNKNOWN ", packet.getStatus());
       return Status.UNKNOWN;
     }
     return status;
diff --git a/pw_rpc/java/main/dev/pigweed/pw_rpc/PendingRpc.java b/pw_rpc/java/main/dev/pigweed/pw_rpc/PendingRpc.java
index 6cc9b47..59e210d 100644
--- a/pw_rpc/java/main/dev/pigweed/pw_rpc/PendingRpc.java
+++ b/pw_rpc/java/main/dev/pigweed/pw_rpc/PendingRpc.java
@@ -32,7 +32,6 @@
 
   @Override
   public final String toString() {
-    return String.format(
-        Locale.ENGLISH, "PendingRpc(channel=%d, method=%s)", channel().id(), method());
+    return String.format(Locale.ENGLISH, "RpcCall[%s channel=%d]", method(), channel().id());
   }
 }
diff --git a/pw_rpc/java/main/dev/pigweed/pw_rpc/RpcManager.java b/pw_rpc/java/main/dev/pigweed/pw_rpc/RpcManager.java
index 12e6db1..bc690bb 100644
--- a/pw_rpc/java/main/dev/pigweed/pw_rpc/RpcManager.java
+++ b/pw_rpc/java/main/dev/pigweed/pw_rpc/RpcManager.java
@@ -14,16 +14,16 @@
 
 package dev.pigweed.pw_rpc;
 
-// import com.google.common.flogger.FluentLogger;
 import com.google.protobuf.MessageLite;
+import dev.pigweed.pw_log.Logger;
 import java.util.HashMap;
 import java.util.Map;
 import javax.annotation.Nullable;
 
 /** Tracks the state of service method invocations. */
 public class RpcManager {
-  // TODO(pwbug/611): Restore logging without a mandatory Flogger dependency.
-  // private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+  private static final Logger logger = Logger.forClass(RpcManager.class);
+
   private final Map<PendingRpc, StreamObserverCall<?, ?>> pending = new HashMap<>();
 
   /**
@@ -37,7 +37,7 @@
   public synchronized StreamObserverCall<?, ?> start(
       PendingRpc rpc, StreamObserverCall<?, ?> call, @Nullable MessageLite payload)
       throws ChannelOutputException {
-    // logger.atFine().log("Start %s", rpc);
+    logger.atFine().log("%s starting", rpc);
     rpc.channel().send(Packets.request(rpc, payload));
     return pending.put(rpc, call);
   }
@@ -51,12 +51,12 @@
   @Nullable
   public synchronized StreamObserverCall<?, ?> open(
       PendingRpc rpc, StreamObserverCall<?, ?> call, @Nullable MessageLite payload) {
-    // logger.atFine().log("Open %s", rpc);
+    logger.atFine().log("%s opening", rpc);
     try {
       rpc.channel().send(Packets.request(rpc, payload));
     } catch (ChannelOutputException e) {
-      // logger.atFine().withCause(e).log(
-      //    "Ignoring error opening %s; listening for unrequested responses", rpc);
+      logger.atFiner().withCause(e).log(
+          "Ignoring error opening %s; listening for unrequested responses", rpc);
     }
     return pending.put(rpc, call);
   }
@@ -67,7 +67,7 @@
       throws ChannelOutputException {
     StreamObserverCall<?, ?> call = pending.remove(rpc);
     if (call != null) {
-      // logger.atFine().log("Cancel %s", rpc);
+      logger.atFine().log("%s was cancelled", rpc);
       rpc.channel().send(Packets.cancel(rpc));
     }
     return call;
@@ -88,6 +88,7 @@
       throws ChannelOutputException {
     StreamObserverCall<?, ?> call = pending.get(rpc);
     if (call != null) {
+      logger.atFiner().log("%s client stream closed", rpc);
       rpc.channel().send(Packets.clientStreamEnd(rpc));
     }
     return call;
@@ -95,11 +96,7 @@
 
   @Nullable
   public synchronized StreamObserverCall<?, ?> clear(PendingRpc rpc) {
-    StreamObserverCall<?, ?> call = pending.remove(rpc);
-    if (call != null) {
-      // logger.atFine().log("Clear %s", rpc);
-    }
-    return call;
+    return pending.remove(rpc);
   }
 
   @Nullable
diff --git a/pw_rpc/java/main/dev/pigweed/pw_rpc/Status.java b/pw_rpc/java/main/dev/pigweed/pw_rpc/Status.java
index 72a1cfe..58ef8b2 100644
--- a/pw_rpc/java/main/dev/pigweed/pw_rpc/Status.java
+++ b/pw_rpc/java/main/dev/pigweed/pw_rpc/Status.java
@@ -14,6 +14,8 @@
 
 package dev.pigweed.pw_rpc;
 
+import javax.annotation.Nullable;
+
 /** Status object for RPC statuses. Must match gRPC's status codes. */
 public enum Status {
   OK(0),
@@ -56,6 +58,7 @@
     return code == 0;
   }
 
+  @Nullable
   public static Status fromCode(int code) {
     return code >= 0 && code < values.length ? values[code] : null;
   }
diff --git a/pw_rpc/java/main/dev/pigweed/pw_rpc/StreamObserverCall.java b/pw_rpc/java/main/dev/pigweed/pw_rpc/StreamObserverCall.java
index f59d151..537362c 100644
--- a/pw_rpc/java/main/dev/pigweed/pw_rpc/StreamObserverCall.java
+++ b/pw_rpc/java/main/dev/pigweed/pw_rpc/StreamObserverCall.java
@@ -14,11 +14,11 @@
 
 package dev.pigweed.pw_rpc;
 
-// import com.google.common.flogger.FluentLogger;
 import com.google.common.util.concurrent.AbstractFuture;
 import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
 import com.google.protobuf.MessageLite;
+import dev.pigweed.pw_log.Logger;
 import dev.pigweed.pw_rpc.Call.ClientStreaming;
 import java.util.function.Consumer;
 import javax.annotation.Nullable;
@@ -35,8 +35,7 @@
  */
 class StreamObserverCall<RequestT extends MessageLite, ResponseT extends MessageLite>
     implements ClientStreaming<RequestT> {
-  // TODO(pwbug/611): Restore logging without a mandatory Flogger dependency.
-  // private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+  private static final Logger logger = Logger.forClass(StreamObserverCall.class);
 
   private final RpcManager rpcs;
   private final PendingRpc rpc;
@@ -258,8 +257,8 @@
     try {
       return (ResponseT) rpc.method().decodeResponsePayload(payload);
     } catch (InvalidProtocolBufferException e) {
-      // logger.atWarning().withCause(e).log(
-      //    "Failed to decode response for method %s; skipping packet", rpc.method().name());
+      logger.atWarning().withCause(e).log(
+          "Failed to decode response for method %s; skipping packet", rpc.method().name());
       return null;
     }
   }
diff --git a/pw_rpc/public/pw_rpc/channel.h b/pw_rpc/public/pw_rpc/channel.h
index 099a994..8495956 100644
--- a/pw_rpc/public/pw_rpc/channel.h
+++ b/pw_rpc/public/pw_rpc/channel.h
@@ -19,11 +19,17 @@
 #include <type_traits>
 
 #include "pw_assert/assert.h"
+#include "pw_bytes/span.h"
+#include "pw_result/result.h"
 #include "pw_rpc/internal/lock.h"
 #include "pw_status/status.h"
 
 namespace pw::rpc {
 
+// Extracts the channel ID from a pw_rpc packet. Returns DATA_LOSS if the
+// packet is corrupt and the channel ID could not be found.
+Result<uint32_t> ExtractChannelId(ConstByteSpan packet);
+
 class ChannelOutput {
  public:
   // Returned from MaximumTransmissionUnit() to indicate that this ChannelOutput
diff --git a/pw_rpc/py/docs.rst b/pw_rpc/py/docs.rst
index 9d29a10..2c0e306 100644
--- a/pw_rpc/py/docs.rst
+++ b/pw_rpc/py/docs.rst
@@ -23,6 +23,13 @@
     ClientStreamingCall,
     BidirectionalStreamingCall,
 
+pw_rpc.descriptors
+==================
+.. automodule:: pw_rpc.descriptors
+  :members:
+    Channel,
+    ChannelManipulator,
+
 pw_rpc.console_tools
 ====================
 .. automodule:: pw_rpc.console_tools
diff --git a/pw_rpc/py/pw_rpc/__init__.py b/pw_rpc/py/pw_rpc/__init__.py
index 1f1e72e..ff1f871 100644
--- a/pw_rpc/py/pw_rpc/__init__.py
+++ b/pw_rpc/py/pw_rpc/__init__.py
@@ -14,4 +14,4 @@
 """Package for calling Pigweed RPCs from Python."""
 
 from pw_rpc.client import Client
-from pw_rpc.descriptors import Channel
+from pw_rpc.descriptors import Channel, ChannelManipulator
diff --git a/pw_rpc/py/pw_rpc/descriptors.py b/pw_rpc/py/pw_rpc/descriptors.py
index fdae732..57ba984 100644
--- a/pw_rpc/py/pw_rpc/descriptors.py
+++ b/pw_rpc/py/pw_rpc/descriptors.py
@@ -13,6 +13,7 @@
 # the License.
 """Types representing the basic pw_rpc concepts: channel, service, method."""
 
+import abc
 from dataclasses import dataclass
 import enum
 from inspect import Parameter
@@ -37,6 +38,57 @@
         return f'Channel({self.id})'
 
 
+class ChannelManipulator(abc.ABC):
+    """A a pipe interface that may manipulate packets before they're sent.
+
+    ``ChannelManipulator``s allow application-specific packet handling to be
+    injected into the packet processing pipeline for an ingress or egress
+    channel-like pathway. This is particularly useful for integration testing
+    resilience to things like packet loss on a usually-reliable transport. RPC
+    server integrations (e.g. ``HdlcRpcLocalServerAndClient``) may provide an
+    opportunity to inject a ``ChannelManipulator`` for this use case.
+
+    A ``ChannelManipulator`` should not modify send_packet, as the consumer of a
+    ``ChannelManipulator`` will use ``send_packet`` to insert the provided
+    ``ChannelManipulator`` into a packet processing path.
+
+    For example:
+
+    .. code-block:: python
+
+      class PacketLogger(ChannelManipulator):
+          def process_and_send(self, packet: bytes) -> None:
+              _LOG.debug('Received packet with payload: %s', str(packet))
+              self.send_packet(packet)
+
+
+      packet_logger = PacketLogger()
+
+      # Configure actual send command.
+      packet_logger.send_packet = socket.sendall
+
+      # Route the output channel through the PacketLogger channel manipulator.
+      channels = tuple(Channel(_DEFAULT_CHANNEL, packet_logger))
+
+      # Create a RPC client.
+      client = HdlcRpcClient(socket.read, protos, channels, stdout)
+    """
+    def __init__(self):
+        self.send_packet: Callable[[bytes], Any] = lambda _: None
+
+    @abc.abstractmethod
+    def process_and_send(self, packet: bytes) -> None:
+        """Processes an incoming packet before optionally sending it.
+
+        Implementations of this method may send the processed packet, multiple
+        packets, or no packets at all via the registered `send_packet()`
+        handler.
+        """
+
+    def __call__(self, data: bytes) -> None:
+        self.process_and_send(data)
+
+
 @dataclass(frozen=True, eq=False)
 class Service:
     """Describes an RPC service."""
diff --git a/pw_rpc/server.cc b/pw_rpc/server.cc
index 78681b5..d7634ab 100644
--- a/pw_rpc/server.cc
+++ b/pw_rpc/server.cc
@@ -34,9 +34,8 @@
 
 Status Server::ProcessPacket(ConstByteSpan packet_data,
                              ChannelOutput* interface) {
-  PW_TRY_ASSIGN(Result<Packet> result,
+  PW_TRY_ASSIGN(Packet packet,
                 Endpoint::ProcessPacket(packet_data, Packet::kServer));
-  Packet& packet = *result;
 
   internal::rpc_lock().lock();
   internal::ServerCall* const call =
diff --git a/pw_software_update/BUILD.bazel b/pw_software_update/BUILD.bazel
index c8b8b3f..ef586fa 100644
--- a/pw_software_update/BUILD.bazel
+++ b/pw_software_update/BUILD.bazel
@@ -52,6 +52,7 @@
         "//pw_protobuf",
         "//pw_status",
         "//pw_stream",
+        "//pw_string",
     ],
 )
 
diff --git a/pw_software_update/BUILD.gn b/pw_software_update/BUILD.gn
index 13d5632..f59438b 100644
--- a/pw_software_update/BUILD.gn
+++ b/pw_software_update/BUILD.gn
@@ -102,6 +102,7 @@
       ":config",
       ":protos.pwpb",
       dir_pw_log,
+      dir_pw_string,
     ]
     sources = [
       "manifest_accessor.cc",
diff --git a/pw_software_update/bundled_update_service.cc b/pw_software_update/bundled_update_service.cc
index 86417e0..cbbc67e 100644
--- a/pw_software_update/bundled_update_service.cc
+++ b/pw_software_update/bundled_update_service.cc
@@ -12,16 +12,17 @@
 // License for the specific language governing permissions and limitations under
 // the License.
 
-#include "pw_software_update/config.h"
+#define PW_LOG_MODULE_NAME "PWSU"
+#define PW_LOG_LEVEL PW_LOG_LEVEL_WARN
 
-#define PW_LOG_LEVEL PW_SOFTWARE_UPDATE_CONFIG_LOG_LEVEL
+#include "pw_software_update/bundled_update_service.h"
 
 #include <mutex>
 #include <string_view>
 
 #include "pw_log/log.h"
 #include "pw_result/result.h"
-#include "pw_software_update/bundled_update_service.h"
+#include "pw_software_update/config.h"
 #include "pw_software_update/manifest_accessor.h"
 #include "pw_software_update/update_bundle.pwpb.h"
 #include "pw_status/status.h"
diff --git a/pw_software_update/public/pw_software_update/config.h b/pw_software_update/public/pw_software_update/config.h
index 5e72dcf..3eaf18c 100644
--- a/pw_software_update/public/pw_software_update/config.h
+++ b/pw_software_update/public/pw_software_update/config.h
@@ -13,12 +13,6 @@
 // the License.
 #pragma once
 
-// The log level to use for this module. Logs below this level are omitted.
-#define PW_LOG_MODULE_NAME "PWSU"
-#ifndef PW_SOFTWARE_UPDATE_CONFIG_LOG_LEVEL
-#define PW_SOFTWARE_UPDATE_CONFIG_LOG_LEVEL PW_LOG_LEVEL_WARN
-#endif  // PW_SOFTWARE_UPDATE_CONFIG_LOG_LEVEL
-
 // The size of the buffer to create on stack for streaming manifest data from
 // the bundle reader.
 #define WRITE_MANIFEST_STREAM_PIPE_BUFFER_SIZE 8
diff --git a/pw_software_update/update_bundle_accessor.cc b/pw_software_update/update_bundle_accessor.cc
index 70050fb..453f932 100644
--- a/pw_software_update/update_bundle_accessor.cc
+++ b/pw_software_update/update_bundle_accessor.cc
@@ -12,6 +12,9 @@
 // License for the specific language governing permissions and limitations under
 // the License.
 
+#define PW_LOG_MODULE_NAME "PWSU"
+#define PW_LOG_LEVEL PW_LOG_LEVEL_WARN
+
 #include "pw_software_update/update_bundle_accessor.h"
 
 #include <cstddef>
@@ -28,8 +31,7 @@
 #include "pw_software_update/update_bundle.pwpb.h"
 #include "pw_stream/interval_reader.h"
 #include "pw_stream/memory_stream.h"
-
-#define PW_LOG_LEVEL PW_SOFTWARE_UPDATE_CONFIG_LOG_LEVEL
+#include "pw_string/string_builder.h"
 
 namespace pw::software_update {
 namespace {
@@ -122,7 +124,7 @@
     }
 
     if (!key_id_is_allowed) {
-      PW_LOG_DEBUG("Skipping a key id not listed in allowed key ids.");
+      PW_LOG_DEBUG("Skipping a key id not listed in allowed key ids");
       LogKeyId(key_id_buf);
       continue;
     }
@@ -164,11 +166,9 @@
     return Status::NotFound();
   }
 
-  PW_LOG_DEBUG(
-      "Not enough number of signatures verified. Requires at least %u, "
-      "verified %u",
-      threshold.value(),
-      verified_count);
+  PW_LOG_ERROR("Insufficient signatures. Requires at least %u, verified %u",
+               threshold.value(),
+               verified_count);
   return Status::Unauthenticated();
 }
 
@@ -249,9 +249,13 @@
 }  // namespace
 
 Status UpdateBundleAccessor::OpenAndVerify() {
-  PW_TRY(DoOpen());
+  if (Status status = DoOpen(); !status.ok()) {
+    PW_LOG_ERROR("Failed to open staged bundle");
+    return status;
+  }
 
   if (Status status = DoVerify(); !status.ok()) {
+    PW_LOG_ERROR("Failed to verified staged bundle");
     Close();
     return status;
   }
@@ -359,7 +363,7 @@
 
 Status UpdateBundleAccessor::DoVerify() {
 #if PW_SOFTWARE_UPDATE_DISABLE_BUNDLE_VERIFICATION
-  PW_LOG_WARN("Update bundle verification is disabled.");
+  PW_LOG_WARN("Bundle verification is compiled out.");
   bundle_verified_ = true;
   return OkStatus();
 #else   // PW_SOFTWARE_UPDATE_DISABLE_BUNDLE_VERIFICATION
@@ -367,15 +371,24 @@
 
   // Verify and upgrade the on-device trust to the incoming root metadata if
   // one is included.
-  PW_TRY(UpgradeRoot());
+  if (Status status = UpgradeRoot(); !status.ok()) {
+    PW_LOG_ERROR("Failed to upgrade to Root in staged bundle");
+    return status;
+  }
 
   // TODO(pwbug/456): Verify the targets metadata against the current trusted
   // root.
-  PW_TRY(VerifyTargetsMetadata());
+  if (Status status = VerifyTargetsMetadata(); !status.ok()) {
+    PW_LOG_ERROR("Failed to verify Targets metadata");
+    return status;
+  }
 
   // TODO(pwbug/456): Investigate whether targets payload verification should
   // be performed here or deferred until a specific target is requested.
-  PW_TRY(VerifyTargetsPayloads());
+  if (Status status = VerifyTargetsPayloads(); !status.ok()) {
+    PW_LOG_ERROR("Failed to verify all manifested payloads");
+    return status;
+  }
 
   // TODO(pwbug/456): Invoke the backend to do downstream verification of the
   // bundle (e.g. compatibility and manifest completeness checks).
@@ -387,8 +400,10 @@
 
 protobuf::Message UpdateBundleAccessor::GetOnDeviceTrustedRoot() {
   Result<stream::SeekableReader*> res = backend_.GetRootMetadataReader();
-  PW_TRY(res.status());
-  PW_CHECK_NOTNULL(res.value());
+  if (!(res.ok() && res.value())) {
+    PW_LOG_ERROR("Failed to get on-device Root metadata");
+    return res.status();
+  }
   // Seek to the beginning so that ConservativeReadLimit() returns the correct
   // value.
   PW_TRY(res.value()->Seek(0, stream::Stream::Whence::kBeginning));
@@ -426,7 +441,7 @@
 
   if (!new_root.status().ok()) {
     // Don't bother upgrading if not found or invalid.
-    PW_LOG_WARN("Incoming root metadata not found or invalid.");
+    PW_LOG_WARN("Incoming root metadata not found or invalid");
     return OkStatus();
   }
 
@@ -439,9 +454,8 @@
   // Verify the signatures against the trusted root metadata.
   Result<bool> verify_res =
       VerifyRootMetadataSignatures(trusted_root_, new_root);
-  PW_TRY(verify_res.status());
-  if (!verify_res.value()) {
-    PW_LOG_INFO("Fail to verify signatures against the current root");
+  if (!(verify_res.status().ok() && verify_res.value())) {
+    PW_LOG_ERROR("Failed to verify incoming root against the current root");
     return Status::Unauthenticated();
   }
 
@@ -456,9 +470,8 @@
 
   // Verify the signatures against the new root metadata.
   verify_res = VerifyRootMetadataSignatures(new_root, new_root);
-  PW_TRY(verify_res.status());
-  if (!verify_res.value()) {
-    PW_LOG_INFO("Fail to verify signatures against the new root");
+  if (!(verify_res.status().ok() && verify_res.value())) {
+    PW_LOG_ERROR("Fail to verify incoming root against itself");
     return Status::Unauthenticated();
   }
 
@@ -483,7 +496,7 @@
   PW_TRY(new_root_version.status());
 
   if (trusted_root_version.value() > new_root_version.value()) {
-    PW_LOG_DEBUG("Root attempts to rollback from %u to %u.",
+    PW_LOG_ERROR("Root attempts to rollback from %u to %u",
                  trusted_root_version.value(),
                  new_root_version.value());
     return Status::Unauthenticated();
@@ -513,7 +526,8 @@
 
   if (self_verifying && !trusted_root_.status().ok()) {
     PW_LOG_WARN(
-        "Targets metadata self-verification is noop due to unavailable Root.");
+        "Self-verification won't verify Targets metadata because there is no "
+        "root");
     return OkStatus();
   }
 
@@ -580,25 +594,27 @@
                                key_mapping);
 
   if (self_verifying && sig_res.IsNotFound()) {
-    PW_LOG_WARN("Unsigned bundles ignored by self-verification.");
+    PW_LOG_WARN("Self-verification ignoring unsigned bundle");
     return OkStatus();
   }
 
-  PW_TRY(sig_res);
+  if (!sig_res.ok()) {
+    PW_LOG_ERROR("Targets Metadata failed signature verification");
+    return Status::Unauthenticated();
+  }
 
   // TODO(pwbug/456): Check targets metadata content.
 
   if (self_verifying) {
     // Don't bother because it does not matter.
-    PW_LOG_WARN(
-        "Self verification does not do Targets metadata anti-rollback.");
+    PW_LOG_WARN("Self verification does not do Targets metadata anti-rollback");
     return OkStatus();
   }
 
   // Anti-rollback check.
   ManifestAccessor device_manifest = GetOnDeviceManifest();
   if (device_manifest.status().IsNotFound()) {
-    PW_LOG_WARN("Skipping OTA anti-rollback due to absent device manifest.");
+    PW_LOG_WARN("Skipping OTA anti-rollback due to absent device manifest");
     return OkStatus();
   }
 
@@ -612,7 +628,7 @@
           software_update::TargetsMetadata::Fields::COMMON_METADATA));
   PW_TRY(new_version.status());
   if (current_version.value() > new_version.value()) {
-    PW_LOG_DEBUG("Targets attempt to rollback from %u to %u.",
+    PW_LOG_ERROR("Blocking Targets metadata rollback from %u to %u",
                  current_version.value(),
                  new_version.value());
     return Status::Unauthenticated();
@@ -646,7 +662,7 @@
         target_file.AsUint64(static_cast<uint32_t>(TargetFile::Fields::LENGTH));
     PW_TRY(target_length.status());
     if (target_length.value() > PW_SOFTWARE_UPDATE_MAX_TARGET_PAYLOAD_SIZE) {
-      PW_LOG_ERROR("Target payload too large. Maximum supported is %llu bytes.",
+      PW_LOG_ERROR("Target payload too big. Maximum is %llu bytes",
                    PW_SOFTWARE_UPDATE_MAX_TARGET_PAYLOAD_SIZE);
       return Status::OutOfRange();
     }
@@ -668,8 +684,13 @@
     }
     PW_TRY(target_sha256.status());
 
-    PW_TRY(VerifyTargetPayload(
-        bundle_manifest, target_name.value(), target_length, target_sha256));
+    if (Status status = VerifyTargetPayload(
+            bundle_manifest, target_name.value(), target_length, target_sha256);
+        !status.ok()) {
+      PW_LOG_ERROR("Target: %s failed verification",
+                   pw::MakeString(target_name.value()).c_str());
+      return status;
+    }
   }  // for each target file in manifest.
 
   return OkStatus();
@@ -686,6 +707,7 @@
       payloads_map[target_name].GetBytesReader();
 
   Status status;
+
   if (payload_reader.ok()) {
     status = VerifyInBundleTargetPayload(
         expected_length, expected_sha256, payload_reader);
@@ -712,7 +734,7 @@
   if (!device_manifest.ok()) {
     PW_LOG_ERROR(
         "Can't verify personalized-out target because on-device manifest is "
-        "not found.");
+        "not found");
     return Status::Unauthenticated();
   }
 
@@ -720,7 +742,7 @@
   if (!cached.ok()) {
     PW_LOG_ERROR(
         "Can't verify personalized-out target because it is not found from "
-        "on-device manifest.");
+        "on-device manifest");
     return Status::Unauthenticated();
   }
 
@@ -753,7 +775,7 @@
   Result<bool> hash_equal = expected_sha256.Equal(sha256);
   PW_TRY(hash_equal.status());
   if (!hash_equal.value()) {
-    PW_LOG_ERROR("Personalized-out target has a bad hash.");
+    PW_LOG_ERROR("Personalized-out target has a bad hash");
     return Status::Unauthenticated();
   }
 
@@ -783,7 +805,7 @@
   Result<bool> hash_equal = expected_sha256.Equal(actual_sha256);
   PW_TRY(hash_equal.status());
   if (!hash_equal.value()) {
-    PW_LOG_ERROR("Wrong payload sha256 hash.");
+    PW_LOG_ERROR("Wrong payload sha256 hash");
     return Status::Unauthenticated();
   }
 
diff --git a/pw_sys_io_emcraft_sf2/BUILD.bazel b/pw_sys_io_emcraft_sf2/BUILD.bazel
new file mode 100644
index 0000000..fb10a78
--- /dev/null
+++ b/pw_sys_io_emcraft_sf2/BUILD.bazel
@@ -0,0 +1,39 @@
+# Copyright 2022 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+load(
+    "//pw_build:pigweed.bzl",
+    "pw_cc_library",
+)
+
+package(default_visibility = ["//visibility:public"])
+
+licenses(["notice"])
+
+pw_cc_library(
+    name = "pw_sys_io_emcraft_sf2",
+    srcs = [
+        "pw_sys_io_emcraft_sf2_private/config.h",
+        "sys_io_emcraft_sf2.cc",
+    ],
+    hdrs = ["public/pw_sys_io_emcraft_sf2/init.h"],
+    target_compatible_with = [
+        "@platforms//os:none",
+    ],
+    deps = [
+        "//pw_boot_cortex_m:armv7m",
+        "//pw_preprocessor",
+        "//pw_sys_io",
+    ],
+)
diff --git a/pw_sys_io_emcraft_sf2/BUILD.gn b/pw_sys_io_emcraft_sf2/BUILD.gn
new file mode 100644
index 0000000..236722f
--- /dev/null
+++ b/pw_sys_io_emcraft_sf2/BUILD.gn
@@ -0,0 +1,60 @@
+# Copyright 2022 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+import("//build_overrides/pigweed.gni")
+
+import("$dir_pw_build/module_config.gni")
+import("$dir_pw_build/target_types.gni")
+import("$dir_pw_docgen/docs.gni")
+import("$dir_pw_third_party/smartfusion_mss/mss.gni")
+
+declare_args() {
+  # The build target that overrides the default configuration options for this
+  # module. This should point to a source set that provides defines through a
+  # public config (which may -include a file or add defines directly).
+  pw_sys_io_emcraft_sf2_CONFIG = pw_build_DEFAULT_MODULE_CONFIG
+}
+
+config("public_includes") {
+  include_dirs = [ "public" ]
+}
+
+pw_source_set("config") {
+  public_deps = [ pw_sys_io_emcraft_sf2_CONFIG ]
+  public = [ "pw_sys_io_emcraft_sf2_private/config.h" ]
+  visibility = [ ":*" ]
+}
+
+pw_source_set("pw_sys_io_emcraft_sf2") {
+  public_configs = [ ":public_includes" ]
+  public_deps = [ "$dir_pw_preprocessor" ]
+  if (dir_pw_third_party_smartfusion_mss != "") {
+    public_deps += [ "$dir_pw_third_party/smartfusion_mss" ]
+  }
+  public = [ "public/pw_sys_io_emcraft_sf2/init.h" ]
+  sources = [ "sys_io_emcraft_sf2.cc" ]
+  deps = [
+    ":config",
+    "$dir_pw_status",
+    "$dir_pw_sys_io:default_putget_bytes",
+    "$dir_pw_sys_io:facade",
+  ]
+}
+
+pw_doc_group("docs") {
+  sources = [ "docs.rst" ]
+}
diff --git a/pw_sys_io_emcraft_sf2/docs.rst b/pw_sys_io_emcraft_sf2/docs.rst
new file mode 100644
index 0000000..66ef6c9
--- /dev/null
+++ b/pw_sys_io_emcraft_sf2/docs.rst
@@ -0,0 +1,44 @@
+.. _module-pw_sys_io_emcraft_sf2:
+
+---------------------
+pw_sys_io_emcraft_sf2
+---------------------
+
+``pw_sys_io_emcraft_sf2`` implements the ``pw_sys_io`` facade over
+UART.
+
+The Emcraft SF2 sys IO backend provides a UART driver layer that allows
+applications built against the ``pw_sys_io`` interface to run on a
+SmartFusion2 chip and do simple input/output via UART. It should work with
+all SmartFusion2 variants.
+
+This backend allows you to configure which UART to use. It intentionally
+provides only the bare-minimum platform code needed to do UART reads and
+writes.
+
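+For example, to select a different USART, a project could point the
+``pw_sys_io_emcraft_sf2_CONFIG`` GN argument at a config that defines
+``PW_SYS_IO_EMCRAFT_SF2_USART_NUM``. The header below is a hypothetical
+sketch; its name and location are illustrative, not part of this module:
+
+.. code-block:: cpp
+
+  // my_project/sf2_sys_io_config.h (hypothetical module config header)
+  #pragma once
+
+  // Use USART2 instead of the default USART1.
+  #define PW_SYS_IO_EMCRAFT_SF2_USART_NUM 2
+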
+Setup
+=====
+This module requires relatively minimal setup:
+
+  1. Write code against the ``pw_sys_io`` facade.
+  2. Specify the ``dir_pw_sys_io_backend`` GN global variable to point to this
+     backend.
+  3. Call ``pw_sys_io_Init()``, provided by this module, in early boot to get
+     ``pw_sys_io`` into a working state (see the sketch after this list).
+  4. Build an executable with a main() function using a toolchain that
+     supports Cortex-M3.
+
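+The snippet below is a minimal sketch of steps 1 and 3. Everything other than
+``pw_sys_io_Init()`` and the ``pw::sys_io`` facade calls (for example, doing
+the init from ``main()`` rather than the target's boot code) is illustrative:
+
+.. code-block:: cpp
+
+  #include "pw_sys_io/sys_io.h"
+  #include "pw_sys_io_emcraft_sf2/init.h"
+
+  int main() {
+    // Normally invoked from the target's early boot code; it must run before
+    // any other sys IO calls.
+    pw_sys_io_Init();
+
+    // Write a line over the configured UART through the pw_sys_io facade.
+    if (!pw::sys_io::WriteLine("pw_sys_io_emcraft_sf2 is up").ok()) {
+      // Handle the error as appropriate for the application.
+    }
+    return 0;
+  }
+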
+.. note::
+  This module provides early firmware init, so it will conflict with other
+  modules that do any early device init.
+
+Module usage
+============
+After building an executable that utilizes this backend, flash the
+produced .elf binary to the development board. Then, using a serial
+communication terminal like minicom/screen (Linux/Mac) or TeraTerm (Windows),
+connect to the device at a baud rate of 57600 (8N1).
+
+Dependencies
+============
+  * ``pw_sys_io`` facade
+  * ``pw_preprocessor`` module
diff --git a/pw_sys_io_emcraft_sf2/public/pw_sys_io_emcraft_sf2/init.h b/pw_sys_io_emcraft_sf2/public/pw_sys_io_emcraft_sf2/init.h
new file mode 100644
index 0000000..ccfd61e
--- /dev/null
+++ b/pw_sys_io_emcraft_sf2/public/pw_sys_io_emcraft_sf2/init.h
@@ -0,0 +1,23 @@
+// Copyright 2022 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+#pragma once
+
+#include "pw_preprocessor/util.h"
+
+PW_EXTERN_C_START
+
+// The actual implementation of PreMainInit() in sys_io_BACKEND.
+void pw_sys_io_Init(void);
+
+PW_EXTERN_C_END
diff --git a/pw_sys_io_emcraft_sf2/pw_sys_io_emcraft_sf2_private/config.h b/pw_sys_io_emcraft_sf2/pw_sys_io_emcraft_sf2_private/config.h
new file mode 100644
index 0000000..fb3db3a
--- /dev/null
+++ b/pw_sys_io_emcraft_sf2/pw_sys_io_emcraft_sf2_private/config.h
@@ -0,0 +1,22 @@
+// Copyright 2022 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#pragma once
+
+// Defaults to USART1 on the SmartFusion2, but can be overridden.
+
+// The USART peripheral number to use. (1 for USART1, 2 for USART2, etc.)
+#ifndef PW_SYS_IO_EMCRAFT_SF2_USART_NUM
+#define PW_SYS_IO_EMCRAFT_SF2_USART_NUM 1
+#endif  // PW_SYS_IO_EMCRAFT_SF2_USART_NUM
diff --git a/pw_sys_io_emcraft_sf2/sys_io_emcraft_sf2.cc b/pw_sys_io_emcraft_sf2/sys_io_emcraft_sf2.cc
new file mode 100644
index 0000000..d94a621
--- /dev/null
+++ b/pw_sys_io_emcraft_sf2/sys_io_emcraft_sf2.cc
@@ -0,0 +1,104 @@
+// Copyright 2022 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#include <cinttypes>
+
+#include "mss_gpio/mss_gpio.h"
+#include "mss_uart/mss_uart.h"
+#include "pw_preprocessor/concat.h"
+#include "pw_status/status.h"
+#include "pw_sys_io/sys_io.h"
+#include "pw_sys_io_emcraft_sf2_private/config.h"
+
+namespace {
+
+// LEDs GPIOs
+
+constexpr mss_gpio_id_t kDs3LedGpio = MSS_GPIO_1;
+constexpr mss_gpio_id_t kDs4LedGpio = MSS_GPIO_2;
+constexpr uint32_t kDs3LedMask = MSS_GPIO_1_MASK;
+constexpr uint32_t kDs4LedMask = MSS_GPIO_2_MASK;
+
+constexpr uint32_t kReadDataReady = 0x1u;
+
+}  // namespace
+
+extern "C" void pw_sys_io_Init() {
+  // Configure MSS GPIOs.
+#if SF2_MSS_NO_BOOTLOADER
+  MSS_GPIO_init();
+#endif
+
+  MSS_GPIO_config(kDs3LedGpio, MSS_GPIO_OUTPUT_MODE);
+  MSS_GPIO_config(kDs4LedGpio, MSS_GPIO_OUTPUT_MODE);
+  // Set LEDs to initial app state
+  MSS_GPIO_set_outputs(MSS_GPIO_get_outputs() | kDs4LedMask);
+
+  // Initialize the UART0 controller (57600, 8N1).
+  // Due to a HW errata in SF2, we need to run at 57600 for
+  // in-system-programming mode. If we are not upgrading FPGA or flash then we
+  // can use a faster baud rate.
+  MSS_UART_init(
+      &g_mss_uart0,
+      MSS_UART_57600_BAUD,
+      MSS_UART_DATA_8_BITS | MSS_UART_NO_PARITY | MSS_UART_ONE_STOP_BIT);
+}
+
+// This whole implementation is very inefficient because it uses the synchronous
+// polling UART API and only reads / writes 1 byte at a time.
+namespace pw::sys_io {
+
+Status ReadByte(std::byte* dest) {
+  while (true) {
+    if (TryReadByte(dest).ok()) {
+      return OkStatus();
+    }
+  }
+}
+
+Status TryReadByte(std::byte* dest) {
+  if (!(g_mss_uart0.hw_reg->LSR & kReadDataReady)) {
+    return Status::Unavailable();
+  }
+
+  *dest = static_cast<std::byte>(g_mss_uart0.hw_reg->RBR);
+  return OkStatus();
+}
+
+Status WriteByte(std::byte b) {
+  // The synchronous (polled) UART API waits for space in the TX buffer before
+  // writing the value out of the UART.
+  const uint8_t pbuff = static_cast<uint8_t>(b);
+
+  MSS_UART_polled_tx(&g_mss_uart0, &pbuff, 1);
+  return OkStatus();
+}
+
+// Writes a string using pw::sys_io, and adds newline characters at the end.
+StatusWithSize WriteLine(const std::string_view& s) {
+  size_t chars_written = 0;
+  StatusWithSize result = WriteBytes(std::as_bytes(std::span(s)));
+  if (!result.ok()) {
+    return result;
+  }
+  chars_written += result.size();
+
+  // Write trailing newline.
+  result = WriteBytes(std::as_bytes(std::span("\r\n", 2)));
+  chars_written += result.size();
+
+  return StatusWithSize(result.status(), chars_written);
+}
+
+}  // namespace pw::sys_io
diff --git a/pw_system/BUILD.gn b/pw_system/BUILD.gn
index 85ee99e..fd5cb5a 100644
--- a/pw_system/BUILD.gn
+++ b/pw_system/BUILD.gn
@@ -16,6 +16,7 @@
 
 import("$dir_pigweed/third_party/freertos/freertos.gni")
 import("$dir_pigweed/third_party/nanopb/nanopb.gni")
+import("$dir_pigweed/third_party/smartfusion_mss/mss.gni")
 import("$dir_pigweed/third_party/stm32cube/stm32cube.gni")
 import("$dir_pw_build/error.gni")
 import("$dir_pw_build/facade.gni")
@@ -102,7 +103,6 @@
 
 pw_facade("rpc_server") {
   backend = pw_system_RPC_SERVER_BACKEND
-  visibility = [ ":*" ]
   public = [ "public/pw_system/rpc_server.h" ]
   public_configs = [ ":public_include_path" ]
   public_deps = [
@@ -113,7 +113,6 @@
 
 pw_facade("io") {
   backend = pw_system_IO_BACKEND
-  visibility = [ ":*" ]
   public_configs = [ ":public_include_path" ]
   public = [ "public/pw_system/io.h" ]
   public_deps = [ "$dir_pw_stream" ]
@@ -148,7 +147,6 @@
 }
 
 pw_source_set("work_queue") {
-  visibility = [ ":*" ]
   public_configs = [ ":public_include_path" ]
   public = [ "public/pw_system/work_queue.h" ]
   sources = [ "work_queue.cc" ]
@@ -229,6 +227,14 @@
         dir_pw_third_party_freertos != "") {
       deps += [ ":system_example($dir_pigweed/targets/stm32f429i_disc1_stm32cube:stm32f429i_disc1_stm32cube.size_optimized)" ]
     }
+    if (dir_pw_third_party_smartfusion_mss != "" &&
+        dir_pw_third_party_freertos != "") {
+      deps += [
+        ":system_example($dir_pigweed/targets/emcraft_sf2_som:emcraft_sf2_som.size_optimized)",
+        ":system_example($dir_pigweed/targets/emcraft_sf2_som:emcraft_sf2_som.speed_optimized)",
+        ":system_example($dir_pigweed/targets/emcraft_sf2_som:emcraft_sf2_som_debug.debug)",
+      ]
+    }
   }
 } else {
   pw_error("system_examples") {
diff --git a/pw_system/py/pw_system/console.py b/pw_system/py/pw_system/console.py
index 2a676e0..7843837 100644
--- a/pw_system/py/pw_system/console.py
+++ b/pw_system/py/pw_system/console.py
@@ -61,9 +61,7 @@
 from pw_log.proto import log_pb2
 from pw_rpc.console_tools.console import flattened_rpc_completions
 from pw_system.device import Device
-from pw_tokenizer.database import LoadTokenDatabases
-from pw_tokenizer.detokenize import Detokenizer
-from pw_tokenizer import tokens
+from pw_tokenizer.detokenize import AutoUpdatingDetokenizer
 
 _LOG = logging.getLogger('tools')
 _DEVICE_LOG = logging.getLogger('rpc_device')
@@ -105,7 +103,7 @@
     parser.add_argument("--token-databases",
                         metavar='elf_or_token_database',
                         nargs="+",
-                        action=LoadTokenDatabases,
+                        type=Path,
                         help="Path to tokenizer database csv file(s).")
     parser.add_argument('--config-file',
                         type=Path,
@@ -211,7 +209,7 @@
 def console(device: str,
             baudrate: int,
             proto_globs: Collection[str],
-            token_databases: Collection[tokens.Database],
+            token_databases: Collection[Path],
             socket_addr: str,
             logfile: str,
             output: Any,
@@ -235,8 +233,8 @@
 
     detokenizer = None
     if token_databases:
-        detokenizer = Detokenizer(tokens.Database.merged(*token_databases),
-                                  show_errors=True)
+        detokenizer = AutoUpdatingDetokenizer(*token_databases)
+        detokenizer.show_errors = True
 
     if not proto_globs:
         proto_globs = ['**/*.proto']
@@ -263,7 +261,11 @@
 
     timestamp_decoder = None
     if socket_addr is None:
-        serial_device = serial_impl(device, baudrate, timeout=1)
+        serial_device = serial_impl(
+            device,
+            baudrate,
+            timeout=0,  # Non-blocking mode
+        )
         read = lambda: serial_device.read(8192)
         write = serial_device.write
 
diff --git a/pw_tokenizer/docs.rst b/pw_tokenizer/docs.rst
index e124e12..dc908ea 100644
--- a/pw_tokenizer/docs.rst
+++ b/pw_tokenizer/docs.rst
@@ -954,6 +954,44 @@
     TransmitLogMessage(base64_buffer, base64_size);
   }
 
+Investigating undecoded messages
+--------------------------------
+Tokenized messages cannot be decoded if the token is not recognized. The Python
+package includes the ``parse_message`` tool, which parses tokenized Base64
+messages without looking up the token in a database. This tool attempts to guess
+the types of the arguments and displays potential ways to decode them.
+
+This tool can be used to extract argument information from an otherwise unusable
+message. It could help identify which statement in the code produced the
+message. This tool is not particularly helpful for tokenized messages without
+arguments, since all it can do is show the value of the unknown token.
+
+The tool is executed by passing Base64 tokenized messages, with or without the
+``$`` prefix, to ``pw_tokenizer.parse_message``. Pass ``-h`` or ``--help`` to
+see full usage information.
+
+Example
+^^^^^^^
+.. code-block::
+
+  $ python -m pw_tokenizer.parse_message '$329JMwA=' koSl524TRkFJTEVEX1BSRUNPTkRJVElPTgJPSw== --specs %s %d
+
+  INF Decoding arguments for '$329JMwA='
+  INF Binary: b'\xdfoI3\x00' [df 6f 49 33 00] (5 bytes)
+  INF Token:  0x33496fdf
+  INF Args:   b'\x00' [00] (1 bytes)
+  INF Decoding with up to 8 %s or %d arguments
+  INF   Attempt 1: [%s]
+  INF   Attempt 2: [%d] 0
+
+  INF Decoding arguments for '$koSl524TRkFJTEVEX1BSRUNPTkRJVElPTgJPSw=='
+  INF Binary: b'\x92\x84\xa5\xe7n\x13FAILED_PRECONDITION\x02OK' [92 84 a5 e7 6e 13 46 41 49 4c 45 44 5f 50 52 45 43 4f 4e 44 49 54 49 4f 4e 02 4f 4b] (28 bytes)
+  INF Token:  0xe7a58492
+  INF Args:   b'n\x13FAILED_PRECONDITION\x02OK' [6e 13 46 41 49 4c 45 44 5f 50 52 45 43 4f 4e 44 49 54 49 4f 4e 02 4f 4b] (24 bytes)
+  INF Decoding with up to 8 %s or %d arguments
+  INF   Attempt 1: [%d %s %d %d %d] 55 FAILED_PRECONDITION 1 -40 -38
+  INF   Attempt 2: [%d %s %s] 55 FAILED_PRECONDITION OK
+
 Command line utilities
 ^^^^^^^^^^^^^^^^^^^^^^
 ``pw_tokenizer`` provides two standalone command line utilities for detokenizing
diff --git a/pw_tokenizer/py/BUILD.gn b/pw_tokenizer/py/BUILD.gn
index b0006ed..2ac3044 100644
--- a/pw_tokenizer/py/BUILD.gn
+++ b/pw_tokenizer/py/BUILD.gn
@@ -40,6 +40,7 @@
     "pw_tokenizer/detokenize.py",
     "pw_tokenizer/elf_reader.py",
     "pw_tokenizer/encode.py",
+    "pw_tokenizer/parse_message.py",
     "pw_tokenizer/proto/__init__.py",
     "pw_tokenizer/serial_detokenizer.py",
     "pw_tokenizer/tokens.py",
diff --git a/pw_tokenizer/py/decode_test.py b/pw_tokenizer/py/decode_test.py
index c0c4366..be08eb8 100755
--- a/pw_tokenizer/py/decode_test.py
+++ b/pw_tokenizer/py/decode_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# Copyright 2020 The Pigweed Authors
+# Copyright 2022 The Pigweed Authors
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may not
 # use this file except in compliance with the License. You may obtain a copy of
@@ -14,6 +14,7 @@
 # the License.
 """Tests the tokenized string decode module."""
 
+from datetime import datetime
 import unittest
 
 import tokenized_string_decoding_test_data as tokenized_string
@@ -21,7 +22,7 @@
 from pw_tokenizer import decode
 
 
-def error(msg, value=None):
+def error(msg, value=None) -> str:
     """Formats msg as the message for an argument that failed to parse."""
     if value is None:
         return '<[{}]>'.format(msg)
@@ -30,13 +31,13 @@
 
 class TestDecodeTokenized(unittest.TestCase):
     """Tests decoding tokenized strings with various arguments."""
-    def test_decode_generated_data(self):
+    def test_decode_generated_data(self) -> None:
         self.assertGreater(len(tokenized_string.TEST_DATA), 100)
 
         for fmt, decoded, encoded in tokenized_string.TEST_DATA:
             self.assertEqual(decode.decode(fmt, encoded, True), decoded)
 
-    def test_unicode_decode_errors(self):
+    def test_unicode_decode_errors(self) -> None:
         """Tests unicode errors, which do not occur in the C++ decoding code."""
         self.assertEqual(decode.decode('Why, %c', b'\x01', True),
                          'Why, ' + error('%c ERROR', -1))
@@ -55,12 +56,12 @@
         self.assertEqual(decode.decode('%c', b'\xff\xff\xff\xff\x0f', True),
                          error('%c ERROR', -2147483648))
 
-    def test_ignore_errors(self):
+    def test_ignore_errors(self) -> None:
         self.assertEqual(decode.decode('Why, %c', b'\x01'), 'Why, %c')
 
         self.assertEqual(decode.decode('%s %d', b'\x01!'), '! %d')
 
-    def test_pointer(self):
+    def test_pointer(self) -> None:
         """Tests pointer args, which are not natively supported in Python."""
         self.assertEqual(decode.decode('Hello: %p', b'\x00', True),
                          'Hello: 0x00000000')
@@ -69,8 +70,8 @@
 
 
 class TestIntegerDecoding(unittest.TestCase):
-    """Test decoding variable-length integers."""
-    def test_decode_generated_data(self):
+    """Tests decoding variable-length integers."""
+    def test_decode_generated_data(self) -> None:
         test_data = varint_test_data.TEST_DATA
         self.assertGreater(len(test_data), 100)
 
@@ -86,5 +87,44 @@
                     bytearray(encoded)).value)
 
 
+class TestFormattedString(unittest.TestCase):
+    """Tests scoring how successfully a formatted string decoded."""
+    def test_no_args(self) -> None:
+        result = decode.FormatString('string').format(b'')
+
+        self.assertTrue(result.ok())
+        self.assertEqual(result.score(), (True, True, 0, 0, datetime.max))
+
+    def test_one_arg(self) -> None:
+        result = decode.FormatString('%d').format(b'\0')
+
+        self.assertTrue(result.ok())
+        self.assertEqual(result.score(), (True, True, 0, 1, datetime.max))
+
+    def test_missing_args(self) -> None:
+        result = decode.FormatString('%p%d%d').format(b'\x02\x80')
+
+        self.assertFalse(result.ok())
+        self.assertEqual(result.score(), (False, True, -2, 3, datetime.max))
+        self.assertGreater(result.score(), result.score(datetime.now()))
+        self.assertGreater(result.score(datetime.now()),
+                           result.score(datetime.min))
+
+    def test_compare_score(self) -> None:
+        all_args_ok = decode.FormatString('%d%d%d').format(b'\0\0\0')
+        missing_one_arg = decode.FormatString('%d%d%d').format(b'\0\0')
+        missing_two_args = decode.FormatString('%d%d%d').format(b'\0')
+        all_args_extra_data = decode.FormatString('%d%d%d').format(b'\0\0\0\1')
+        missing_one_arg_extra_data = decode.FormatString('%d%d%d').format(
+            b'\0' + b'\x80' * 100)
+
+        self.assertGreater(all_args_ok.score(), missing_one_arg.score())
+        self.assertGreater(missing_one_arg.score(), missing_two_args.score())
+        self.assertGreater(missing_two_args.score(),
+                           all_args_extra_data.score())
+        self.assertGreater(all_args_extra_data.score(),
+                           missing_one_arg_extra_data.score())
+
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/pw_tokenizer/py/pw_tokenizer/decode.py b/pw_tokenizer/py/pw_tokenizer/decode.py
index 30d8771..f6ca503 100644
--- a/pw_tokenizer/py/pw_tokenizer/decode.py
+++ b/pw_tokenizer/py/pw_tokenizer/decode.py
@@ -20,6 +20,7 @@
 in the resulting string with an error message.
 """
 
+from datetime import datetime
 import re
 import struct
 from typing import Iterable, List, NamedTuple, Match, Sequence, Tuple
@@ -275,7 +276,7 @@
         return self.format()
 
     def __repr__(self) -> str:
-        return 'DecodedArg({!r})'.format(self)
+        return f'DecodedArg({self})'
 
 
 def parse_format_specifiers(format_string: str) -> Iterable[FormatSpec]:
@@ -288,6 +289,33 @@
     args: Sequence[DecodedArg]
     remaining: bytes
 
+    def ok(self) -> bool:
+        """Arg data decoded successfully and all expected args were found."""
+        return all(arg.ok() for arg in self.args) and not self.remaining
+
+    def score(self, date_removed: datetime = None) -> tuple:
+        """Returns a key for sorting by how successful a decode was.
+
+        Decoded strings are sorted by whether they
+
+          1. decoded all bytes for all arguments without errors,
+          2. decoded all data,
+          3. have the fewest decoding errors,
+          4. decoded the most arguments successfully, or
+          5. have the most recent removal date, if they were removed.
+
+        This must match the collision resolution logic in detokenize.cc.
+
+        To format a list of FormattedStrings from most to least successful,
+        use sort(key=FormattedString.score, reverse=True).
+        """
+        return (
+            self.ok(),  # decoded all data and all expected args were found
+            not self.remaining,  # decoded all data
+            -sum(not arg.ok() for arg in self.args),  # fewest errors
+            len(self.args),  # decoded the most arguments
+            date_removed or datetime.max)  # most recently present
+
 
 class FormatString:
     """Represents a printf-style format string."""
diff --git a/pw_tokenizer/py/pw_tokenizer/detokenize.py b/pw_tokenizer/py/pw_tokenizer/detokenize.py
index ddd698d..8f94fa0 100755
--- a/pw_tokenizer/py/pw_tokenizer/detokenize.py
+++ b/pw_tokenizer/py/pw_tokenizer/detokenize.py
@@ -34,7 +34,6 @@
 import argparse
 import base64
 import binascii
-from datetime import datetime
 import io
 import logging
 import os
@@ -44,8 +43,9 @@
 import struct
 import sys
 import time
-from typing import (AnyStr, BinaryIO, Callable, Dict, List, Iterable, Iterator,
-                    Match, NamedTuple, Optional, Pattern, Tuple, Union)
+from typing import (AnyStr, BinaryIO, Callable, Dict, List, Iterable, IO,
+                    Iterator, Match, NamedTuple, Optional, Pattern, Tuple,
+                    Union)
 
 try:
     from pw_tokenizer import database, decode, encode, tokens
@@ -82,25 +82,7 @@
         for entry, fmt in format_string_entries:
             result = fmt.format(encoded_message[ENCODED_TOKEN.size:],
                                 show_errors)
-
-            # Sort competing entries so the most likely matches appear first.
-            # Decoded strings are prioritized by whether they
-            #
-            #   1. decoded all bytes for all arguments without errors,
-            #   2. decoded all data,
-            #   3. have the fewest decoding errors,
-            #   4. decoded the most arguments successfully, or
-            #   5. have the most recent removal date, if they were removed.
-            #
-            # This must match the collision resolution logic in detokenize.cc.
-            score: Tuple = (
-                all(arg.ok() for arg in result.args) and not result.remaining,
-                not result.remaining,  # decoded all data
-                -sum(not arg.ok() for arg in result.args),  # fewest errors
-                len(result.args),  # decoded the most arguments
-                entry.date_removed or datetime.max)  # most recently present
-
-            decode_attempts.append((score, result))
+            decode_attempts.append((result.score(entry.date_removed), result))
 
         # Sort the attempts by the score so the most likely results are first.
         decode_attempts.sort(key=lambda value: value[0], reverse=True)
@@ -299,11 +281,14 @@
         return decode_and_detokenize
 
 
+_PathOrFile = Union[IO, str, Path]
+
+
 class AutoUpdatingDetokenizer(Detokenizer):
     """Loads and updates a detokenizer from database paths."""
     class _DatabasePath:
         """Tracks the modified time of a path or file object."""
-        def __init__(self, path):
+        def __init__(self, path: _PathOrFile) -> None:
             self.path = path if isinstance(path, (str, Path)) else path.name
             self._modified_time: Optional[float] = self._last_modified_time()
 
@@ -329,7 +314,7 @@
                 return database.load_token_database()
 
     def __init__(self,
-                 *paths_or_files,
+                 *paths_or_files: _PathOrFile,
                  min_poll_period_s: float = 1.0) -> None:
         self.paths = tuple(self._DatabasePath(path) for path in paths_or_files)
         self.min_poll_period_s = min_poll_period_s
diff --git a/pw_tokenizer/py/pw_tokenizer/parse_message.py b/pw_tokenizer/py/pw_tokenizer/parse_message.py
new file mode 100644
index 0000000..f8655e1
--- /dev/null
+++ b/pw_tokenizer/py/pw_tokenizer/parse_message.py
@@ -0,0 +1,182 @@
+# Copyright 2022 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+"""Parses the arguments in a Base64-encoded tokenized message.
+
+This is useful for attempting to decode tokenized messages with arguments for
+which the token is not recognized.
+"""
+
+import argparse
+import base64
+from dataclasses import dataclass
+import logging
+import sys
+from typing import Collection, Iterable, Iterator, Sequence
+
+import pw_cli.log
+from pw_tokenizer.decode import FormatString, FormattedString
+
+_LOG: logging.Logger = logging.getLogger('pw_tokenizer')
+
+DEFAULT_FORMAT_SPECS = (
+    '%s',
+    '%d',
+    '%f',
+)
+
+DEFAULT_MAX_ARGS = 8
+PREFIX = '$'
+
+
+def attempt_to_decode(
+        arg_data: bytes,
+        format_specs: Collection[str] = DEFAULT_FORMAT_SPECS,
+        max_args: int = DEFAULT_MAX_ARGS,
+        yield_failures: bool = False) -> Iterator[FormattedString]:
+    """Attemps to decode arguments using the provided format specifiers."""
+    format_strings = [(0, '')]  # (argument count, format string)
+
+    # Each argument requires at least 1 byte.
+    max_args = min(max_args, len(arg_data))
+
+    while format_strings:
+        arg_count, string = format_strings.pop(0)
+        decode_attempt = FormatString(string).format(arg_data)
+
+        if yield_failures or decode_attempt.ok():
+            yield decode_attempt
+
+        if arg_count < max_args:
+            format_strings.extend(
+                (arg_count + 1, string + spec) for spec in format_specs)
+
+
+@dataclass(frozen=True)
+class TokenizedMessage:
+    string: str
+    binary: bytes
+
+    @property
+    def token(self) -> int:
+        return int.from_bytes(self.binary[:4], 'little')
+
+    @property
+    def binary_args(self) -> bytes:
+        return self.binary[4:]
+
+    @classmethod
+    def parse(cls, message: str, prefix: str = '$') -> 'TokenizedMessage':
+        if not message.startswith(prefix):
+            raise ValueError(
+                f'{message} does not start with {prefix!r} as expected')
+
+        binary = base64.b64decode(message[len(prefix):])
+
+        if len(binary) < 4:
+            raise ValueError(f'{message} is only {len(binary)} bytes; '
+                             'tokenized messages must be at least 4 bytes')
+
+        return cls(message, binary)
+
+
+def _read_stdin():
+    try:
+        while True:
+            yield input()
+    except KeyboardInterrupt:
+        return
+
+
+def _text_list(items: Sequence, conjunction: str = 'or') -> str:
+    if len(items) == 1:
+        return str(items[0])
+
+    return f'{", ".join(str(i) for i in items[:-1])} {conjunction} {items[-1]}'
+
+
+def main(messages: Iterable[str], max_args: int, specs: Sequence[str],
+         show_failures: bool) -> int:
+    """Parses the arguments for a series of tokenized messages."""
+    exit_code = 0
+
+    for message in iter(messages) if messages else _read_stdin():
+        if not message:
+            continue
+
+        if not message.startswith(PREFIX):
+            message = PREFIX + message
+
+        _LOG.info('Decoding arguments for %r', message)
+        try:
+            parsed = TokenizedMessage.parse(message)
+        except ValueError as exc:
+            _LOG.error('%s', exc)
+            exit_code = 2
+            continue
+
+        _LOG.info('Binary: %r [%s] (%d bytes)', parsed.binary,
+                  parsed.binary.hex(' ', 1), len(parsed.binary))
+        _LOG.info('Token:  0x%08x', parsed.token)
+        _LOG.info('Args:   %r [%s] (%d bytes)', parsed.binary_args,
+                  parsed.binary_args.hex(' ', 1), len(parsed.binary_args))
+        _LOG.info('Decoding with up to %d %s arguments', max_args,
+                  _text_list(specs))
+
+        results = sorted(attempt_to_decode(parsed.binary_args, specs, max_args,
+                                           show_failures),
+                         key=FormattedString.score,
+                         reverse=True)
+
+        if not any(result.ok() for result in results):
+            _LOG.warning(
+                '  No combinations of up to %d %s arguments decoded '
+                'successfully', max_args, _text_list(specs))
+            exit_code = 1
+
+        for i, result in enumerate(results, 1):
+            _LOG.info(  # pylint: disable=logging-fstring-interpolation
+                f'  Attempt %{len(str(len(results)))}d: [%s] %s', i,
+                ' '.join(str(a.specifier) for a in result.args),
+                ' '.join(str(a) for a in result.args))
+        print()
+
+    return exit_code
+
+
+def _parse_args() -> argparse.Namespace:
+    parser = argparse.ArgumentParser(
+        description=__doc__,
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser.add_argument('--max-args',
+                        default=DEFAULT_MAX_ARGS,
+                        type=int,
+                        help='Maximum number of printf-style arguments')
+    parser.add_argument('--specs',
+                        nargs='*',
+                        default=DEFAULT_FORMAT_SPECS,
+                        help='Which printf-style format specifiers to check')
+    parser.add_argument('--show-failures',
+                        action='store_true',
+                        help='Show argument combinations that fail to decode')
+    parser.add_argument(
+        'messages',
+        nargs='*',
+        help=
+        'Base64-encoded tokenized messages to decode; omit to read from stdin')
+    return parser.parse_args()
+
+
+if __name__ == '__main__':
+    pw_cli.log.install()
+    sys.exit(main(**vars(_parse_args())))
diff --git a/pw_transfer/py/tests/python_cpp_transfer_test.py b/pw_transfer/py/tests/python_cpp_transfer_test.py
index 136a188..62e0d14 100755
--- a/pw_transfer/py/tests/python_cpp_transfer_test.py
+++ b/pw_transfer/py/tests/python_cpp_transfer_test.py
@@ -44,10 +44,13 @@
         self.directory = Path(self._tempdir.name)
 
         command = (*self.test_server_command, str(self.directory))
+        self._outgoing_filter = rpc.PacketFilter('outgoing RPC')
+        self._incoming_filter = rpc.PacketFilter('incoming RPC')
         self._context = rpc.HdlcRpcLocalServerAndClient(
             command,
             self.port, [transfer_pb2, test_server_pb2],
-            for_testing=True)
+            outgoing_processor=self._outgoing_filter,
+            incoming_processor=self._incoming_filter)
 
         service = self._context.client.channel(1).rpcs.pw.transfer.Transfer
         self.manager = pw_transfer.Manager(
@@ -148,12 +151,12 @@
         self.set_content(34, 'junk')
 
         # Allow the initial packet and first chunk, then drop the second chunk.
-        self._context.outgoing_packets.keep(2)
-        self._context.outgoing_packets.drop(1)
+        self._outgoing_filter.keep(2)
+        self._outgoing_filter.drop(1)
 
         # Allow the initial transfer parameters updates, then drop the next two.
-        self._context.incoming_packets.keep(1)
-        self._context.incoming_packets.drop(2)
+        self._incoming_filter.keep(1)
+        self._incoming_filter.drop(2)
 
         with self.assertLogs('pw_transfer', 'DEBUG') as logs:
             self.manager.write(34, _DATA_4096B)
@@ -169,8 +172,8 @@
     def test_write_regularly_drop_packets(self) -> None:
         self.set_content(35, 'junk')
 
-        self._context.outgoing_packets.drop_every(5)  # drop one per window
-        self._context.incoming_packets.drop_every(3)
+        self._outgoing_filter.drop_every(5)  # drop one per window
+        self._incoming_filter.drop_every(3)
 
         self.manager.write(35, _DATA_4096B)
 
@@ -184,16 +187,16 @@
             self.set_content(seed, 'junk')
 
             rand = random.Random(seed)
-            self._context.incoming_packets.randomly_drop(3, rand)
-            self._context.outgoing_packets.randomly_drop(3, rand)
+            self._incoming_filter.randomly_drop(3, rand)
+            self._outgoing_filter.randomly_drop(3, rand)
 
             data = bytes(
                 rand.randrange(256) for _ in range(rand.randrange(16384)))
             self.manager.write(seed, data)
             self.assertEqual(self.get_content(seed), data)
 
-            self._context.incoming_packets.reset()
-            self._context.outgoing_packets.reset()
+            self._incoming_filter.reset()
+            self._outgoing_filter.reset()
 
 
 def _main(test_server_command: List[str], port: int,
diff --git a/pw_watch/py/pw_watch/watch.py b/pw_watch/py/pw_watch/watch.py
index 8f3d291..fe495ef 100755
--- a/pw_watch/py/pw_watch/watch.py
+++ b/pw_watch/py/pw_watch/watch.py
@@ -38,6 +38,7 @@
 import argparse
 from dataclasses import dataclass
 import errno
+from itertools import zip_longest
 import logging
 import os
 from pathlib import Path
@@ -109,6 +110,8 @@
                  ░  ░ ░       ░  ░
 """
 
+_FULLSCREEN_STATUS_COLUMN_WIDTH = 10
+
 
 # TODO(keir): Figure out a better strategy for exiting. The problem with the
 # watcher is that doing a "clean exit" is slow. However, by directly exiting,
@@ -292,8 +295,10 @@
         self._clear_screen()
 
         if self.fullscreen_enabled:
-            msg = 'Watching for changes. Ctrl-C to exit; enter to rebuild'
-            self.result_message = [('', msg)]
+            self.create_result_message()
+            _LOG.info(
+                _COLOR.green(
+                    'Watching for changes. Ctrl-d to exit; enter to rebuild'))
         else:
             for line in pw_cli.branding.banner().splitlines():
                 _LOG.info(line)
@@ -327,6 +332,33 @@
                 tag = '(FAIL)'
 
             _LOG.log(level, '%s Finished build: %s %s', index, cmd, tag)
+            self.create_result_message()
+
+    def create_result_message(self):
+        if not self.fullscreen_enabled:
+            return
+
+        self.result_message = []
+        first_building_target_found = False
+        for (succeeded, command) in zip_longest(self.builds_succeeded,
+                                                self.build_commands):
+            if succeeded:
+                self.result_message.append(
+                    ('class:theme-fg-green',
+                     'OK'.rjust(_FULLSCREEN_STATUS_COLUMN_WIDTH)))
+            elif succeeded is None and not first_building_target_found:
+                first_building_target_found = True
+                self.result_message.append(
+                    ('class:theme-fg-yellow',
+                     'Building'.rjust(_FULLSCREEN_STATUS_COLUMN_WIDTH)))
+            elif first_building_target_found:
+                self.result_message.append(
+                    ('', ''.rjust(_FULLSCREEN_STATUS_COLUMN_WIDTH)))
+            else:
+                self.result_message.append(
+                    ('class:theme-fg-red',
+                     'Failed'.rjust(_FULLSCREEN_STATUS_COLUMN_WIDTH)))
+            self.result_message.append(('', f'  {command}\n'))
 
     def _run_build(self, index: str, cmd: BuildCommand, env: dict) -> bool:
         # Make sure there is a build.ninja file for Ninja to use.
@@ -352,7 +384,9 @@
     def _execute_command(self, command: list, env: dict) -> bool:
         """Runs a command with a blank before/after for visual separation."""
         self.current_build_errors = 0
-        self.status_message = ('ansiyellow', 'Building')
+        self.status_message = (
+            'class:theme-fg-yellow',
+            'Building'.rjust(_FULLSCREEN_STATUS_COLUMN_WIDTH))
         if self.fullscreen_enabled:
             return self._execute_command_watch_app(command, env)
         print()
@@ -432,26 +466,23 @@
     def on_complete(self, cancelled: bool = False) -> None:
         # First, use the standard logging facilities to report build status.
         if cancelled:
-            self.status_message = ('', 'Cancelled')
+            self.status_message = (
+                '', 'Cancelled'.rjust(_FULLSCREEN_STATUS_COLUMN_WIDTH))
             _LOG.error('Finished; build was interrupted')
         elif all(self.builds_succeeded):
-            self.status_message = ('ansigreen', 'Succeeded')
+            self.status_message = (
+                'class:theme-fg-green',
+                'Succeeded'.rjust(_FULLSCREEN_STATUS_COLUMN_WIDTH))
             _LOG.info('Finished; all successful')
         else:
-            self.status_message = ('ansired', 'Failed')
+            self.status_message = (
+                'class:theme-fg-red',
+                'Failed'.rjust(_FULLSCREEN_STATUS_COLUMN_WIDTH))
             _LOG.info('Finished; some builds failed')
 
         # Show individual build results for fullscreen app
         if self.fullscreen_enabled:
-            self.result_message = []
-            for (succeeded, cmd) in zip(self.builds_succeeded,
-                                        self.build_commands):
-                if succeeded:
-                    self.result_message.append(
-                        ('class:theme-fg-green', 'OK  '))
-                else:
-                    self.result_message.append(('class:theme-fg-red', 'FAIL'))
-                self.result_message.append(('', f'  {cmd}\n'))
+            self.create_result_message()
         # For non-fullscreen pw watch
         else:
             # Show a more distinct colored banner.
diff --git a/pw_watch/py/pw_watch/watch_app.py b/pw_watch/py/pw_watch/watch_app.py
index 0f8c93f..61f7fd9 100644
--- a/pw_watch/py/pw_watch/watch_app.py
+++ b/pw_watch/py/pw_watch/watch_app.py
@@ -37,6 +37,8 @@
 from prompt_toolkit.layout import (
     Dimension,
     DynamicContainer,
+    Float,
+    FloatContainer,
     FormattedTextControl,
     HSplit,
     Layout,
@@ -51,7 +53,9 @@
 from pw_console.get_pw_console_app import PW_CONSOLE_APP_CONTEXTVAR
 from pw_console.log_pane import LogPane
 from pw_console.plugin_mixin import PluginMixin
+from pw_console.quit_dialog import QuitDialog
 import pw_console.style
+import pw_console.widgets.border
 from pw_console.window_manager import WindowManager
 
 _NINJA_LOG = logging.getLogger('pw_watch_ninja_output')
@@ -90,18 +94,7 @@
 
         self.prefs = ConsolePrefs()
 
-        key_bindings = KeyBindings()
-
-        @key_bindings.add('c-c', filter=self.input_box_not_focused())
-        def _quit(_event):
-            "Quit."
-            _LOG.info('Got quit signal; exiting...')
-            self.exit(0)
-
-        @key_bindings.add('enter', filter=self.input_box_not_focused())
-        def _run_build(_event):
-            "Rebuild."
-            self.run_build()
+        self.quit_dialog = QuitDialog(self, self.exit)  # type: ignore
 
         self.search_history_filename = self.prefs.search_history
         # History instance for search toolbars.
@@ -156,22 +149,64 @@
         self.window_manager_container = (
             self.window_manager.create_root_container())
 
-        self.root_container = HSplit([
-            # The top toolbar.
-            Window(
-                content=FormattedTextControl(self.get_statusbar_text),
-                height=Dimension.exact(1),
-                style='class:toolbar_inactive',
-            ),
-            # Result Toolbar.
-            Window(
-                content=FormattedTextControl(self.get_resultbar_text),
-                height=lambda: len(self.event_handler.build_commands),
-                style='class:toolbar_inactive',
-            ),
-            # The main content.
-            DynamicContainer(lambda: self.window_manager_container),
-        ])
+        self.status_bar_border_style = 'class:command-runner-border'
+
+        self.root_container = FloatContainer(
+            HSplit([
+                pw_console.widgets.border.create_border(
+                    HSplit([
+                        # The top toolbar.
+                        Window(
+                            content=FormattedTextControl(
+                                self.get_statusbar_text),
+                            height=Dimension.exact(1),
+                            style='class:toolbar_inactive',
+                        ),
+                        # Result Toolbar.
+                        Window(
+                            content=FormattedTextControl(
+                                self.get_resultbar_text),
+                            height=lambda: len(self.event_handler.
+                                               build_commands),
+                            style='class:toolbar_inactive',
+                        ),
+                    ]),
+                    border_style=lambda: self.status_bar_border_style,
+                    base_style='class:toolbar_inactive',
+                    left_margin_columns=1,
+                    right_margin_columns=1,
+                ),
+                # The main content.
+                DynamicContainer(lambda: self.window_manager_container),
+            ]),
+            floats=[
+                Float(
+                    content=self.quit_dialog,
+                    top=2,
+                    left=2,
+                ),
+            ],
+        )
+
+        key_bindings = KeyBindings()
+
+        @key_bindings.add('enter', filter=self.input_box_not_focused())
+        def _run_build(_event):
+            "Rebuild."
+            self.run_build()
+
+        register = self.prefs.register_keybinding
+
+        @register('global.exit-no-confirmation', key_bindings)
+        def _quit_no_confirm(_event):
+            """Quit without confirmation."""
+            _LOG.info('Got quit signal; exiting...')
+            self.exit(0)
+
+        @register('global.exit-with-confirmation', key_bindings)
+        def _quit_with_confirm(_event):
+            """Quit with confirmation dialog."""
+            self.quit_dialog.open_dialog()
 
         self.key_bindings = merge_key_bindings([
             self.window_manager.key_bindings,
@@ -185,9 +220,11 @@
             Style.from_dict({'search': 'bg:ansired ansiblack'}),
         ])
 
+        self.layout = Layout(self.root_container,
+                             focused_element=self.ninja_log_pane)
+
         self.application: Application = Application(
-            layout=Layout(self.root_container,
-                          focused_element=self.ninja_log_pane),
+            layout=self.layout,
             key_bindings=self.key_bindings,
             mouse_support=True,
             color_depth=self.color_depth,
@@ -200,7 +237,7 @@
 
         self.plugin_init(
             plugin_callback=self.check_build_status,
-            plugin_callback_frequency=1.0,
+            plugin_callback_frequency=0.5,
             plugin_logger_name='pw_watch_stdout_checker',
         )
 
@@ -229,6 +266,14 @@
         """Set application focus to a specific container."""
         self.application.layout.focus(pane)
 
+    def focused_window(self):
+        """Return the currently focused window."""
+        return self.application.layout.current_window
+
+    def command_runner_is_open(self) -> bool:
+        # pylint: disable=no-self-use
+        return False
+
     def clear_ninja_log(self) -> None:
         self.ninja_log_view.log_store.clear_logs()
         self.ninja_log_view._restart_filtering()  # pylint: disable=protected-access
@@ -249,20 +294,23 @@
         is_building = False
         if status:
             fragments = [status]
-            is_building = status[1] == 'Building'
+            is_building = status[1].endswith('Building')
         separator = ('', '  ')
+        self.status_bar_border_style = 'class:theme-fg-green'
 
         if is_building:
             percent = self.event_handler.current_build_percent
             percent *= 100
             fragments.append(separator)
             fragments.append(('ansicyan', '{:.0f}%'.format(percent)))
+            self.status_bar_border_style = 'class:theme-fg-yellow'
 
         if self.event_handler.current_build_errors > 0:
             fragments.append(separator)
             fragments.append(('', 'Errors:'))
             fragments.append(
                 ('ansired', str(self.event_handler.current_build_errors)))
+            self.status_bar_border_style = 'class:theme-fg-red'
 
         if is_building:
             fragments.append(separator)
@@ -276,7 +324,7 @@
             result = [('', 'Loading...')]
         return result
 
-    def exit(self, exit_code: int) -> None:
+    def exit(self, exit_code: int = 0) -> None:
         log_file = self.external_logfile
 
         def _really_exit(future: asyncio.Future) -> NoReturn:
diff --git a/targets/emcraft_sf2_som/BUILD.gn b/targets/emcraft_sf2_som/BUILD.gn
index 007fa46..9d77869 100644
--- a/targets/emcraft_sf2_som/BUILD.gn
+++ b/targets/emcraft_sf2_som/BUILD.gn
@@ -18,8 +18,10 @@
 import("$dir_pw_docgen/docs.gni")
 import("$dir_pw_malloc/backend.gni")
 import("$dir_pw_system/system_target.gni")
+import("$dir_pw_third_party/smartfusion_mss/mss.gni")
 import("$dir_pw_tokenizer/backend.gni")
 import("$dir_pw_toolchain/generate_toolchain.gni")
+
 config("pw_malloc_active") {
   if (pw_malloc_BACKEND != "") {
     defines = [ "PW_MALLOC_ACTIVE=1" ]
@@ -35,9 +37,9 @@
       "$dir_pw_malloc",
       "$dir_pw_preprocessor",
       "$dir_pw_string",
+      "$dir_pw_sys_io_emcraft_sf2",
       "$dir_pw_system",
       "$dir_pw_third_party/freertos",
-      "$dir_pw_third_party/smartfusion_mss",
     ]
     sources = [
       "boot.cc",
@@ -51,7 +53,9 @@
 
   pw_source_set("sf2_mss_hal_config") {
     public_configs = [ ":config_includes" ]
-    public = [ "config/sf2_mss_hal_conf.h" ]
+    public =
+        [ "config/sf2_mss_hal_conf.h" ]  # SKEYS likely want to put the MDDR
+                                         # config by cortex etc stuff here
   }
 
   pw_source_set("sf2_freertos_config") {
@@ -61,17 +65,68 @@
   }
 }
 
+# Configured for use with a first stage boot loader to configure DDR and
+# perform memory remapping.
 pw_system_target("emcraft_sf2_som") {
   cpu = PW_SYSTEM_CPU.CORTEX_M3
   scheduler = PW_SYSTEM_SCHEDULER.FREERTOS
-
   link_deps = [ "$dir_pigweed/targets/emcraft_sf2_som:pre_init" ]
+
   build_args = {
     pw_log_BACKEND = dir_pw_log_tokenized
-    pw_tokenizer_GLOBAL_HANDLER_WITH_PAYLOAD_BACKEND = "//pw_system:log"
+    pw_tokenizer_GLOBAL_HANDLER_WITH_PAYLOAD_BACKEND =
+        "$dir_pw_system:log_backend.impl"
     pw_third_party_freertos_CONFIG =
         "$dir_pigweed/targets/emcraft_sf2_som:sf2_freertos_config"
     pw_third_party_freertos_PORT = "$dir_pw_third_party/freertos:arm_cm3"
+    pw_sys_io_BACKEND = dir_pw_sys_io_emcraft_sf2
+
+    # Non-debug build for use with the boot loader.
+    pw_boot_cortex_m_LINK_CONFIG_DEFINES = [
+      "PW_BOOT_FLASH_BEGIN=0x00000200",  # After vector table.
+
+      # TODO(skeys) Bootloader is capable of loading 16M of uncompressed code
+      # from SPI flash to external RAM. For now use the allocated eNVM flash
+      # (256K - Bootloader - InSystemProgrammer = 192K)
+      "PW_BOOT_FLASH_SIZE=0x30000",
+
+      # TODO(pwbug/219): Currently "pw_tokenizer/detokenize_test" requires at
+      # least 6K bytes in heap when using pw_malloc_freelist. The heap size
+      # required for tests should be investigated.
+      "PW_BOOT_HEAP_SIZE=4M",
+
+      # With external RAM remapped, we use the entire internal ram for the
+      # stack (64K).
+      "PW_BOOT_MIN_STACK_SIZE=1024K",
+
+      # Using external DDR RAM, we just need to make sure we go past our ROM
+      # sections.
+      "PW_BOOT_RAM_BEGIN=0xA1000000",
+
+      # We assume that the bootloader loaded all 16M of text.
+      "PW_BOOT_RAM_SIZE=48M",
+      "PW_BOOT_VECTOR_TABLE_BEGIN=0x00000000",
+      "PW_BOOT_VECTOR_TABLE_SIZE=512",
+    ]
+  }
+}
+
+# Debug target configured to work with MSS linker script and startup code.
+# TODO(skeys) Add linker script and config for debug builds using SoftConsole.
+pw_system_target("emcraft_sf2_som_debug") {
+  cpu = PW_SYSTEM_CPU.CORTEX_M3
+  scheduler = PW_SYSTEM_SCHEDULER.FREERTOS
+  link_deps = [ "$dir_pigweed/targets/emcraft_sf2_som:pre_init" ]
+
+  build_args = {
+    pw_log_BACKEND = dir_pw_log_tokenized
+    pw_tokenizer_GLOBAL_HANDLER_WITH_PAYLOAD_BACKEND =
+        "$dir_pw_system:log_backend.impl"
+    pw_third_party_freertos_CONFIG =
+        "$dir_pigweed/targets/emcraft_sf2_som:sf2_freertos_config"
+    pw_third_party_freertos_PORT = "$dir_pw_third_party/freertos:arm_cm3"
+    pw_sys_io_BACKEND = dir_pw_sys_io_emcraft_sf2
+
     pw_boot_cortex_m_LINK_CONFIG_DEFINES = [
       "PW_BOOT_FLASH_BEGIN=0x00000200",
       "PW_BOOT_FLASH_SIZE=200K",
diff --git a/targets/emcraft_sf2_som/boot.cc b/targets/emcraft_sf2_som/boot.cc
index 284860b..bb786c8 100644
--- a/targets/emcraft_sf2_som/boot.cc
+++ b/targets/emcraft_sf2_som/boot.cc
@@ -17,14 +17,19 @@
 #include <array>
 
 #include "FreeRTOS.h"
+#include "config/sf2_mss_hal_conf.h"
+#include "m2sxxx.h"
 #include "pw_boot_cortex_m/boot.h"
 #include "pw_malloc/malloc.h"
 #include "pw_preprocessor/compiler.h"
 #include "pw_string/util.h"
 #include "pw_sys_io_emcraft_sf2/init.h"
 #include "pw_system/init.h"
+#include "system_m2sxxx.h"
 #include "task.h"
 
+#include liberosoc_CONFIG_FILE
+
 namespace {
 
 std::array<StackType_t, configMINIMAL_STACK_SIZE> freertos_idle_stack;
@@ -68,7 +73,98 @@
   *pulIdleTaskStackSize = freertos_timer_stack.size();
 }
 
-extern "C" void pw_boot_PreStaticMemoryInit() {}
+extern "C" void pw_boot_PreStaticMemoryInit() {
+#if SF2_MSS_NO_BOOTLOADER
+  SystemInit();
+  // Initialize DDR
+  // inclusive-language: disable
+  MDDR->core.ddrc.DYN_SOFT_RESET_CR = 0x0000;
+  MDDR->core.ddrc.DYN_REFRESH_1_CR = 0x27de;
+  MDDR->core.ddrc.DYN_REFRESH_2_CR = 0x030f;
+  MDDR->core.ddrc.DYN_POWERDOWN_CR = 0x0002;
+  MDDR->core.ddrc.DYN_DEBUG_CR = 0x0000;
+  MDDR->core.ddrc.MODE_CR = 0x00C1;
+  MDDR->core.ddrc.ADDR_MAP_BANK_CR = 0x099f;
+  MDDR->core.ddrc.ECC_DATA_MASK_CR = 0x0000;
+  MDDR->core.ddrc.ADDR_MAP_COL_1_CR = 0x3333;
+  MDDR->core.ddrc.ADDR_MAP_COL_2_CR = 0xffff;
+  MDDR->core.ddrc.ADDR_MAP_ROW_1_CR = 0x7777;
+  MDDR->core.ddrc.ADDR_MAP_ROW_2_CR = 0x0fff;
+  MDDR->core.ddrc.INIT_1_CR = 0x0001;
+  MDDR->core.ddrc.CKE_RSTN_CYCLES_CR[0] = 0x4242;
+  MDDR->core.ddrc.CKE_RSTN_CYCLES_CR[1] = 0x0008;
+  MDDR->core.ddrc.INIT_MR_CR = 0x0033;
+  MDDR->core.ddrc.INIT_EMR_CR = 0x0020;
+  MDDR->core.ddrc.INIT_EMR2_CR = 0x0000;
+  MDDR->core.ddrc.INIT_EMR3_CR = 0x0000;
+  MDDR->core.ddrc.DRAM_BANK_TIMING_PARAM_CR = 0x00c0;
+  MDDR->core.ddrc.DRAM_RD_WR_LATENCY_CR = 0x0023;
+  MDDR->core.ddrc.DRAM_RD_WR_PRE_CR = 0x0235;
+  MDDR->core.ddrc.DRAM_MR_TIMING_PARAM_CR = 0x0064;
+  MDDR->core.ddrc.DRAM_RAS_TIMING_CR = 0x0108;
+  MDDR->core.ddrc.DRAM_RD_WR_TRNARND_TIME_CR = 0x0178;
+  MDDR->core.ddrc.DRAM_T_PD_CR = 0x0033;
+  MDDR->core.ddrc.DRAM_BANK_ACT_TIMING_CR = 0x1947;
+  MDDR->core.ddrc.ODT_PARAM_1_CR = 0x0010;
+  MDDR->core.ddrc.ODT_PARAM_2_CR = 0x0000;
+  MDDR->core.ddrc.ADDR_MAP_COL_3_CR = 0x3300;
+  MDDR->core.ddrc.MODE_REG_RD_WR_CR = 0x0000;
+  MDDR->core.ddrc.MODE_REG_DATA_CR = 0x0000;
+  MDDR->core.ddrc.PWR_SAVE_1_CR = 0x0514;
+  MDDR->core.ddrc.PWR_SAVE_2_CR = 0x0000;
+  MDDR->core.ddrc.ZQ_LONG_TIME_CR = 0x0200;
+  MDDR->core.ddrc.ZQ_SHORT_TIME_CR = 0x0040;
+  MDDR->core.ddrc.ZQ_SHORT_INT_REFRESH_MARGIN_CR[0] = 0x0012;
+  MDDR->core.ddrc.ZQ_SHORT_INT_REFRESH_MARGIN_CR[1] = 0x0002;
+  MDDR->core.ddrc.PERF_PARAM_1_CR = 0x4000;
+  MDDR->core.ddrc.HPR_QUEUE_PARAM_CR[0] = 0x80f8;
+  MDDR->core.ddrc.HPR_QUEUE_PARAM_CR[1] = 0x0007;
+  MDDR->core.ddrc.LPR_QUEUE_PARAM_CR[0] = 0x80f8;
+  MDDR->core.ddrc.LPR_QUEUE_PARAM_CR[1] = 0x0007;
+  MDDR->core.ddrc.WR_QUEUE_PARAM_CR = 0x0200;
+  MDDR->core.ddrc.PERF_PARAM_2_CR = 0x0001;
+  MDDR->core.ddrc.PERF_PARAM_3_CR = 0x0000;
+  MDDR->core.ddrc.DFI_RDDATA_EN_CR = 0x0003;
+  MDDR->core.ddrc.DFI_MIN_CTRLUPD_TIMING_CR = 0x0003;
+  MDDR->core.ddrc.DFI_MAX_CTRLUPD_TIMING_CR = 0x0040;
+  MDDR->core.ddrc.DFI_WR_LVL_CONTROL_CR[0] = 0x0000;
+  MDDR->core.ddrc.DFI_WR_LVL_CONTROL_CR[1] = 0x0000;
+  MDDR->core.ddrc.DFI_RD_LVL_CONTROL_CR[0] = 0x0000;
+  MDDR->core.ddrc.DFI_RD_LVL_CONTROL_CR[1] = 0x0000;
+  MDDR->core.ddrc.DFI_CTRLUPD_TIME_INTERVAL_CR = 0x0309;
+  MDDR->core.ddrc.AXI_FABRIC_PRI_ID_CR = 0x0000;
+  MDDR->core.ddrc.ECC_INT_CLR_REG = 0x0000;
+
+  MDDR->core.phy.LOOPBACK_TEST_CR = 0x0000;
+  MDDR->core.phy.CTRL_SLAVE_RATIO_CR = 0x0080;
+  MDDR->core.phy.DATA_SLICE_IN_USE_CR = 0x0003;
+  MDDR->core.phy.DQ_OFFSET_CR[0] = 0x00000000;
+  MDDR->core.phy.DQ_OFFSET_CR[2] = 0x0000;
+  MDDR->core.phy.DLL_LOCK_DIFF_CR = 0x000B;
+  MDDR->core.phy.FIFO_WE_SLAVE_RATIO_CR[0] = 0x0040;
+  MDDR->core.phy.FIFO_WE_SLAVE_RATIO_CR[1] = 0x0401;
+  MDDR->core.phy.FIFO_WE_SLAVE_RATIO_CR[2] = 0x4010;
+  MDDR->core.phy.FIFO_WE_SLAVE_RATIO_CR[3] = 0x0000;
+  MDDR->core.phy.LOCAL_ODT_CR = 0x0001;
+  MDDR->core.phy.RD_DQS_SLAVE_RATIO_CR[0] = 0x0040;
+  MDDR->core.phy.RD_DQS_SLAVE_RATIO_CR[1] = 0x0401;
+  MDDR->core.phy.RD_DQS_SLAVE_RATIO_CR[2] = 0x4010;
+  MDDR->core.phy.WR_DATA_SLAVE_RATIO_CR[0] = 0x0040;
+  MDDR->core.phy.WR_DATA_SLAVE_RATIO_CR[1] = 0x0401;
+  MDDR->core.phy.WR_DATA_SLAVE_RATIO_CR[2] = 0x4010;
+  MDDR->core.phy.WR_RD_RL_CR = 0x0021;
+  MDDR->core.phy.RDC_WE_TO_RE_DELAY_CR = 0x0003;
+  MDDR->core.phy.USE_FIXED_RE_CR = 0x0001;
+  MDDR->core.phy.USE_RANK0_DELAYS_CR = 0x0001;
+  MDDR->core.phy.CONFIG_CR = 0x0009;
+  MDDR->core.phy.DYN_RESET_CR = 0x01;
+  MDDR->core.ddrc.DYN_SOFT_RESET_CR = 0x01;
+  // inclusive-language: enable
+  // Wait for the DDR controller configuration to complete.
+  while ((MDDR->core.ddrc.DDRC_SR) == 0x0000) {
+  }
+#endif
+}
 
 extern "C" void pw_boot_PreStaticConstructorInit() {
   // TODO(skeys) add "#if no_bootLoader" and the functions needed for init.
diff --git a/targets/emcraft_sf2_som/config/sf2_mss_hal_conf.h b/targets/emcraft_sf2_som/config/sf2_mss_hal_conf.h
index 2f16d2e..0e82d68 100644
--- a/targets/emcraft_sf2_som/config/sf2_mss_hal_conf.h
+++ b/targets/emcraft_sf2_som/config/sf2_mss_hal_conf.h
@@ -14,8 +14,12 @@
 
 #pragma once
 
+#if (MSS_SYS_MDDR_CONFIG_BY_CORTEX == 1)
+#error "Please turn off DDR initialization! See the comment in this file above."
+#endif
+
 #define HAL_GPIO_MODULE_ENABLED
-#include "mss_gpio.h"
+#include "mss_gpio/mss_gpio.h"
 
 #define HAL_UART_MODULE_ENABLED
-#include "mss_uart.h"
+#include "mss_uart/mss_uart.h"
diff --git a/third_party/smartfusion_mss/BUILD.bazel b/third_party/smartfusion_mss/BUILD.bazel
new file mode 100644
index 0000000..bf4dfa0
--- /dev/null
+++ b/third_party/smartfusion_mss/BUILD.bazel
@@ -0,0 +1,40 @@
+# Copyright 2022 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+load(
+    "//pw_build:pigweed.bzl",
+    "pw_cc_library",
+)
+
+# Ready-made configurations
+liberosoc_configs = [
+    ("default", "configs/config_default.h"),
+    ("debug", "configs/config_debug.h"),
+]
+
+# Config targets.
+[
+    pw_cc_library(
+        name = "%s_config" % config_name,
+        hdrs = [
+            config_header,
+            "configs/config_pigweed_common.h",
+        ],
+        copts = ["-Dmss_CONFIG_FILE=\"%s\"" % config_header],
+        includes = ["."],
+    )
+    for config_name, config_header in liberosoc_configs
+]
+
+# TODO(skeys): Add build recipe for the library.
diff --git a/third_party/smartfusion_mss/BUILD.gn b/third_party/smartfusion_mss/BUILD.gn
new file mode 100644
index 0000000..2e3c108
--- /dev/null
+++ b/third_party/smartfusion_mss/BUILD.gn
@@ -0,0 +1,112 @@
+# Copyright 2022 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+import("//build_overrides/pigweed.gni")
+import("$dir_pw_build/linker_script.gni")
+import("$dir_pw_build/target_types.gni")
+import("$dir_pw_third_party/smartfusion_mss/mss.gni")
+
+declare_args() {
+  pw_target_smartfusion2_LINK_CONFIG_DEFINES = []
+}
+
+if (dir_pw_third_party_smartfusion_mss != "") {
+  # This list currently includes all of the library's source files.
+  smartfusion_mss_sources = [
+    "exported_firmware/CMSIS/startup_gcc/startup_m2sxxx.S",
+    "exported_firmware/CMSIS/system_m2sxxx.c",
+    "exported_firmware/drivers/mss_can/mss_can.c",
+    "exported_firmware/drivers/mss_ethernet_mac/m88e1340_phy.c",
+    "exported_firmware/drivers/mss_ethernet_mac/mss_ethernet_mac.c",
+    "exported_firmware/drivers/mss_gpio/mss_gpio.c",
+    "exported_firmware/drivers/mss_hpdma/mss_hpdma.c",
+    "exported_firmware/drivers/mss_i2c/mss_i2c.c",
+    "exported_firmware/drivers/mss_nvm/mss_nvm.c",
+    "exported_firmware/drivers/mss_rtc/mss_rtc.c",
+    "exported_firmware/drivers/mss_spi/mss_spi.c",
+    "exported_firmware/drivers/mss_sys_services/mss_comblk.c",
+    "exported_firmware/drivers/mss_sys_services/mss_sys_services.c",
+    "exported_firmware/drivers/mss_uart/mss_uart.c",
+    "exported_firmware/drivers/mss_usb/mss_usb_common_cif.c",
+    "exported_firmware/drivers/mss_usb/mss_usb_device.c",
+    "exported_firmware/drivers/mss_usb/mss_usb_device_cdc.c",
+    "exported_firmware/drivers/mss_usb/mss_usb_device_cif.c",
+    "exported_firmware/drivers/mss_usb/mss_usb_device_hid.c",
+    "exported_firmware/drivers/mss_usb/mss_usb_device_msd.c",
+    "exported_firmware/drivers/mss_usb/mss_usb_device_printer.c",
+    "exported_firmware/drivers/mss_usb/mss_usb_device_rndis.c",
+    "exported_firmware/drivers/mss_usb/mss_usb_device_vendor.c",
+    "exported_firmware/drivers/mss_usb/mss_usb_host.c",
+    "exported_firmware/drivers/mss_usb/mss_usb_host_cif.c",
+    "exported_firmware/drivers/mss_usb/mss_usb_host_msc.c",
+    "exported_firmware/drivers_config/sys_config/sys_config.c",
+  ]
+
+  liberosoc_configs = [
+    {
+      name = "default"
+      config_header = "configs/config_default.h"
+    },
+    {
+      name = "debug"
+      config_header = "configs/config_debug.h"
+    },
+  ]
+
+  foreach(ele, liberosoc_configs) {
+    config_name = ele.name + "_config"
+    config(config_name) {
+      # The selected config header is passed to dependents via the
+      # liberosoc_CONFIG_FILE macro.
+      defines = [ "liberosoc_CONFIG_FILE=\"${ele.config_header}\"" ]
+    }
+
+    srcset_name = ele.name + "_config_srcset"
+    pw_source_set(srcset_name) {
+      public = [
+        "configs/config_pigweed_common.h",
+        ele.config_header,
+      ]
+      public_configs = [
+        ":${config_name}",
+        ":smartfusion_mss_common_config",
+      ]
+    }
+  }
+
+  config("smartfusion_mss_common_config") {
+    include_dirs = [
+      "$dir_pw_third_party_smartfusion_mss/exported_firmware/CMSIS/V4.5/Include",
+      "$dir_pw_third_party_smartfusion_mss/exported_firmware/drivers",
+      "$dir_pw_third_party_smartfusion_mss/exported_firmware/CMSIS",
+      "$dir_pw_third_party/smartfusion_mss",
+    ]
+    cflags = [
+      "-Wno-error=cast-qual",
+      "-Wno-error=redundant-decls",
+      "-w",
+    ]
+  }
+
+  pw_source_set("smartfusion_mss") {
+    sources = []
+    foreach(source, smartfusion_mss_sources) {
+      sources += [ "$dir_pw_third_party_smartfusion_mss/" + source ]
+    }
+    public_deps = [ ":${pw_third_party_smartfusion_mss_CONFIG}_config_srcset" ]
+  }
+} else {
+  group("smartfusion_mss") {
+  }
+}
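+
+# When dir_pw_third_party_smartfusion_mss is not set, the empty group above
+# keeps the "$dir_pw_third_party/smartfusion_mss" label valid for dependents
+# while building nothing.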
diff --git a/third_party/smartfusion_mss/README.md b/third_party/smartfusion_mss/README.md
new file mode 100644
index 0000000..7532545
--- /dev/null
+++ b/third_party/smartfusion_mss/README.md
@@ -0,0 +1,6 @@
+# LiberoSoC Library
+
+This folder provides build scripts and configuration recipes for building the
+SmartFusion2 Microcontroller Subsystem (MSS) library. Download the source code
+manually or with pw_package ("pw package install sf2mss"). For the GN build,
+set `dir_pw_third_party_smartfusion_mss` to the path of the downloaded source.
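+
+For example, the following `args.gn` entries (the source path shown is
+illustrative) point the build at the downloaded sources and select one of the
+ready-made configurations:
+
+```
+dir_pw_third_party_smartfusion_mss = "//path/to/smartfusion_mss/source"
+pw_third_party_smartfusion_mss_CONFIG = "default"
+```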
diff --git a/third_party/smartfusion_mss/configs/config_debug.h b/third_party/smartfusion_mss/configs/config_debug.h
new file mode 100644
index 0000000..7eb6960
--- /dev/null
+++ b/third_party/smartfusion_mss/configs/config_debug.h
@@ -0,0 +1,19 @@
+// Copyright 2022 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#pragma once
+
+#include "configs/config_pigweed_common.h"
+
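+// Debug builds run without a bootloader. When this is set,
+// pw_boot_PreStaticMemoryInit() in targets/emcraft_sf2_som/boot.cc calls
+// SystemInit() and configures the DDR controller itself.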
+#define SF2_MSS_NO_BOOTLOADER 1
diff --git a/third_party/smartfusion_mss/configs/config_default.h b/third_party/smartfusion_mss/configs/config_default.h
new file mode 100644
index 0000000..30aee59
--- /dev/null
+++ b/third_party/smartfusion_mss/configs/config_default.h
@@ -0,0 +1,17 @@
+// Copyright 2022 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#pragma once
+
+#include "configs/config_pigweed_common.h"
diff --git a/third_party/smartfusion_mss/configs/config_pigweed_common.h b/third_party/smartfusion_mss/configs/config_pigweed_common.h
new file mode 100644
index 0000000..3f4f6ea
--- /dev/null
+++ b/third_party/smartfusion_mss/configs/config_pigweed_common.h
@@ -0,0 +1,20 @@
+// Copyright 2022 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+// Common configuration shared by all SmartFusion2 MSS (liberosoc) build
+// configurations in Pigweed. The ready-made config headers in this directory
+// (config_default.h, config_debug.h) include this file before adding their own
+// settings.
+
+#pragma once
diff --git a/third_party/smartfusion_mss/mss.gni b/third_party/smartfusion_mss/mss.gni
new file mode 100644
index 0000000..dd97178
--- /dev/null
+++ b/third_party/smartfusion_mss/mss.gni
@@ -0,0 +1,24 @@
+# Copyright 2022 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+declare_args() {
+  # Path to the SmartFusion2 MSS (liberosoc) source code. When set, a
+  # pw_source_set for the library is created at
+  # "$dir_pw_third_party/smartfusion_mss".
+  dir_pw_third_party_smartfusion_mss = ""
+
+  # Configuration for the SmartFusion2 MSS library. Must be the name of one of
+  # the `liberosoc_configs` entries in BUILD.gn.
+  pw_third_party_smartfusion_mss_CONFIG = "default"
+}