From b77342889e665d505a7e42f913fa05c2c92c848f Mon Sep 17 00:00:00 2001
From: Ryan Kuester <kuester@bdti.com>
Date: Fri, 4 Oct 2024 09:59:10 -0500
Subject: [PATCH] feat(compression): add work-in-progress compression and
 viewer tools

---
 tensorflow/lite/micro/BUILD                   |  26 ++++
 tensorflow/lite/micro/compression/BUILD       |  23 ++--
 tensorflow/lite/micro/compression/compress.py |   8 ++
 .../lite/micro/compression/test_models.py     |   5 +-
 tensorflow/lite/micro/compression/view.py     |  87 ++++++++----
 .../lite/micro/compression/view_test.py       |  88 +++++++++++++
 tensorflow/lite/micro/hexdump.cc              | 103 +++++++++++++++
 tensorflow/lite/micro/hexdump.h               |  35 +++++
 tensorflow/lite/micro/hexdump_test.cc         |  58 ++++++++
 .../micro/tools/ci_build/test_code_style.sh   |   1 +
 third_party/python_requirements.in            |   1 +
 third_party/python_requirements.txt           | 124 ++++++++++++++++++
 12 files changed, 519 insertions(+), 40 deletions(-)
 create mode 100644 tensorflow/lite/micro/compression/view_test.py
 create mode 100644 tensorflow/lite/micro/hexdump.cc
 create mode 100644 tensorflow/lite/micro/hexdump.h
 create mode 100644 tensorflow/lite/micro/hexdump_test.cc

diff --git a/tensorflow/lite/micro/BUILD b/tensorflow/lite/micro/BUILD
index 1753465425d..5dd10b514b1 100644
--- a/tensorflow/lite/micro/BUILD
+++ b/tensorflow/lite/micro/BUILD
@@ -381,6 +381,20 @@ cc_library(
     ],
 )
 
+cc_library(
+    name = "hexdump",
+    srcs = [
+        "hexdump.cc",
+    ],
+    hdrs = [
+        "hexdump.h",
+    ],
+    deps = [
+        ":span",
+        ":static_vector",
+    ],
+)
+
 cc_library(
     name = "recording_allocators",
     srcs = [
@@ -556,6 +570,18 @@ cc_test(
     ],
 )
 
+cc_test(
+    name = "hexdump_test",
+    size = "small",
+    srcs = [
+        "hexdump_test.cc",
+    ],
+    deps = [
+        ":hexdump",
+        "//tensorflow/lite/micro/testing:micro_test",
+    ],
+)
+
 cc_test(
     name = "memory_helpers_test",
     srcs = [
diff --git a/tensorflow/lite/micro/compression/BUILD b/tensorflow/lite/micro/compression/BUILD
index c5bf221686b..937599cc7cf 100644
--- a/tensorflow/lite/micro/compression/BUILD
+++ b/tensorflow/lite/micro/compression/BUILD
@@ -75,6 +75,7 @@ py_binary(
         "@absl_py//absl/flags",
         "@absl_py//absl/logging",
         "@flatbuffers//:runtime_py",
+        requirement("bitarray"),
         requirement("numpy"),
     ],
 )
@@ -92,30 +93,28 @@ py_test(
 )
 
 py_binary(
-    name = "discretize",
+    name = "view",
     srcs = [
-        "discretize.py",
+        "view.py",
     ],
     deps = [
         ":metadata_py",
         "//tensorflow/lite/python:schema_py",
         "@absl_py//absl:app",
-        "@absl_py//absl/flags",
-        "@absl_py//absl/logging",
-        "@flatbuffers//:runtime_py",
-        requirement("numpy"),
+        requirement("bitarray"),
     ],
 )
 
-py_binary(
-    name = "view",
+py_test(
+    name = "view_test",
+    size = "small",
     srcs = [
-        "view.py",
+        "view_test.py",
     ],
     deps = [
-        ":metadata_py",
-        "//tensorflow/lite/python:schema_py",
-        "@absl_py//absl:app",
+        ":test_models",
+        ":view",
+        "@absl_py//absl/testing:absltest",
     ],
 )
 
diff --git a/tensorflow/lite/micro/compression/compress.py b/tensorflow/lite/micro/compression/compress.py
index 70e7593147c..01c3415bef7 100644
--- a/tensorflow/lite/micro/compression/compress.py
+++ b/tensorflow/lite/micro/compression/compress.py
@@ -31,6 +31,9 @@
 from functools import reduce
 from typing import Sequence
 import math
+import os
+import sys
+import textwrap
 
 from tflite_micro.tensorflow.lite.micro.compression import (
     lib,
@@ -246,4 +249,9 @@ def main(argv):
 
 
 if __name__ == "__main__":
+  name = os.path.basename(sys.argv[0])
+  usage = textwrap.dedent(f"""\
+      Usage: {name} <INPUT> <OUTPUT> [--tensors=<SPEC>] [--alt_axis_tensors=<SPEC>]
+      Compress a .tflite model.""")
+  sys.modules['__main__'].__doc__ = usage
   absl.app.run(main)
diff --git a/tensorflow/lite/micro/compression/test_models.py b/tensorflow/lite/micro/compression/test_models.py
index e3dc951a369..b25782cfbc6 100644
--- a/tensorflow/lite/micro/compression/test_models.py
+++ b/tensorflow/lite/micro/compression/test_models.py
@@ -30,6 +30,9 @@ def build(spec: dict) -> bytearray:
     A tflite flatbuffer.
   """
   root = tflite.ModelT()
+  description = spec.get("description")
+  if description is not None:
+    root.description = description
 
   root.operatorCodes = []
   for id, operator_code in spec["operator_codes"].items():
@@ -57,7 +60,7 @@ def build(spec: dict) -> bytearray:
     for id, tensor in subgraph["tensors"].items():
       assert id == len(subgraph_t.tensors)
       tensor_t = tflite.TensorT()
-      tensor_t.name = tensor.get("name", f"tensor{id}")
+      tensor_t.name = tensor.get("name")
       tensor_t.shape = tensor["shape"]
       tensor_t.type = tensor["type"]
       tensor_t.buffer = tensor["buffer"]
diff --git a/tensorflow/lite/micro/compression/view.py b/tensorflow/lite/micro/compression/view.py
index 5719b0841af..9de3acbc445 100644
--- a/tensorflow/lite/micro/compression/view.py
+++ b/tensorflow/lite/micro/compression/view.py
@@ -12,9 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import pprint
 import bitarray
 import bitarray.util
+import pprint
+import textwrap
+import os
+import sys
 
 import lib
 from tensorflow.lite.micro.compression import metadata_py_generated as compression_schema
@@ -53,22 +56,41 @@ def unpack_TensorType(type):
   return lut[type]
 
 
+def _decode_name(name):
+  """Returns name as a str or 'None'.
+
+  The flatbuffer library returns names as bytes objects or None. This function
+  returns a str, decoded from the bytes object, or None.
+  """
+  if name is None:
+    return None
+  else:
+    return str(name, encoding="utf-8")
+
+
 def unpack_tensors(tensors):
   result = []
   for index, t in enumerate(tensors):
     d = {
-        "_index": index,
-        "name": t.name.decode("utf-8"),
+        "_tensor": index,
+        "name": _decode_name(t.name),
         "type": unpack_TensorType(t.type),
-        "variable": t.isVariable,
         "shape": unpack_array(t.shape),
         "buffer": t.buffer,
     }
-    if t.quantization is not None:
-      d["quantization"] = [
-          unpack_array(t.quantization.scale),
-          unpack_array(t.quantization.zeroPoint)
-      ]
+
+    if t.isVariable:
+      d["is_variable"] = True
+    else:
+      # don't display this unusual field
+      pass
+
+    if t.quantization is not None and t.quantization.scale is not None:
+      d["quantization"] = {
+          "scale": unpack_array(t.quantization.scale),
+          "zero": unpack_array(t.quantization.zeroPoint),
+          "dimension": t.quantization.quantizedDimension,
+      }
     result.append(d)
   return result
 
@@ -78,7 +100,7 @@ def unpack_subgraphs(subgraphs):
   for index, s in enumerate(subgraphs):
     d = {
         "_index": index,
-        "name": s.name,
+        "name": _decode_name(s.name),
         # "inputs": s.inputs,
         # "outputs": s.outputs,
         "operators": unpack_operators(s.operators),
@@ -92,7 +114,7 @@ def unpack_metadata(metadata):
   if metadata is None:
     return None
   return [{
-      "name": m.name.decode("utf-8"),
+      "name": _decode_name(m.name),
       "buffer": m.buffer
   } for m in metadata]
 
@@ -157,8 +179,8 @@ def unpack_buffers(model, compression_metadata=None, unpacked_metadata=None):
   buffers = model.buffers
   result = []
   for index, b in enumerate(buffers):
-    d = {"buffer": index}
-    d = d | {"bytes": len(b.data) if b.data is not None else 0}
+    d = {"_buffer": index}
+    d = d | {"_bytes": len(b.data) if b.data is not None else 0}
     d = d | {"data": unpack_array(b.data)}
     if index == compression_metadata:
       if unpacked_metadata is not None:
@@ -184,12 +206,20 @@ def get_compression_metadata_buffer(model):
   if model.metadata is None:
     return None
   for item in model.metadata:
-    if item.name.decode("utf-8") == "COMPRESSION_METADATA":
+    if _decode_name(item.name) == "COMPRESSION_METADATA":
       return item.buffer
   return None
 
 
-def print_model(model, format=None):
+def create_dictionary(flatbuffer: memoryview) -> dict:
+  """Returns a human-readable dictionary from the provided model flatbuffer.
+
+  This function transforms a .tflite model flatbuffer into a Python dictionary.
+  When pretty-printed, this dictionary offers an easily interpretable view of
+  the model.
+  """
+  model = tflite_schema.ModelT.InitFromPackedBuf(flatbuffer, 0)
+
   comp_metadata_index = get_compression_metadata_buffer(model)
   comp_metadata_unpacked = None
   if comp_metadata_index is not None:
@@ -201,30 +231,33 @@ def print_model(model, format=None):
 
   output = {
       "description":
-          model.description.decode("utf-8"),
+      _decode_name(model.description),
       "version":
-          model.version,
+      model.version,
       "operator_codes":
-          unpack_list(model.operatorCodes),
+      unpack_list(model.operatorCodes),
       "metadata":
-          unpack_metadata(model.metadata),
+      unpack_metadata(model.metadata),
       "subgraphs":
-          unpack_subgraphs(model.subgraphs),
+      unpack_subgraphs(model.subgraphs),
       "buffers":
-          unpack_buffers(model, comp_metadata_index, comp_metadata_unpacked),
+      unpack_buffers(model, comp_metadata_index, comp_metadata_unpacked),
   }
 
-  pprint.pprint(output, width=90, sort_dicts=False, compact=True)
+  return output
 
 
 def main(argv):
   path = argv[1]
-  with open(path, 'rb') as file:
-    model = tflite_schema.ModelT.InitFromPackedBuf(file.read(), 0)
-
-  print_model(model)
+  with open(path, 'rb') as flatbuffer:
+    d = create_dictionary(memoryview(flatbuffer.read()))
+    pprint.pprint(d, width=90, sort_dicts=False, compact=True)
 
 
 if __name__ == "__main__":
+  name = os.path.basename(sys.argv[0])
+  usage = textwrap.dedent(f"""\
+      Usage: {name} <MODEL>
+      Print a visualization of a .tflite model.""")
+  sys.modules['__main__'].__doc__ = usage
   absl.app.run(main)
-  sys.exit(rc)
diff --git a/tensorflow/lite/micro/compression/view_test.py b/tensorflow/lite/micro/compression/view_test.py
new file mode 100644
index 00000000000..47c02cfe5ea
--- /dev/null
+++ b/tensorflow/lite/micro/compression/view_test.py
@@ -0,0 +1,88 @@
+# Copyright 2024 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from absl.testing import absltest
+
+import test_models
+import view
+
+_MODEL = {
+    "description": "Test model",
+    "operator_codes": {
+        0: {
+            "builtin_code": 0,
+        },
+        1: {
+            "builtin_code": 1,
+        },
+    },
+    "subgraphs": {
+        0: {
+            "operators": {
+                0: {
+                    "opcode_index": 1,
+                    "inputs": (
+                        0,
+                        1,
+                    ),
+                    "outputs": (3, ),
+                },
+                1: {
+                    "opcode_index": 0,
+                    "inputs": (
+                        3,
+                        2,
+                    ),
+                    "outputs": (4, ),
+                },
+            },
+            "tensors": {
+                0: {
+                    "shape": (16, 1),
+                    "type": 1,
+                    "buffer": 1,
+                },
+                1: {
+                    "shape": (16, 1),
+                    "type": 1,
+                    "buffer": 1,
+                },
+            },
+        },
+    },
+    "buffers": {
+        0: bytes(),
+        1: bytes(i for i in range(1, 16)),
+    }
+}
+
+
+class UnitTests(absltest.TestCase):
+
+  def testHelloWorld(self):
+    self.assertTrue(True)
+
+  def testSmokeTest(self):
+    flatbuffer = test_models.build(_MODEL)
+    view.create_dictionary(memoryview(flatbuffer))
+
+  def testStrippedDescription(self):
+    stripped = _MODEL.copy()
+    del stripped["description"]
+    flatbuffer = test_models.build(stripped)
+    view.create_dictionary(memoryview(flatbuffer))
+
+
+if __name__ == "__main__":
+  absltest.main()
diff --git a/tensorflow/lite/micro/hexdump.cc b/tensorflow/lite/micro/hexdump.cc
new file mode 100644
index 00000000000..fd0f6f7c84e
--- /dev/null
+++ b/tensorflow/lite/micro/hexdump.cc
@@ -0,0 +1,103 @@
+// Copyright 2024 The TensorFlow Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "tensorflow/lite/micro/hexdump.h"
+
+#include <algorithm>
+#include <cctype>
+
+#include "tensorflow/lite/micro/debug_log.h"
+#include "tensorflow/lite/micro/static_vector.h"
+
+namespace {
+
+tflite::Span<char> output(const tflite::Span<char>& buf, const char* format,
+                          ...) {
+  // Writes formatted output, printf-style, to either a buffer or DebugLog.
+  // Writes to DebugLog if the buffer data pointer is null. Does not exceed
+  // the size of the buffer. Returns the unused remainder of the buffer, or a
+  // buffer with a null data pointer in the case of printing to DebugLog.
+
+  tflite::Span<char> result{nullptr, 0};
+
+  va_list args;
+  va_start(args, format);
+
+  if (buf.data() == nullptr) {
+    DebugLog(format, args);
+    result = {nullptr, 0};
+  } else {
+    size_t len = DebugVsnprintf(buf.data(), buf.size(), format, args);
+    // Returns the number of characters that would have been written if
+    // there were enough room, so cap it at the size of the buffer in order to
+    // know how much was actually written.
+    size_t consumed = std::min(len, buf.size());
+    result = {buf.data() + consumed, buf.size() - consumed};
+  }
+
+  va_end(args);
+  return result;
+}
+
+}  // end anonymous namespace
+
+tflite::Span<char> tflite::hexdump(const tflite::Span<const std::byte> region,
+                                   const tflite::Span<char> out) {
+  tflite::Span<char> buffer{out};
+  std::size_t byte_nr = 0;
+  constexpr int per_line = 16;
+  const int lines = (region.size() + per_line - 1) / per_line;  // round up
+
+  for (int line = 0; line < lines; ++line) {
+    tflite::StaticVector<char, per_line> ascii;
+
+    // print address
+    buffer = output(buffer, "%08X:", line);
+
+    for (int pos = 0; pos < per_line; ++pos) {
+      if (byte_nr < region.size()) {
+        // print byte
+        int as_int = static_cast<int>(region[byte_nr++]);
+        buffer = output(buffer, " %02X", as_int);
+
+        // buffer an ascii printable value
+        char c{'.'};
+        if (std::isprint(as_int)) {
+          c = static_cast<char>(as_int);
+        }
+        ascii.push_back(c);
+      } else {
+        buffer = output(buffer, "   ");
+      }
+
+      // print extra space in middle of the line
+      if (pos == per_line / 2 - 1) {
+        buffer = output(buffer, " ");
+      }
+    }
+
+    // print the ascii value
+    buffer = output(buffer, "  ");
+    for (const auto& c : ascii) {
+      buffer = output(buffer, "%c", c);
+    }
+    buffer = output(buffer, "%c", '\n');
+  }
+
+  return {out.data(), out.size() - buffer.size()};
+}
+
+void tflite::hexdump(const tflite::Span<const std::byte> region) {
+  hexdump(region, {nullptr, 0});
+}
diff --git a/tensorflow/lite/micro/hexdump.h b/tensorflow/lite/micro/hexdump.h
new file mode 100644
index 00000000000..0bdfcc47c05
--- /dev/null
+++ b/tensorflow/lite/micro/hexdump.h
@@ -0,0 +1,35 @@
+// Copyright 2024 The TensorFlow Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef TENSORFLOW_LITE_MICRO_HEXDUMP_H_
+#define TENSORFLOW_LITE_MICRO_HEXDUMP_H_
+
+#include <cstddef>
+
+#include "tensorflow/lite/micro/span.h"
+
+namespace tflite {
+
+// Displays the contents of a memory region, formatted in hexadecimal and ASCII
+// in a style similar to Python's hexdump module, using DebugLog().
+void hexdump(Span<const std::byte> region);
+
+// Writes the contents of a memory region, formatted in hexadecimal and ASCII
+// in a style similar to Python's hexdump module, to a buffer. Returns the
+// portion of the buffer written.
+Span<char> hexdump(Span<const std::byte> region, Span<char> buffer);
+
+}  // end namespace tflite
+
+#endif  // TENSORFLOW_LITE_MICRO_HEXDUMP_H_
diff --git a/tensorflow/lite/micro/hexdump_test.cc b/tensorflow/lite/micro/hexdump_test.cc
new file mode 100644
index 00000000000..89d3a0404c5
--- /dev/null
+++ b/tensorflow/lite/micro/hexdump_test.cc
@@ -0,0 +1,58 @@
+// Copyright 2024 The TensorFlow Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "tensorflow/lite/micro/hexdump.h"
+
+#include <array>
+
+#include "tensorflow/lite/micro/span.h"
+#include "tensorflow/lite/micro/testing/micro_test.h"
+
+constexpr tflite::Span<const char> input{
+    "This is an input string for testing."};
+
+const tflite::Span<const std::byte> region{
+    reinterpret_cast<const std::byte*>(input.data()), input.size()};
+
+// clang-format off
+constexpr tflite::Span<const char> expected{
+    "00000000: 54 68 69 73 20 69 73 20  61 6E 20 69 6E 70 75 74  This is an input\n"
+    "00000001: 20 73 74 72 69 6E 67 20  66 6F 72 20 74 65 73 74   string for test\n"
+    "00000002: 69 6E 67 2E 00                                    ing..\n"};
+// clang-format on
+
+// String literals have null terminators, but don't expect a null terminator
+// in the hexdump output.
+constexpr tflite::Span<const char> expected_no_null{expected.data(),
+                                                    expected.size() - 1};
+
+TF_LITE_MICRO_TESTS_BEGIN
+
+TF_LITE_MICRO_TEST(TestOutputToBuffer) {
+  // Allocate a buffer with an arbitrary amount of extra room so the test has
+  // the possibility of failing if hexdump mishandles the extra space.
+  std::array<char, expected.size() + 10> buffer;
+
+  tflite::Span<char> output = tflite::hexdump(region, buffer);
+  TF_LITE_MICRO_EXPECT(output == expected_no_null);
+}
+
+TF_LITE_MICRO_TEST(TestOutputToDebugLog) {
+  // There's no easy way to verify DebugLog output; however, test it anyhow to
+  // catch an outright crash, and so the output appears in the log should
+  // someone wish to examine it.
+  tflite::hexdump(region);
+}
+
+TF_LITE_MICRO_TESTS_END
diff --git a/tensorflow/lite/micro/tools/ci_build/test_code_style.sh b/tensorflow/lite/micro/tools/ci_build/test_code_style.sh
index e06ac891bc0..16262c38c63 100755
--- a/tensorflow/lite/micro/tools/ci_build/test_code_style.sh
+++ b/tensorflow/lite/micro/tools/ci_build/test_code_style.sh
@@ -99,6 +99,7 @@ tensorflow/lite/micro/tools/make/downloads/pigweed/pw_presubmit/py/pw_presubmit/
   -e experimental \
   -e schema/schema_generated.h \
   -e schema/schema_utils.h \
+  -e tensorflow/lite/micro/compression/metadata_saved.h \
   -e tensorflow/lite/micro/tools/layer_by_layer_schema_generated.h \
   -e tensorflow/lite/micro/compression/metadata_saved.h \
   -e tensorflow/lite/micro/compression/metadata_generated.h \
diff --git a/third_party/python_requirements.in b/third_party/python_requirements.in
index 29c081e5b18..f75d931b52b 100644
--- a/third_party/python_requirements.in
+++ b/third_party/python_requirements.in
@@ -26,6 +26,7 @@
 # is sensitive to the Python environment (interpreter version, etc.) in which
 # it is run.
 
+bitarray
 hexdump
 tensorflow
 twine
diff --git a/third_party/python_requirements.txt b/third_party/python_requirements.txt
index b0d91331ae0..487dd5635a6 100644
--- a/third_party/python_requirements.txt
+++ b/third_party/python_requirements.txt
@@ -15,6 +15,130 @@ astunparse==1.6.3 \
     --hash=sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872 \
     --hash=sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8
     # via tensorflow
+bitarray==2.9.2 \
+    --hash=sha256:03adaacb79e2fb8f483ab3a67665eec53bb3fd0cd5dbd7358741aef124688db3 \
+    --hash=sha256:052c5073bdcaa9dd10628d99d37a2f33ec09364b86dd1f6281e2d9f8d3db3060 \
+    --hash=sha256:0a99b23ac845a9ea3157782c97465e6ae026fe0c7c4c1ed1d88f759fd6ea52d9 \
+    --hash=sha256:0b3543c8a1cb286ad105f11c25d8d0f712f41c5c55f90be39f0e5a1376c7d0b0 \
+    --hash=sha256:128cc3488176145b9b137fdcf54c1c201809bbb8dd30b260ee40afe915843b43 \
+    --hash=sha256:1bb33673e7f7190a65f0a940c1ef63266abdb391f4a3e544a47542d40a81f536 \
+    --hash=sha256:1e0b63a565e8a311cc8348ff1262d5784df0f79d64031d546411afd5dd7ef67d \
+    --hash=sha256:1e497c535f2a9b68c69d36631bf2dba243e05eb343b00b9c7bbdc8c601c6802d \
+    --hash=sha256:1ff9e38356cc803e06134cf8ae9758e836ccd1b793135ef3db53c7c5d71e93bc \
+    --hash=sha256:21f21e7f56206be346bdbda2a6bdb2165a5e6a11821f88fd4911c5a6bbbdc7e2 \
+    --hash=sha256:2c6be1b651fad8f3adb7a5aa12c65b612cd9b89530969af941844ae680f7d981 \
+    --hash=sha256:2f32948c86e0d230a296686db28191b67ed229756f84728847daa0c7ab7406e3 \
+    --hash=sha256:321841cdad1dd0f58fe62e80e9c9c7531f8ebf8be93f047401e930dc47425b1e \
+    --hash=sha256:345c76b349ff145549652436235c5532e5bfe9db690db6f0a6ad301c62b9ef21 \
+    --hash=sha256:393cb27fd859af5fd9c16eb26b1c59b17b390ff66b3ae5d0dd258270191baf13 \
+    --hash=sha256:3c4344e96642e2211fb3a50558feff682c31563a4c64529a931769d40832ca79 \
+    --hash=sha256:3fa909cfd675004aed8b4cc9df352415933656e0155a6209d878b7cb615c787e \
+    --hash=sha256:405b83bed28efaae6d86b6ab287c75712ead0adbfab2a1075a1b7ab47dad4d62 \
+    --hash=sha256:43847799461d8ba71deb4d97b47250c2c2fb66d82cd3cb8b4caf52bb97c03034 \
+    --hash=sha256:461a3dafb9d5fda0bb3385dc507d78b1984b49da3fe4c6d56c869a54373b7008 \
+    --hash=sha256:48a30d718d1a6dfc22a49547450107abe8f4afdf2abdcbe76eb9ed88edc49498 \
+    --hash=sha256:4a22266fb416a3b6c258bf7f83c9fe531ba0b755a56986a81ad69dc0f3bcc070 \
+    --hash=sha256:4b558ce85579b51a2e38703877d1e93b7728a7af664dd45a34e833534f0b755d \
+    --hash=sha256:4d0e32530f941c41eddfc77600ec89b65184cb909c549336463a738fab3ed285 \
+    --hash=sha256:4da73ebd537d75fa7bccfc2228fcaedea0803f21dd9d0bf0d3b67fef3c4af294 \
+    --hash=sha256:4e2936f090bf3f4d1771f44f9077ebccdbc0415d2b598d51a969afcb519df505 \
+    --hash=sha256:508069a04f658210fdeee85a7a0ca84db4bcc110cbb1d21f692caa13210f24a7 \
+    --hash=sha256:5361413fd2ecfdf44dc8f065177dc6aba97fa80a91b815586cb388763acf7f8d \
+    --hash=sha256:54e16e32e60973bb83c315de9975bc1bcfc9bd50bb13001c31da159bc49b0ca1 \
+    --hash=sha256:5b7b09489b71f9f1f64c0fa0977e250ec24500767dab7383ba9912495849cadf \
+    --hash=sha256:5cb378eaa65cd43098f11ff5d27e48ee3b956d2c00d2d6b5bfc2a09fe183be47 \
+    --hash=sha256:5d6fb422772e75385b76ad1c52f45a68bd4efafd8be8d0061c11877be74c4d43 \
+    --hash=sha256:5f4dd3af86dd8a617eb6464622fb64ca86e61ce99b59b5c35d8cd33f9c30603d \
+    --hash=sha256:603e7d640e54ad764d2b4da6b61e126259af84f253a20f512dd10689566e5478 \
+    --hash=sha256:6067f2f07a7121749858c7daa93c8774325c91590b3e81a299621e347740c2ae \
+    --hash=sha256:60df43e868a615c7e15117a1e1c2e5e11f48f6457280eba6ddf8fbefbec7da99 \
+    --hash=sha256:64115ccabbdbe279c24c367b629c6b1d3da9ed36c7420129e27c338a3971bfee \
+    --hash=sha256:6465de861aff7a2559f226b37982007417eab8c3557543879987f58b453519bd \
+    --hash=sha256:648d2f2685590b0103c67a937c2fb9e09bcc8dfb166f0c7c77bd341902a6f5b3 \
+    --hash=sha256:64b433e26993127732ac7b66a7821b2537c3044355798de7c5fcb0af34b8296f \
+    --hash=sha256:677e67f50e2559efc677a4366707070933ad5418b8347a603a49a070890b19bc \
+    --hash=sha256:6ab0f1dbfe5070db98771a56aa14797595acd45a1af9eadfb193851a270e7996 \
+    --hash=sha256:6d70b1579da7fb71be5a841a1f965d19aca0ef27f629cfc07d06b09aafd0a333 \
+    --hash=sha256:6ec84668dd7b937874a2b2c293cd14ba84f37be0d196dead852e0ada9815d807 \
+    --hash=sha256:6f71d92f533770fb027388b35b6e11988ab89242b883f48a6fe7202d238c61f8 \
+    --hash=sha256:76b76a07d4ee611405045c6950a1e24c4362b6b44808d4ad6eea75e0dbc59af4 \
+    --hash=sha256:79a9b8b05f2876c7195a2b698c47528e86a73c61ea203394ff8e7a4434bda5c8 \
+    --hash=sha256:7c1f4bf6ea8eb9d7f30808c2e9894237a96650adfecbf5f3643862dc5982f89e \
+    --hash=sha256:7dfefdcb0dc6a3ba9936063cec65a74595571b375beabe18742b3d91d087eefd \
+    --hash=sha256:7e913098de169c7fc890638ce5e171387363eb812579e637c44261460ac00aa2 \
+    --hash=sha256:7eb8be687c50da0b397d5e0ab7ca200b5ebb639e79a9f5e285851d1944c94be9 \
+    --hash=sha256:7eea9318293bc0ea6447e9ebfba600a62f3428bea7e9c6d42170ae4f481dbab3 \
+    --hash=sha256:852e202875dd6dfd6139ce7ec4e98dac2b17d8d25934dc99900831e81c3adaef \
+    --hash=sha256:856bbe1616425f71c0df5ef2e8755e878d9504d5a531acba58ab4273c52c117a \
+    --hash=sha256:87580c7f7d14f7ec401eda7adac1e2a25e95153e9c339872c8ae61b3208819a1 \
+    --hash=sha256:87abb7f80c0a042f3fe8e5264da1a2756267450bb602110d5327b8eaff7682e7 \
+    --hash=sha256:90e3a281ffe3897991091b7c46fca38c2675bfd4399ffe79dfeded6c52715436 \
+    --hash=sha256:917905de565d9576eb20f53c797c15ba88b9f4f19728acabec8d01eee1d3756a \
+    --hash=sha256:9521f49ae121a17c0a41e5112249e6fa7f6a571245b1118de81fb86e7c1bc1ce \
+    --hash=sha256:962892646599529917ef26266091e4cb3077c88b93c3833a909d68dcc971c4e3 \
+    --hash=sha256:9ae5b0657380d2581e13e46864d147a52c1e2bbac9f59b59c576e42fa7d10cf0 \
+    --hash=sha256:9bbcfc7c279e8d74b076e514e669b683f77b4a2a328585b3f16d4c5259c91222 \
+    --hash=sha256:a035da89c959d98afc813e3c62f052690d67cfd55a36592f25d734b70de7d4b0 \
+    --hash=sha256:a09c4f81635408e3387348f415521d4b94198c562c23330f560596a6aaa26eaf \
+    --hash=sha256:a23397da092ef0a8cfe729571da64c2fc30ac18243caa82ac7c4f965087506ff \
+    --hash=sha256:a484061616fb4b158b80789bd3cb511f399d2116525a8b29b6334c68abc2310f \
+    --hash=sha256:a5cc9381fd54f3c23ae1039f977bfd6d041a5c3c1518104f616643c3a5a73b15 \
+    --hash=sha256:a620d8ce4ea2f1c73c6b6b1399e14cb68c6915e2be3fad5808c2998ed55b4acf \
+    --hash=sha256:a6cc6545d6d76542aee3d18c1c9485fb7b9812b8df4ebe52c4535ec42081b48f \
+    --hash=sha256:a8873089be2aa15494c0f81af1209f6e1237d762c5065bc4766c1b84321e1b50 \
+    --hash=sha256:a8f286a51a32323715d77755ed959f94bef13972e9a2fe71b609e40e6d27957e \
+    --hash=sha256:aeb60962ec4813c539a59fbd4f383509c7222b62c3fb1faa76b54943a613e33a \
+    --hash=sha256:b069ca9bf728e0c5c5b60e00a89df9af34cc170c695c3bfa3b372d8f40288efb \
+    --hash=sha256:b0ef2d0a6f1502d38d911d25609b44c6cc27bee0a4363dd295df78b075041b60 \
+    --hash=sha256:b306c4cf66912511422060f7f5e1149c8bdb404f8e00e600561b0749fdd45659 \
+    --hash=sha256:b35bfcb08b7693ab4bf9059111a6e9f14e07d57ac93cd967c420db58ab9b71e1 \
+    --hash=sha256:b44105792fbdcfbda3e26ee88786790fda409da4c71f6c2b73888108cf8f062f \
+    --hash=sha256:b76ffec27c7450b8a334f967366a9ebadaea66ee43f5b530c12861b1a991f503 \
+    --hash=sha256:ba0734aa300757c924f3faf8148e1b8c247176a0ac8e16aefdf9c1eb19e868f7 \
+    --hash=sha256:bb198c6ed1edbcdaf3d1fa3c9c9d1cdb7e179a5134ef5ee660b53cdec43b34e7 \
+    --hash=sha256:bb6b86cfdfc503e92cb71c68766a24565359136961642504a7cc9faf936d9c88 \
+    --hash=sha256:be94e5a685e60f9d24532af8fe5c268002e9016fa80272a94727f435de3d1003 \
+    --hash=sha256:bed637b674db5e6c8a97a4a321e3e4d73e72d50b5c6b29950008a93069cc64cd \
+    --hash=sha256:c5b399ae6ab975257ec359f03b48fc00b1c1cd109471e41903548469b8feae5c \
+    --hash=sha256:c71d1cabdeee0cdda4669168618f0e46b7dace207b29da7b63aaa1adc2b54081 \
+    --hash=sha256:c7d16beeaaab15b075990cd26963d6b5b22e8c5becd131781514a00b8bdd04bd \
+    --hash=sha256:c8919fdbd3bb596b104388b56ae4b266eb28da1f2f7dff2e1f9334a21840fe96 \
+    --hash=sha256:c9b87baa7bfff9a5878fcc1bffe49ecde6e647a72a64b39a69cd8a2992a43a34 \
+    --hash=sha256:cd56b8ae87ebc71bcacbd73615098e8a8de952ecbb5785b6b4e2b07da8a06e1f \
+    --hash=sha256:cd926e8ae4d1ed1ac4a8f37212a62886292f692bc1739fde98013bf210c2d175 \
+    --hash=sha256:cf0620da2b81946d28c0b16f3e3704d38e9837d85ee4f0652816e2609aaa4fed \
+    --hash=sha256:d14c790b91f6cbcd9b718f88ed737c78939980c69ac8c7f03dd7e60040c12951 \
+    --hash=sha256:d4bba8042ea6ab331ade91bc435d81ad72fddb098e49108610b0ce7780c14e68 \
+    --hash=sha256:d527172919cdea1e13994a66d9708a80c3d33dedcf2f0548e4925e600fef3a3a \
+    --hash=sha256:d656ad38c942e38a470ddbce26b5020e08e1a7ea86b8fd413bb9024b5189993a \
+    --hash=sha256:d6fe315355cdfe3ed22ef355b8bdc81a805ca4d0949d921576560e5b227a1112 \
+    --hash=sha256:d91406f413ccbf4af6ab5ae7bc78f772a95609f9ddd14123db36ef8c37116d95 \
+    --hash=sha256:dac2399ee2889fbdd3472bfc2ede74c34cceb1ccf29a339964281a16eb1d3188 \
+    --hash=sha256:dbaf2bb71d6027152d603f1d5f31e0dfd5e50173d06f877bec484e5396d4594b \
+    --hash=sha256:e064caa55a6ed493aca1eda06f8b3f689778bc780a75e6ad7724642ba5dc62f7 \
+    --hash=sha256:e40b3cb9fa1edb4e0175d7c06345c49c7925fe93e39ef55ecb0bc40c906b0c09 \
+    --hash=sha256:e49066d251dbbe4e6e3a5c3937d85b589e40e2669ad0eef41a00f82ec17d844b \
+    --hash=sha256:e6ec283d4741befb86e8c3ea2e9ac1d17416c956d392107e45263e736954b1f7 \
+    --hash=sha256:e788608ed7767b7b3bbde6d49058bccdf94df0de9ca75d13aa99020cc7e68095 \
+    --hash=sha256:e8a9475d415ef1eaae7942df6f780fa4dcd48fce32825eda591a17abba869299 \
+    --hash=sha256:e8da5355d7d75a52df5b84750989e34e39919ec7e59fafc4c104cc1607ab2d31 \
+    --hash=sha256:ea1923d2e7880f9e1959e035da661767b5a2e16a45dfd57d6aa831e8b65ee1bf \
+    --hash=sha256:ea816dc8f8e65841a8bbdd30e921edffeeb6f76efe6a1eb0da147b60d539d1cf \
+    --hash=sha256:eb7a9d8a2e400a1026de341ad48e21670a6261a75b06df162c5c39b0d0e7c8f4 \
+    --hash=sha256:eceb551dfeaf19c609003a69a0cf8264b0efd7abc3791a11dfabf4788daf0d19 \
+    --hash=sha256:ed0f7982f10581bb16553719e5e8f933e003f5b22f7d25a68bdb30fac630a6ff \
+    --hash=sha256:f00079f8e69d75c2a417de7961a77612bb77ef46c09bc74607d86de4740771ef \
+    --hash=sha256:f0b84fc50b6dbeced4fa390688c07c10a73222810fb0e08392bd1a1b8259de36 \
+    --hash=sha256:f135e804986b12bf14f2cd1eb86674c47dea86c4c5f0fa13c88978876b97ebe6 \
+    --hash=sha256:f2de9a31c34e543ae089fd2a5ced01292f725190e379921384f695e2d7184bd3 \
+    --hash=sha256:f2f8692f95c9e377eb19ca519d30d1f884b02feb7e115f798de47570a359e43f \
+    --hash=sha256:f4dcadb7b8034aa3491ee8f5a69b3d9ba9d7d1e55c3cc1fc45be313e708277f8 \
+    --hash=sha256:f4f44381b0a4bdf64416082f4f0e7140377ae962c0ced6f983c6d7bbfc034040 \
+    --hash=sha256:f708e91fdbe443f3bec2df394ed42328fb9b0446dff5cb4199023ac6499e09fd \
+    --hash=sha256:f9346e98fc2abcef90b942973087e2462af6d3e3710e82938078d3493f7fef52 \
+    --hash=sha256:fc6d3e80dd8239850f2604833ff3168b28909c8a9357abfed95632cccd17e3e7 \
+    --hash=sha256:fe71fd4b76380c2772f96f1e53a524da7063645d647a4fcd3b651bdd80ca0f2e
+    # via -r third_party/python_requirements.in
 certifi==2023.11.17 \
     --hash=sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1 \
     --hash=sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474