1
0
mirror of https://xff.cz/git/u-boot/ synced 2025-09-17 16:42:11 +02:00

binman: Store the original data before compression

When compressing an entry, the original uncompressed data is overwritten.
Store it so it is available if needed.

Signed-off-by: Simon Glass <sjg@chromium.org>
This commit is contained in:
Simon Glass
2020-10-26 17:40:15 -06:00
parent 7d398bb1c7
commit 97c3e9a6fa
2 changed files with 18 additions and 1 deletion

View File

@@ -60,7 +60,10 @@ class Entry(object):
the containing section, 0 if none. The pad bytes become part of
the entry.
data: Contents of entry (string of bytes). This does not include
padding created by pad_before or pad_after
padding created by pad_before or pad_after. If the entry is
compressed, this contains the compressed data.
uncomp_data: Original uncompressed data, if this entry is compressed,
else None
compress: Compression algorithm used (e.g. 'lz4'), 'none' if none
orig_offset: Original offset value read from node
orig_size: Original size value read from node
@@ -83,6 +86,7 @@ class Entry(object):
self.pre_reset_size = None
self.uncomp_size = None
self.data = None
self.uncomp_data = None
self.contents_size = 0
self.align = None
self.align_size = None
@@ -856,6 +860,7 @@ features to produce new behaviours.
Returns:
Compressed data (first word is the compressed size)
"""
self.uncomp_data = indata
if self.compress != 'none':
self.uncomp_size = len(indata)
data = tools.Compress(indata, self.compress)

View File

@@ -1810,6 +1810,18 @@ class TestFunctional(unittest.TestCase):
props = self._GetPropTree(dtb, ['size', 'uncomp-size'])
orig = self._decompress(data)
self.assertEquals(COMPRESS_DATA, orig)
# Do a sanity check on various fields
image = control.images['image']
entries = image.GetEntries()
self.assertEqual(1, len(entries))
entry = entries['blob']
self.assertEqual(COMPRESS_DATA, entry.uncomp_data)
self.assertEqual(len(COMPRESS_DATA), entry.uncomp_size)
orig = self._decompress(entry.data)
self.assertEqual(orig, entry.uncomp_data)
expected = {
'blob:uncomp-size': len(COMPRESS_DATA),
'blob:size': len(data),