Nick Child <nnac123@xxxxxxxxxxxxx> writes:
> diff --git a/lib/hexdump.c b/lib/hexdump.c
> index c3db7c3a7643..181b82dfe40d 100644
> --- a/lib/hexdump.c
> +++ b/lib/hexdump.c
> @@ -263,19 +263,14 @@ void print_hex_dump(const char *level, const char *prefix_str, int prefix_type,
>  		    const void *buf, size_t len, bool ascii)
>  {
> -	for (i = 0; i < len; i += rowsize) {
> -		linelen = min(remaining, rowsize);
> -		remaining -= rowsize;
> -
> -		hex_dump_to_buffer(ptr + i, linelen, rowsize, groupsize,
> -				   linebuf, sizeof(linebuf), ascii);
> -
> +	for_each_line_in_hex_dump(i, rowsize, linebuf, sizeof(linebuf),
> +				  groupsize, buf, len) {

Several callers of print_hex_dump pass true for the ascii parameter,
which gets passed along to hex_dump_to_buffer. But you ignore it in
for_each_line_in_hex_dump and always use false:

> +#define for_each_line_in_hex_dump(i, rowsize, linebuf, linebuflen, groupsize, \
> +				   buf, len)				\
> +	for ((i) = 0;							\
> +	     (i) < (len) &&						\
> +	     hex_dump_to_buffer((unsigned char *)(buf) + (i),		\
> +				(len) - (i), (rowsize), (groupsize),	\
> +				(linebuf), (linebuflen), false);	\
> +	     (i) += (rowsize) == 32 ? 32 : 16				\
> +	)
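
print_hex_dump_bytes(), for example, hard-codes ascii=true when it calls
print_hex_dump(). Quoting the current lib/hexdump.c from memory, so please
double-check:

void print_hex_dump_bytes(const char *prefix_str, int prefix_type,
			  const void *buf, size_t len)
{
	print_hex_dump(KERN_DEBUG, prefix_str, prefix_type, 16, 1,
		       buf, len, true);
}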
Is this behavior change intended?
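
If it is not, one option (just a sketch, not tested) would be to make the
ascii flag a parameter of the macro and forward it instead of hard-coding
false, roughly:

#define for_each_line_in_hex_dump(i, rowsize, linebuf, linebuflen, groupsize, \
				   buf, len, ascii)			\
	for ((i) = 0;							\
	     (i) < (len) &&						\
	     hex_dump_to_buffer((unsigned char *)(buf) + (i),		\
				(len) - (i), (rowsize), (groupsize),	\
				(linebuf), (linebuflen), (ascii));	\
	     (i) += (rowsize) == 32 ? 32 : 16				\
	)

and then have print_hex_dump() pass its own ascii argument through at the
call site.
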
-Dave