perf: batch ZIP size lookup in buildBookBin (O(n*m) → O(n log n))

Add ZipFile::fillUncompressedSizes() for single-pass ZIP central directory
scan with hash-based target matching.

Also apply clang-format fixes for CI.

Shadow Slave results:
- buildBookBin: 506s → 35s
- Total indexing: 8.7min → 50s
Daniel 2026-01-21 19:55:24 -08:00
parent 06ced8f2d1
commit 8b08e684f8
6 changed files with 171 additions and 28 deletions
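Rough intuition for the speed-up: with roughly 2,000 spine items (n) and a comparable number of central-directory entries (m), the old path rescans the directory once per spine item (on the order of n*m header reads), while the new path hashes and sorts the spine hrefs once and binary-searches them during a single directory pass (one scan plus about m*log(n) comparisons). Below is a minimal caller-side sketch of that pattern; it uses the SizeTarget / fnvHash64 / fillUncompressedSizes API added by this commit, but the lookupSpineSizes helper, the hrefs vector, and the "ZipFile.h" header name are illustrative assumptions, not code from the diff.

#include <algorithm>
#include <cstdint>
#include <string>
#include <vector>

#include "ZipFile.h"  // assumed header name for the ZipFile class shown below

// Sketch: resolve uncompressed sizes for all spine hrefs in one central-directory pass.
// In buildBookBin the hrefs come from readSpineEntry() + FsHelpers::normalisePath().
std::vector<uint32_t> lookupSpineSizes(ZipFile& zip, const std::vector<std::string>& hrefs) {
  std::vector<ZipFile::SizeTarget> targets;
  targets.reserve(hrefs.size());
  for (size_t i = 0; i < hrefs.size(); i++) {
    targets.push_back({ZipFile::fnvHash64(hrefs[i].c_str(), hrefs[i].size()),
                       static_cast<uint16_t>(hrefs[i].size()), static_cast<uint16_t>(i)});
  }
  // Contract: targets must be sorted by (hash, len) so the scanner can binary-search them.
  std::sort(targets.begin(), targets.end(), [](const ZipFile::SizeTarget& a, const ZipFile::SizeTarget& b) {
    return a.hash < b.hash || (a.hash == b.hash && a.len < b.len);
  });
  std::vector<uint32_t> sizes(hrefs.size(), 0);  // 0 = not found; caller falls back per item
  zip.fillUncompressedSizes(targets, sizes);     // single scan over the ZIP central directory
  return sizes;
}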


@@ -176,9 +176,46 @@ bool BookMetadataCache::buildBookBin(const std::string& epubPath, const BookMeta
   // NOTE: We intentionally skip calling loadAllFileStatSlims() here.
   // For large EPUBs (2000+ chapters), pre-loading all ZIP central directory entries
   // into memory causes OOM crashes on ESP32-C3's limited ~380KB RAM.
-  // Instead, we let loadFileStatSlim() do individual lookups per spine item.
-  // This is O(n*m) instead of O(n) for lookups, but avoids memory exhaustion.
+  // Instead, for large books we use a one-pass batch lookup that scans the ZIP
+  // central directory once and matches against spine targets using hash comparison.
+  // This is O(n*log(m)) instead of O(n*m) while avoiding memory exhaustion.
   // See: https://github.com/crosspoint-reader/crosspoint-reader/issues/134
+  std::vector<uint32_t> spineSizes;
+  bool useBatchSizes = false;
+  if (spineCount >= LARGE_SPINE_THRESHOLD) {
+    Serial.printf("[%lu] [BMC] Using batch size lookup for %d spine items\n", millis(), spineCount);
+    std::vector<ZipFile::SizeTarget> targets;
+    targets.reserve(spineCount);
+    spineFile.seek(0);
+    for (int i = 0; i < spineCount; i++) {
+      auto entry = readSpineEntry(spineFile);
+      std::string path = FsHelpers::normalisePath(entry.href);
+      ZipFile::SizeTarget t;
+      t.hash = ZipFile::fnvHash64(path.c_str(), path.size());
+      t.len = static_cast<uint16_t>(path.size());
+      t.index = static_cast<uint16_t>(i);
+      targets.push_back(t);
+    }
+    std::sort(targets.begin(), targets.end(), [](const ZipFile::SizeTarget& a, const ZipFile::SizeTarget& b) {
+      return a.hash < b.hash || (a.hash == b.hash && a.len < b.len);
+    });
+    spineSizes.resize(spineCount, 0);
+    int matched = zip.fillUncompressedSizes(targets, spineSizes);
+    Serial.printf("[%lu] [BMC] Batch lookup matched %d/%d spine items\n", millis(), matched, spineCount);
+    targets.clear();
+    targets.shrink_to_fit();
+    useBatchSizes = true;
+  }
   uint32_t cumSize = 0;
   spineFile.seek(0);
   int lastSpineTocIndex = -1;
@@ -197,15 +234,24 @@ bool BookMetadataCache::buildBookBin(const std::string& epubPath, const BookMeta
     }
     lastSpineTocIndex = spineEntry.tocIndex;
-    // Calculate size for cumulative size
     size_t itemSize = 0;
-    const std::string path = FsHelpers::normalisePath(spineEntry.href);
-    if (zip.getInflatedFileSize(path.c_str(), &itemSize)) {
-      cumSize += itemSize;
-      spineEntry.cumulativeSize = cumSize;
-    } else {
-      Serial.printf("[%lu] [BMC] Warning: Could not get size for spine item: %s\n", millis(), path.c_str());
-    }
+    if (useBatchSizes) {
+      itemSize = spineSizes[i];
+      if (itemSize == 0) {
+        const std::string path = FsHelpers::normalisePath(spineEntry.href);
+        if (!zip.getInflatedFileSize(path.c_str(), &itemSize)) {
+          Serial.printf("[%lu] [BMC] Warning: Could not get size for spine item: %s\n", millis(), path.c_str());
+        }
+      }
+    } else {
+      const std::string path = FsHelpers::normalisePath(spineEntry.href);
+      if (!zip.getInflatedFileSize(path.c_str(), &itemSize)) {
+        Serial.printf("[%lu] [BMC] Warning: Could not get size for spine item: %s\n", millis(), path.c_str());
+      }
+    }
+    cumSize += itemSize;
+    spineEntry.cumulativeSize = cumSize;
     // Write out spine data to book.bin
     writeSpineEntry(bookFile, spineEntry);
@@ -282,8 +328,8 @@ void BookMetadataCache::createTocEntry(const std::string& title, const std::stri
   uint64_t targetHash = fnvHash64(href);
   uint16_t targetLen = static_cast<uint16_t>(href.size());
-  auto it = std::lower_bound(spineHrefIndex.begin(), spineHrefIndex.end(),
-                             SpineHrefIndexEntry{targetHash, targetLen, 0},
-                             [](const SpineHrefIndexEntry& a, const SpineHrefIndexEntry& b) {
-                               return a.hrefHash < b.hrefHash || (a.hrefHash == b.hrefHash && a.hrefLen < b.hrefLen);
-                             });
+  auto it =
+      std::lower_bound(spineHrefIndex.begin(), spineHrefIndex.end(), SpineHrefIndexEntry{targetHash, targetLen, 0},
+                       [](const SpineHrefIndexEntry& a, const SpineHrefIndexEntry& b) {
+                         return a.hrefHash < b.hrefHash || (a.hrefHash == b.hrefHash && a.hrefLen < b.hrefLen);
+                       });


@@ -135,8 +135,7 @@ void XMLCALL ContentOpfParser::startElement(void* userData, const XML_Char* name
   // Sort item index for binary search if we have enough items
   if (self->itemIndex.size() >= LARGE_SPINE_THRESHOLD) {
-    std::sort(self->itemIndex.begin(), self->itemIndex.end(),
-              [](const ItemIndexEntry& a, const ItemIndexEntry& b) {
-                return a.idHash < b.idHash || (a.idHash == b.idHash && a.idLen < b.idLen);
-              });
+    std::sort(self->itemIndex.begin(), self->itemIndex.end(), [](const ItemIndexEntry& a, const ItemIndexEntry& b) {
+      return a.idHash < b.idHash || (a.idHash == b.idHash && a.idLen < b.idLen);
+    });
     self->useItemIndex = true;


@@ -1,8 +1,8 @@
 #pragma once
 
 #include <Print.h>
-#include <vector>
 #include <algorithm>
+#include <vector>
 
 #include "Epub.h"
 #include "expat.h"


@@ -4,6 +4,8 @@
 #include <SDCardManager.h>
 #include <miniz.h>
+#include <algorithm>
+
 bool inflateOneShot(const uint8_t* inputBuf, const size_t deflatedSize, uint8_t* outputBuf, const size_t inflatedSize) {
   // Setup inflator
   const auto inflator = static_cast<tinfl_decompressor*>(malloc(sizeof(tinfl_decompressor)));
@@ -302,6 +304,80 @@ bool ZipFile::getInflatedFileSize(const char* filename, size_t* size) {
   return true;
 }
 
+int ZipFile::fillUncompressedSizes(std::vector<SizeTarget>& targets, std::vector<uint32_t>& sizes) {
+  if (targets.empty()) {
+    return 0;
+  }
+
+  const bool wasOpen = isOpen();
+  if (!wasOpen && !open()) {
+    return 0;
+  }
+
+  if (!loadZipDetails()) {
+    if (!wasOpen) {
+      close();
+    }
+    return 0;
+  }
+
+  file.seek(zipDetails.centralDirOffset);
+
+  int matched = 0;
+  uint32_t sig;
+  char itemName[256];
+  while (file.available()) {
+    file.read(&sig, 4);
+    if (sig != 0x02014b50) break;
+
+    file.seekCur(6);
+    uint16_t method;
+    file.read(&method, 2);
+    file.seekCur(8);
+    uint32_t compressedSize, uncompressedSize;
+    file.read(&compressedSize, 4);
+    file.read(&uncompressedSize, 4);
+    uint16_t nameLen, m, k;
+    file.read(&nameLen, 2);
+    file.read(&m, 2);
+    file.read(&k, 2);
+    file.seekCur(8);
+    uint32_t localHeaderOffset;
+    file.read(&localHeaderOffset, 4);
+
+    if (nameLen < 256) {
+      file.read(itemName, nameLen);
+      itemName[nameLen] = '\0';
+      uint64_t hash = fnvHash64(itemName, nameLen);
+      SizeTarget key = {hash, nameLen, 0};
+      auto it = std::lower_bound(targets.begin(), targets.end(), key, [](const SizeTarget& a, const SizeTarget& b) {
+        return a.hash < b.hash || (a.hash == b.hash && a.len < b.len);
+      });
+      while (it != targets.end() && it->hash == hash && it->len == nameLen) {
+        if (it->index < sizes.size()) {
+          sizes[it->index] = uncompressedSize;
+          matched++;
+        }
+        ++it;
+      }
+    } else {
+      file.seekCur(nameLen);
+    }
+    file.seekCur(m + k);
+  }
+
+  if (!wasOpen) {
+    close();
+  }
+  return matched;
+}
+
 uint8_t* ZipFile::readFileToMemory(const char* filename, size_t* size, const bool trailingNullByte) {
   const bool wasOpen = isOpen();
   if (!wasOpen && !open()) {
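The scanner above reads only the fields it needs from each central-directory record and seeks past the rest. For anyone cross-checking the magic offsets (seekCur(6), seekCur(8), seekCur(m + k)), this is the standard central directory file header layout from the ZIP specification (PKWARE APPNOTE, section 4.3.12) annotated against those reads; the table is editorial, not part of the commit. Note the raw file.read() calls into uint16_t/uint32_t also assume a little-endian host, which holds on the ESP32 targets this code runs on.

// offset  size  field                        handling in fillUncompressedSizes
//   0      4    signature (0x02014b50)       read; loop exits on mismatch
//   4      2    version made by              skipped  (seekCur(6))
//   6      2    version needed to extract    skipped  (seekCur(6))
//   8      2    general purpose bit flag     skipped  (seekCur(6))
//  10      2    compression method           read into method
//  12      2    last mod file time           skipped  (seekCur(8))
//  14      2    last mod file date           skipped  (seekCur(8))
//  16      4    CRC-32                       skipped  (seekCur(8))
//  20      4    compressed size              read into compressedSize
//  24      4    uncompressed size            read; copied to sizes[target.index]
//  28      2    file name length             read into nameLen
//  30      2    extra field length           read into m
//  32      2    file comment length          read into k
//  34      2    disk number start            skipped  (seekCur(8))
//  36      2    internal file attributes     skipped  (seekCur(8))
//  38      4    external file attributes     skipped  (seekCur(8))
//  42      4    local header offset          read into localHeaderOffset (unused here)
//  46      n    file name                    read and hashed when nameLen < 256
//          m+k  extra field + file comment   skipped with seekCur(m + k)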


@@ -3,6 +3,7 @@
 #include <string>
 #include <unordered_map>
+#include <vector>
 
 class ZipFile {
  public:
@@ -19,6 +20,23 @@ class ZipFile {
     bool isSet;
   };
 
+  // Target for batch uncompressed size lookup (sorted by hash, then len)
+  struct SizeTarget {
+    uint64_t hash;   // FNV-1a 64-bit hash of normalized path
+    uint16_t len;    // Length of path for collision reduction
+    uint16_t index;  // Caller's index (e.g. spine index)
+  };
+
+  // FNV-1a 64-bit hash computed from char buffer (no std::string allocation)
+  static uint64_t fnvHash64(const char* s, size_t len) {
+    uint64_t hash = 14695981039346656037ull;
+    for (size_t i = 0; i < len; i++) {
+      hash ^= static_cast<uint8_t>(s[i]);
+      hash *= 1099511628211ull;
+    }
+    return hash;
+  }
+
  private:
   const std::string& filePath;
   FsFile file;
@@ -43,6 +61,10 @@ class ZipFile {
   bool close();
   bool loadAllFileStatSlims();
   bool getInflatedFileSize(const char* filename, size_t* size);
+  // Batch lookup: scan ZIP central dir once and fill sizes for matching targets.
+  // targets must be sorted by (hash, len). sizes[target.index] receives uncompressedSize.
+  // Returns number of targets matched.
+  int fillUncompressedSizes(std::vector<SizeTarget>& targets, std::vector<uint32_t>& sizes);
 
   // Due to the memory required to run each of these, it is recommended to not preopen the zip file for multiple
   // These functions will open and close the zip as needed
   uint8_t* readFileToMemory(const char* filename, size_t* size = nullptr, bool trailingNullByte = false);
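One behaviour the header comment only implies: because the scan advances through every target that shares an equal (hash, len) key, duplicate spine hrefs are all filled from a single central-directory entry, and anything the archive does not contain simply stays 0 so the caller can fall back per item. A hypothetical fragment (the path, the indices, and the surrounding zip object are invented for illustration):

  // Two spine entries that point at the same chapter file.
  std::vector<ZipFile::SizeTarget> targets = {
      {ZipFile::fnvHash64("OEBPS/ch001.xhtml", 17), 17, 0},
      {ZipFile::fnvHash64("OEBPS/ch001.xhtml", 17), 17, 1},
  };
  // Already sorted by (hash, len) since both keys are identical.
  std::vector<uint32_t> sizes(2, 0);
  int matched = zip.fillUncompressedSizes(targets, sizes);
  // If the archive contains OEBPS/ch001.xhtml, matched == 2 and
  // sizes[0] == sizes[1] == its uncompressed size; otherwise both stay 0.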