Mirror of https://github.com/daveallie/crosspoint-reader.git, synced 2026-02-08 08:37:38 +03:00

Compare commits: 06ced8f2d1...f3e3f4c56e

2 commits:

- f3e3f4c56e
- 05e409ce99
```diff
@@ -48,6 +48,7 @@ bool BookMetadataCache::beginTocPass() {
     spineFile.close();
     return false;
   }
+
   return true;
 }
 
```
```diff
@@ -124,6 +125,18 @@ bool BookMetadataCache::buildBookBin(const std::string& epubPath, const BookMeta
   // LUTs complete
   // Loop through spines from spine file matching up TOC indexes, calculating cumulative size and writing to book.bin
 
+  // Build spineIndex->tocIndex mapping in one pass (O(n) instead of O(n*m))
+  std::vector<int16_t> spineToTocIndex(spineCount, -1);
+  tocFile.seek(0);
+  for (int j = 0; j < tocCount; j++) {
+    auto tocEntry = readTocEntry(tocFile);
+    if (tocEntry.spineIndex >= 0 && tocEntry.spineIndex < spineCount) {
+      if (spineToTocIndex[tocEntry.spineIndex] == -1) {
+        spineToTocIndex[tocEntry.spineIndex] = static_cast<int16_t>(j);
+      }
+    }
+  }
+
   ZipFile zip(epubPath);
   // Pre-open zip file to speed up size calculations
   if (!zip.open()) {
```
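The added block replaces a per-spine rescan of the TOC file with a single pass that records, for each spine index, the first TOC entry pointing at it. A minimal standalone sketch of the same lookup-table idea, using an in-memory vector in place of the reader's cache files (the `TocEntry` struct and the sample data are simplified stand-ins, not the project's actual types):

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

// Simplified stand-in for a TOC entry; the real cache entry holds more fields.
struct TocEntry {
  int16_t spineIndex;  // spine item this TOC entry points at, -1 if none
};

int main() {
  const int spineCount = 6;
  // Sample TOC: several entries can target the same spine item; the first one wins.
  const std::vector<TocEntry> toc = {{0}, {2}, {2}, {5}, {-1}, {3}};

  // One pass over the TOC builds the spineIndex -> tocIndex lookup table.
  std::vector<int16_t> spineToTocIndex(spineCount, -1);
  for (int j = 0; j < static_cast<int>(toc.size()); j++) {
    const auto& tocEntry = toc[j];
    if (tocEntry.spineIndex >= 0 && tocEntry.spineIndex < spineCount &&
        spineToTocIndex[tocEntry.spineIndex] == -1) {
      spineToTocIndex[tocEntry.spineIndex] = static_cast<int16_t>(j);
    }
  }

  // The spine loop can then resolve each tocIndex with a single vector lookup.
  for (int i = 0; i < spineCount; i++) {
    std::printf("spine %d -> toc %d\n", i, spineToTocIndex[i]);
  }
  return 0;
}
```

Spines with no matching TOC entry keep the -1 sentinel, which lines up with the "no TOC entry for a spine item is expected for EPUBs" note in the next hunk.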
```diff
@@ -150,14 +163,7 @@ bool BookMetadataCache::buildBookBin(const std::string& epubPath, const BookMeta
   for (int i = 0; i < spineCount; i++) {
     auto spineEntry = readSpineEntry(spineFile);
 
-    tocFile.seek(0);
-    for (int j = 0; j < tocCount; j++) {
-      auto tocEntry = readTocEntry(tocFile);
-      if (tocEntry.spineIndex == i) {
-        spineEntry.tocIndex = j;
-        break;
-      }
-    }
+    spineEntry.tocIndex = spineToTocIndex[i];
 
     // Not a huge deal if we don't find a TOC entry for the spine entry, this is expected behaviour for EPUBs
     // Logging here is for debugging
```
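The removed inner loop and the new table lookup resolve each spine item to the same TOC entry (the first one whose spineIndex matches), so behaviour should be unchanged; what drops is the number of readTocEntry calls, each of which was a seek-and-read against the TOC cache file. A rough self-contained illustration of the difference, with in-memory stand-ins and made-up counts rather than the real tocFile:

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

// Hypothetical in-memory TOC; each element access models one readTocEntry() call.
struct TocEntry { int16_t spineIndex; };

int main() {
  const int spineCount = 200;
  const int tocCount = 150;
  std::vector<TocEntry> toc(tocCount);
  for (int j = 0; j < tocCount; j++) toc[j].spineIndex = static_cast<int16_t>(j % spineCount);

  // Old shape: rescan the TOC for every spine entry.
  long oldReads = 0;
  for (int i = 0; i < spineCount; i++) {
    for (int j = 0; j < tocCount; j++) {
      oldReads++;                       // one readTocEntry() per inner iteration
      if (toc[j].spineIndex == i) break;
    }
  }

  // New shape: one TOC pass builds the lookup table, then each spine is O(1).
  long newReads = 0;
  std::vector<int16_t> spineToTocIndex(spineCount, -1);
  for (int j = 0; j < tocCount; j++) {
    newReads++;                         // one readTocEntry() per TOC entry, total
    int16_t s = toc[j].spineIndex;
    if (s >= 0 && s < spineCount && spineToTocIndex[s] == -1) {
      spineToTocIndex[s] = static_cast<int16_t>(j);
    }
  }

  std::printf("TOC entry reads: old=%ld new=%ld\n", oldReads, newReads);
  return 0;
}
```

With these synthetic sizes the rescan shape performs 18825 entry reads against 150 for the single pass; the exact numbers are made up, but the O(n*m) versus O(n+m) shape is the point of the change.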
```diff
@@ -248,19 +254,15 @@ void BookMetadataCache::createTocEntry(const std::string& title, const std::stri
     return;
   }
 
-  int spineIndex = -1;
-  // find spine index
-  // TODO: This lookup is slow as need to scan through all items each time. We can't hold it all in memory due to size.
-  // But perhaps we can load just the hrefs in a vector/list to do an index lookup?
+  int16_t spineIndex = -1;
   spineFile.seek(0);
   for (int i = 0; i < spineCount; i++) {
     auto spineEntry = readSpineEntry(spineFile);
     if (spineEntry.href == href) {
-      spineIndex = i;
+      spineIndex = static_cast<int16_t>(i);
       break;
     }
   }
 
   if (spineIndex == -1) {
     Serial.printf("[%lu] [BMC] addTocEntry: Could not find spine item for TOC href %s\n", millis(), href.c_str());
   }
```
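This last hunk narrows spineIndex from int to int16_t and makes the narrowing of the loop counter explicit with static_cast, presumably so the local matches the 16-bit index type used for spineToTocIndex and the cached entries (an assumption; the entry layout isn't shown in this diff). One thing the cast alone doesn't do is range-check: if spineCount ever exceeded INT16_MAX, static_cast<int16_t>(i) would silently wrap. A hypothetical helper sketching a checked version of the same pattern:

```cpp
#include <cstdint>
#include <cstdio>
#include <limits>

// Hypothetical helper: carry an index in int16_t, with -1 reserved as "not found".
int16_t toIndex16(int i) {
  if (i < 0 || i > std::numeric_limits<int16_t>::max()) {
    return -1;  // out of range for a 16-bit index; treat as not found
  }
  return static_cast<int16_t>(i);
}

int main() {
  // 5 stays 5; 40000 and -3 both collapse to the -1 sentinel.
  std::printf("%d %d %d\n", toIndex16(5), toIndex16(40000), toIndex16(-3));
  return 0;
}
```

For the spine counts a typical EPUB has, the unchecked cast is fine; the guard only matters if the 16-bit assumption is ever violated.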