[varStore] Handle >65535 items per encoding

By creating a new major (VarData) for each 65535 items.
Behdad Esfahbod 2023-10-23 13:41:16 -06:00
parent 6e1f55b0c8
commit c7ce5b0f3c
2 changed files with 42 additions and 8 deletions
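
A 32-bit variation index ("VarIdx") packs the VarData subtable index, the major, into its high 16 bits, and the row index within that subtable, the minor, into its low 16 bits, so a single VarData can address only ~64k rows; that is the overflow this commit works around. A minimal sketch of that packing, not part of the patch (function names are illustrative):

    def pack_var_idx(major, minor):
        # High 16 bits select the VarData; low 16 bits select the row.
        assert 0 <= major <= 0xFFFF and 0 <= minor <= 0xFFFF
        return (major << 16) + minor

    def unpack_var_idx(var_idx):
        return var_idx >> 16, var_idx & 0xFFFF

Note the patch caps each major at 0xFFFF rows rather than the 0x10000 a 16-bit minor could address; one plausible reason is that this avoids ever producing 0xFFFFFFFF, the reserved NO_VARIATION_INDEX value that the optimizer's use_NO_VARIATION_INDEX flag refers to.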


@@ -619,12 +619,19 @@ def VarStore_optimize(self, use_NO_VARIATION_INDEX=True, quantization=1):
     back_mapping = {}  # Mapping from full rows to new VarIdxes
     encodings.sort(key=_Encoding.width_sort_key)
     self.VarData = []
-    for major, encoding in enumerate(encodings):
-        data = ot.VarData()
-        self.VarData.append(data)
-        data.VarRegionIndex = range(n)
-        data.VarRegionCount = len(data.VarRegionIndex)
-        data.Item = sorted(encoding.items)
-        for minor, item in enumerate(data.Item):
-            back_mapping[item] = (major << 16) + minor
+    for encoding in encodings:
+        items = sorted(encoding.items)
+        while items:
+            major = len(self.VarData)
+            data = ot.VarData()
+            self.VarData.append(data)
+            data.VarRegionIndex = range(n)
+            data.VarRegionCount = len(data.VarRegionIndex)
+
+            # Each major can only encode up to 0xFFFF entries.
+            data.Item, items = items[:0xFFFF], items[0xFFFF:]
+
+            for minor, item in enumerate(data.Item):
+                back_mapping[item] = (major << 16) + minor
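
The rewritten loop drains the sorted items in chunks of at most 0xFFFF, opening a fresh VarData (and thus a fresh major) per chunk. A standalone sketch of that chunking, with plain integers standing in for the row tuples (nothing here is from the patch):

    items = list(range(150000))  # stand-in for sorted(encoding.items)
    chunks = []
    while items:
        # Same slicing as the patch: at most 0xFFFF rows per major.
        chunk, items = items[:0xFFFF], items[0xFFFF:]
        chunks.append(chunk)
    assert [len(c) for c in chunks] == [65535, 65535, 18930]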


@@ -256,3 +256,30 @@ def test_quantize(quantization, expectedBytes):
     data = writer.getAllData()
     assert len(data) == expectedBytes, xml
+
+
+def test_optimize_overflow():
+    numRegions = 1
+    locations = [{"wght": 0}, {"wght": 0.5}]
+    axisTags = ["wght"]
+
+    model = VariationModel(locations)
+    builder = OnlineVarStoreBuilder(axisTags)
+    builder.setModel(model)
+
+    for data in range(0, 0xFFFF * 2):
+        data = [0, data]
+        builder.storeMasters(data)
+
+    varStore = builder.finish()
+    varStore.optimize()
+
+    for s in varStore.VarData:
+        print(len(s.Item))
+
+    # 4 data-sets:
+    # - 0..127: 1-byte dataset
+    # - 128..32767: 2-byte dataset
+    # - 32768..32768+65535-1: 4-byte dataset
+    # - 32768+65535..65535+65535-1: 4-byte dataset
+    assert len(varStore.VarData) == 4
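
As a quick check of why the assertion expects 4 subtables, assuming the signed 8-/16-/32-bit width buckets named in the comment above:

    from math import ceil

    total = 0xFFFF * 2             # 131070 stored values: 0..131069
    one_byte = 128                 # 0..127 fit in a signed byte
    two_byte = 32768 - 128         # 128..32767 fit in a signed short
    four_byte = total - 32768      # 32768..131069 need the wide format
    majors = 1 + 1 + ceil(four_byte / 0xFFFF)  # the wide bucket splits in two
    assert majors == 4             # matches the test's assertion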