Refactor removeDuplicateGradients to loop until it reaches a fixed point

This commit enables a future optimization (but is not a notable
optimization in itself).

Signed-off-by: Niels Thykier <niels@thykier.net>
This commit is contained in:
Niels Thykier 2020-05-21 13:53:06 +00:00
parent a3f761f40c
commit 0e82b8dcad
No known key found for this signature in database
GPG key ID: A65B78DBE67C7AAC

View file

@@ -1617,18 +1617,21 @@ def dedup_gradient(master_id, duplicates_ids, duplicates, referenced_ids):
def removeDuplicateGradients(doc):
    """Remove duplicate ``<linearGradient>``/``<radialGradient>`` elements.

    Runs the deduplication pass repeatedly until a fixed point is reached:
    merging one group of duplicates can make previously-distinct gradients
    identical, so the loop continues as long as the most recent pass removed
    at least one element.

    :param doc: the SVG DOM document to deduplicate gradients in.
    :return: the total number of gradient elements removed across all passes.
    """
    prev_num = -1  # differs from num so the loop body runs at least once
    num = 0
    while prev_num != num:
        prev_num = num
        linear_gradients = doc.getElementsByTagName('linearGradient')
        radial_gradients = doc.getElementsByTagName('radialGradient')
        # get a collection of all elements that are referenced and their
        # referencing elements
        referenced_ids = findReferencedElements(doc.documentElement)
        for master_id, duplicates_ids, duplicates in detect_duplicate_gradients(linear_gradients, radial_gradients):
            dedup_gradient(master_id, duplicates_ids, duplicates, referenced_ids)
            num += len(duplicates)
    return num
@@ -3775,8 +3778,7 @@ def scourString(in_string, options=None):
pass
# remove duplicate gradients
while removeDuplicateGradients(doc) > 0:
pass
_num_elements_removed += removeDuplicateGradients(doc)
if options.group_collapse:
_num_elements_removed += mergeSiblingGroupsWithCommonAttributes(doc.documentElement)