@@ -1017,20 +1017,19 @@ def __distribute_to_results(self, item_id, response, item):
 
         if item_id in response.data["success"]:
             self.results["succeeded"].append(item)
+            response.data["success"].discard(item_id)
         elif item_id in response.data["skipped"]:
             self.results["skipped"].append(item)
-        else:
+            response.data["skipped"].discard(item_id)
+        elif item_id in response.data["failed"]:
             self.results["failed"].append(item)
+            response.data["failed"].discard(item_id)
 
     def validate_items(
         self,
     ):
 
         filtered_items = self.__filter_duplicates()
-        if len(filtered_items) != len(self.items):
-            self.reporter.log_info(
-                f"Dropping duplicates. Found {len(filtered_items)}/{len(self.items)} unique items."
-            )
         self.items = filtered_items
         self.items = self.__filter_invalid_items()
         self.__separate_to_paths()
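
The new discard calls matter when the same item_id can map to more than one entry in self.items: once an id has been routed to a bucket it is removed from the response set, so later duplicates are not counted again, and the explicit elif keeps unknown ids out of "failed". A minimal standalone sketch of that pattern (the _Distributor class and the plain dict of sets standing in for the real response object are illustrative assumptions, not part of this change):

# Illustrative sketch only: a plain dict of sets stands in for response.data.
class _Distributor:
    def __init__(self):
        self.results = {"succeeded": [], "skipped": [], "failed": []}

    def distribute(self, item_id, data, item):
        if item_id in data["success"]:
            self.results["succeeded"].append(item)
            data["success"].discard(item_id)  # consume the id
        elif item_id in data["skipped"]:
            self.results["skipped"].append(item)
            data["skipped"].discard(item_id)
        elif item_id in data["failed"]:
            self.results["failed"].append(item)
            data["failed"].discard(item_id)

data = {"success": {1}, "skipped": set(), "failed": set()}
d = _Distributor()
d.distribute(1, data, "item-a")
d.distribute(1, data, "item-a (duplicate)")  # id already consumed, routed nowhere
print(d.results)  # {'succeeded': ['item-a'], 'skipped': [], 'failed': []}
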
@@ -1077,10 +1076,15 @@ def execute(
 
         response = None
 
-        for i in range(0, len(self.item_ids), self.CHUNK_SIZE):
+        unique_item_ids = list(set(self.item_ids))
+        if len(self.items) > len(unique_item_ids):
+            self.reporter.log_info(
+                f"Dropping duplicates. Found {len(unique_item_ids)}/{len(self.items)} unique items."
+            )
+        for i in range(0, len(unique_item_ids), self.CHUNK_SIZE):
             tmp_response = self._service_provider.subsets.add_items(
                 project=self.project,
-                item_ids=self.item_ids[i : i + self.CHUNK_SIZE],  # noqa
+                item_ids=unique_item_ids[i : i + self.CHUNK_SIZE],  # noqa
                 subset=subset,
             )
 
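
In the execute() hunk the idea is to deduplicate item_ids once, log how many duplicates were dropped, and then send only the unique ids to the service in CHUNK_SIZE batches. A rough sketch of that flow under those assumptions (add_items_in_chunks and the send callback are hypothetical names, not the SDK's API):

# Illustrative sketch only: add_items_in_chunks and send are hypothetical names.
CHUNK_SIZE = 3

def add_items_in_chunks(item_ids, send):
    unique_item_ids = list(set(item_ids))  # drop duplicates up front
    if len(item_ids) > len(unique_item_ids):
        print(f"Dropping duplicates. Found {len(unique_item_ids)}/{len(item_ids)} unique items.")
    for i in range(0, len(unique_item_ids), CHUNK_SIZE):
        send(unique_item_ids[i : i + CHUNK_SIZE])  # one request per chunk

add_items_in_chunks([1, 1, 2, 3, 4, 4, 5], send=print)
# e.g. "Dropping duplicates. Found 5/7 unique items." then [1, 2, 3] and [4, 5]
# (set() does not guarantee the ordering of the ids)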