I am running into an issue when trying to removing duplicates from a list.
def my_list_bucket(self, bucketName,  limit=sys.maxsize): #delimiter='/'):
    a_bucket = self.storage_client.lookup_bucket(bucketName)
    bucket_iterator = a_bucket.list_blobs()
        for resource in bucket_iterator:
            path_parts = resource.name.split('/')
            date_folder = path_parts[0]
            publisher_folder = path_parts[1]
            desired_path = date_folder + '/' + publisher_folder + '/'
            new_list = []
            for path in desired_path:
                if desired_path not in new_list:
                    new_list.append(desired_path)
            print(new_list)
            limit = limit - 1
            if limit <= `0:
                break
This is the results I get:
20230130/adelphic/
20230130/adelphic/
20230130/adelphic/
20230130/adelphic/
20230130/instacart/
20230130/instacart/
20230130/instacart/
20230130/instacart/
Its not removing the duplicates from the list as the duplicates are still there.
The results I want is:
20230130/adelphic/
20230130/instacart/
I have tried new_list = list(set(publisher_folder)) and it returns:
'i', 'p', 'a', 'c', 'd', 'h', 'e', 'l'
'i', 'p', 'a', 'c', 'd', 'h', 'e', 'l'
'i', 'p', 'a', 'c', 'd', 'h', 'e', 'l'
 
    