I am running the following try-except code:
try:
    paths = file_system_client.get_paths("{0}/{1}/0/{2}/{3}/{4}".format(container_initial_folder, container_second_folder, chronological_date[0], chronological_date[1], chronological_date[2]), recursive=True)
    list_of_paths=["abfss://{0}@{1}.dfs.core.windows.net/".format(storage_container_name, storage_account_name)+path.name for path in paths if ".avro" in path.name]
except Exception as e:
    if e=="AccountIsDisabled":
        pass
    else:
        print(e)
I want neither to print the following error when my try-except falls upon it, nor to stop my program execution when this error occurs:
"(AccountIsDisabled) The specified account is disabled.
RequestId:3159a59e-d01f-0091-5f71-2ff884000000
Time:2020-05-21T13:09:03.3540242Z"
I just want to pass over it and print any other error/exception (e.g. TypeError, ValueError, etc.) that may occur.
Is this feasible in Python 3? 
Please note that the .get_paths() method belongs to the azure.storage.filedatalake module, which lets Python connect directly to Azure Data Lake and extract paths.
I am giving this note to point out that the exception I am trying to bypass is not a built-in exception.
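To make the intent concrete, the behaviour I am after looks roughly like the sketch below. It is only a sketch: I match on the text of the exception with str(e), because e == "AccountIsDisabled" compares an exception object to a string and can never be True; whether the SDK also exposes a cleaner error_code attribute to check instead is an assumption on my part.
try:
    paths = file_system_client.get_paths(
        "{0}/{1}/0/{2}/{3}/{4}".format(container_initial_folder, container_second_folder,
                                       chronological_date[0], chronological_date[1], chronological_date[2]),
        recursive=True)
    # iterate inside the try block as well, in case the error is only raised while paging through the results
    list_of_paths = ["abfss://{0}@{1}.dfs.core.windows.net/".format(storage_container_name, storage_account_name) + path.name
                     for path in paths if ".avro" in path.name]
except Exception as e:
    if "AccountIsDisabled" in str(e):
        pass  # silently skip the disabled account and carry on
    else:
        print(e)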
[Update] In short, after following the proposed answers I modified my code to this:
import sys
import functools  # needed by the functools.reduce call further down
import operator   # needed by operator.iconcat further down
from concurrent.futures import ThreadPoolExecutor
from azure.storage.filedatalake._models import StorageErrorException
from azure.storage.filedatalake import DataLakeServiceClient, DataLakeFileClient
storage_container_name="name1" #confidential
storage_account_name="name2" #confidential
storage_account_key="password" #confidential 
container_initial_folder="name3" #confidential
container_second_folder="name4" #confidential
def datalake_connector(storage_account_name, storage_account_key):
    global service_client
    datalake_client = DataLakeServiceClient(account_url="{0}://{1}.dfs.core.windows.net".format("https", storage_account_name), credential=storage_account_key)
    print("Client successfuly created!")
    return datalake_client
def create_list_paths(chronological_date, 
                      container_initial_folder="name3", 
                      container_second_folder="name4", 
                      storage_container_name="name1", 
                      storage_account_name="name2"
                      ):
    list_of_paths=list()
    print("1. success")
    paths = file_system_client.get_paths("{0}/{1}/0/{2}/{3}/{4}".format(container_initial_folder, container_second_folder, chronological_date[0], chronological_date[1], chronological_date[2]), recursive=True)
    print("2. success")
    list_of_paths=["abfss://{0}@{1}.dfs.core.windows.net/".format(storage_container_name, storage_account_name)+path.name for path in paths if ".avro" in path.name]
    print("3. success")
    list_of_paths=functools.reduce(operator.iconcat, result, [])
    return list_of_paths
service_client = datalake_connector(storage_account_name, storage_account_key)
file_system_client = service_client.get_file_system_client(file_system=storage_container_name)
try:
    list_of_paths=[]
    executor=ThreadPoolExecutor(max_workers=8)
    print("Start path extraction!")
    list_of_paths=[executor.submit(create_list_paths, i, container_initial_folder, container_second_folder, storage_container_name, storage_account_name).result() for i in date_list]
except:
    print("no success")
    print(sys.exc_info())
Unfortunately the StorageErrorException cannot be handled for some reason; the same AccountIsDisabled error still ends up on stdout.
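For completeness, this is the kind of narrower handler I am aiming for inside create_list_paths, so that each worker skips the disabled account instead of failing. Two assumptions here: that StorageErrorException (rather than, say, azure.core.exceptions.HttpResponseError) is the class actually raised by my installed version of azure-storage-filedatalake, and that get_paths may not raise until the returned paths are iterated, which is why the list comprehension sits inside the try block.
def create_list_paths(chronological_date,
                      container_initial_folder="name3",
                      container_second_folder="name4",
                      storage_container_name="name1",
                      storage_account_name="name2"):
    try:
        paths = file_system_client.get_paths(
            "{0}/{1}/0/{2}/{3}/{4}".format(container_initial_folder, container_second_folder,
                                           chronological_date[0], chronological_date[1], chronological_date[2]),
            recursive=True)
        return ["abfss://{0}@{1}.dfs.core.windows.net/".format(storage_container_name, storage_account_name) + path.name
                for path in paths if ".avro" in path.name]
    except StorageErrorException as e:
        if "AccountIsDisabled" in str(e):
            return []  # disabled account: return an empty list and keep the other workers going
        raise  # re-raise anything else so the outer except still reports it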