I'm new to multithreading with python and I've just tried the following solution. The problem is that the output file is empty each time and I can't find the problem :
from concurrent.futures import ThreadPoolExecutor
import threading
csv_writer_lock = threading.Lock()
def get_data(searchKey):
  #do some data scraping
  l1.append(data)
  l2.append(data)
  l = l1 + l2
  with csv_writer_lock:
    with open("results.csv", mode="a") as f1:
      writer = csv.writer(f1, delimiter=",")
      writer.writerows(l)
    f1.close()
  return l
def set_up_threads(urls):
  try:
    
    with ThreadPoolExecutor(max_workers=5) as executor:
        result = executor.map(get_data, urls["links"],timeout = 300)
  except Exception:
      print("Error")
  return result
df = pd.read_csv(file_path)
final = set_up_threads(df)
Somebody can point me in the direction of the error in this solution? thank you!