I want to get the data in JSON format. Right now I am getting it as a dictionary of lists, which is a bit messy for me. Here is my code:
my_dict = {"job_title":[],"time_posted":[],"number_of_proposal":[],"page_link":[]};
for page_num in range(1, 12):
    time.sleep(3)
    url = (
        f'my_url').format(page_num)
    print(url)
    headers = requests.utils.default_headers()
    print(headers)
    headers.update(
        {'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0', })
    print(headers)
    r = requests.get(url, headers=headers).text
    soup = BeautifulSoup(r, 'lxml')
    box = soup.select('.item__top_container⤍ListItem⤚3pRrO')
    for i in box:
        job_title = i.select('.item__title⤍ListItem⤚2FRMT')[0].text.lower()
        job_title = job_title.replace('opportunity', ' opportunity').replace(
            'urgent', ' urgent').strip()
        print(job_title)
        time_posted = i.select('time')[0].text.lower()
        remove_month_year = ["month", "year"]
        print(time_posted)
        proposal = i.select(
            '.item__info⤍ListItem⤚1ci50 li:nth-child(3)')[0].text.replace('Proposals', '').strip()
        keywords = ['scrap', 'data mining']
        if(any(key_words in job_title for key_words in keywords)):
            if(not any(remove_m_y in time_posted for remove_m_y in remove_month_year)):
                   my_dict["job_title"].append(job_title)
                   my_dict["time_posted"].append(time_posted)
                   my_dict["number_of_proposal"].append(proposal)
                   my_dict["page_link"].append(url)
My dictionary data looks like this:
{'job_title': ['web scraping of product reviews', 'yell web scraping in python', 'google business scraping'], 'time_posted': ['6 days ago', '9 days ago', '3 days ago'], 'page_link': ['url1', 'url2', 'url3']}
My expected result would look like this:
{"job_title":"web scraping of product reviews","time_posted":"6 days ago","page_link":"url1"},{"job_title":"yell web scraping in python","time_posted":"9 days ago","page_link":"url2"}