I am trying to concatenate three separate dataframes (bs_df, income_df, cash_df) for each ticker and then append the combined result to a dataframe df. The first pass through the for loop appends successfully, but on the next pass, when it tries to append the new dataframe to the existing one, it throws an AssertionError. I think this is because the dataframes have different columns (reference), but I cannot figure out how to work around this and still build a dataframe that contains all of the column names, filling in NaN wherever a dataframe is missing a value for a column that another dataframe has.
Here is the code I am running:
from yahoofinancials import YahooFinancials
import pandas as pd
tickers = ["PIH","AAPL","AMZN"]
df = pd.DataFrame()
for ticker in tickers:
    print (ticker)
    # Updates every loop
    ticker_yahoo_financials = YahooFinancials(ticker)
    # Get financial info
    bs_hist = ticker_yahoo_financials.get_financial_stmts('annual', 'balance')
    income_hist = ticker_yahoo_financials.get_financial_stmts('annual', 'income')
    cash_hist = ticker_yahoo_financials.get_financial_stmts('annual', 'cash')
    # Create dataframes and combine them
    bs_df = pd.DataFrame(list(bs_hist['balanceSheetHistory'][ticker][0].values()))
    income_df = pd.DataFrame(list(income_hist['incomeStatementHistory'][ticker][0].values()))
    cash_df = pd.DataFrame(list(cash_hist['cashflowStatementHistory'][ticker][0].values()))
    comb = pd.concat([bs_df, income_df, cash_df], axis=1)
    df = df.append(comb)
Here is the error message:
---------------------------------------------------------------------------
AssertionError                            Traceback (most recent call last)
<ipython-input-23-f476ca9fbb70> in <module>()
     19     cash_df = pd.DataFrame(list(cash_hist['cashflowStatementHistory'][ticker][0].values()))
     20     comb = pd.concat([bs_df, income_df, cash_df], axis=1)
---> 21     df = df.append(comb)
C:\ProgramData\Anaconda3\lib\site-packages\pandas\core\frame.py in append(self, other, ignore_index, verify_integrity)
   5192             to_concat = [self, other]
   5193         return concat(to_concat, ignore_index=ignore_index,
-> 5194                       verify_integrity=verify_integrity)
   5195 
   5196     def join(self, other, on=None, how='left', lsuffix='', rsuffix='',
C:\ProgramData\Anaconda3\lib\site-packages\pandas\core\reshape\concat.py in concat(objs, axis, join, join_axes, ignore_index, keys, levels, names, verify_integrity, copy)
    211                        verify_integrity=verify_integrity,
    212                        copy=copy)
--> 213     return op.get_result()
    214 
    215 
C:\ProgramData\Anaconda3\lib\site-packages\pandas\core\reshape\concat.py in get_result(self)
    406             new_data = concatenate_block_managers(
    407                 mgrs_indexers, self.new_axes, concat_axis=self.axis,
--> 408                 copy=self.copy)
    409             if not self.copy:
    410                 new_data._consolidate_inplace()
C:\ProgramData\Anaconda3\lib\site-packages\pandas\core\internals.py in concatenate_block_managers(mgrs_indexers, axes, concat_axis, copy)
   5205         blocks.append(b)
   5206 
-> 5207     return BlockManager(blocks, axes)
   5208 
   5209 
C:\ProgramData\Anaconda3\lib\site-packages\pandas\core\internals.py in __init__(self, blocks, axes, do_integrity_check, fastpath)
   3031 
   3032         if do_integrity_check:
-> 3033             self._verify_integrity()
   3034 
   3035         self._consolidate_check()
C:\ProgramData\Anaconda3\lib\site-packages\pandas\core\internals.py in _verify_integrity(self)
   3247                                  'block items\n# manager items: {0}, # '
   3248                                  'tot_items: {1}'.format(
-> 3249                                      len(self.items), tot_items))
   3250 
   3251     def apply(self, f, axes=None, filter=None, do_integrity_check=False,
AssertionError: Number of manager items must equal union of block items
# manager items: 67, # tot_items: 68
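For context, this is the kind of workaround I was imagining, though I have not been able to confirm it is the right approach: drop duplicate column names after the column-wise concat (for example, a field like netIncome may appear in both the income statement and the cash flow statement), collect the per-ticker frames in a list, and concatenate them once at the end so that missing columns are filled with NaN by the outer join. The column names and structure below are just my assumptions based on the data I am getting back:

from yahoofinancials import YahooFinancials
import pandas as pd

tickers = ["PIH", "AAPL", "AMZN"]
frames = []
for ticker in tickers:
    yf = YahooFinancials(ticker)
    bs_hist = yf.get_financial_stmts('annual', 'balance')
    income_hist = yf.get_financial_stmts('annual', 'income')
    cash_hist = yf.get_financial_stmts('annual', 'cash')
    bs_df = pd.DataFrame(list(bs_hist['balanceSheetHistory'][ticker][0].values()))
    income_df = pd.DataFrame(list(income_hist['incomeStatementHistory'][ticker][0].values()))
    cash_df = pd.DataFrame(list(cash_hist['cashflowStatementHistory'][ticker][0].values()))
    comb = pd.concat([bs_df, income_df, cash_df], axis=1)
    # Drop duplicated column names, since I suspect duplicate columns are
    # what trips up the row-wise append.
    comb = comb.loc[:, ~comb.columns.duplicated()]
    frames.append(comb)
# Concatenate once at the end; the default outer join should align on the
# union of columns and fill the gaps with NaN.
df = pd.concat(frames, ignore_index=True, sort=False)

Is this a reasonable way to handle it, or is there a better pattern for building up a dataframe like this?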