I would do it like this:
import pandas as pd

filename = 'filename.gzip'                # size 90 GB
hdf_fn = 'result.h5'
hdf_key = 'my_huge_df'
cols = ['colA', 'colB', 'colC', 'colZ']   # put here a list of all your columns
cols_to_index = ['colA', 'colZ']          # put here the list of YOUR columns that you want to index
chunksize = 10**6                         # you may want to adjust it ...

store = pd.HDFStore(hdf_fn)

for chunk in pd.read_table(filename, compression='gzip', header=None, names=cols, chunksize=chunksize):
    # don't index data columns in each iteration - we'll do it later
    store.append(hdf_key, chunk, data_columns=cols_to_index, index=False)

# index data columns in the HDFStore
store.create_table_index(hdf_key, columns=cols_to_index, optlevel=9, kind='full')
store.close()
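
Once the store is built, the indexed data columns let you pull matching rows straight from disk instead of loading the whole 90 GB into memory. A minimal sketch of how a query could look (the column names and filter values below are just placeholders, substitute your own):

import pandas as pd

# read only the rows where an indexed data column matches a condition
subset = pd.read_hdf('result.h5', 'my_huge_df', where="colA == 'some_value'")

# or open the store and select just a couple of columns for the matching rows
store = pd.HDFStore('result.h5')
subset = store.select('my_huge_df', where="colZ > 0", columns=['colA', 'colZ'])
store.close()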