def read_in_chunks(file_object, chunk_size=1024):
    """Lazy function (generator) to read a file piece by piece.
    Default chunk size: 1k."""
    while True:
        data = file_object.read(chunk_size)
        if not data:
            # End of file reached; stop the generator.
            break
        yield data
def process_data(piece):
    print("Processing a piece of data.")
f = open('really_big_file.dat')
for piece in read_in_chunks(f):
    process_data(piece)
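As a minimal variant of the same pattern (not part of the original snippet), the loop can be wrapped in a with-block so the file is closed automatically even if processing raises an error. Opening in binary mode ('rb') is an assumption here, chosen because chunked reads of a .dat file typically work on raw bytes.

# Sketch: same generator, driven through a context manager.
# 'rb' (binary mode) is an assumption, not taken from the original code.
with open('really_big_file.dat', 'rb') as f:
    for piece in read_in_chunks(f):
        process_data(piece)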