You don't need any external modules — in fact, you don't need to import
anything at all.
This chops large_file.dat into 50-megabyte pieces and writes each piece to
disk — but you could just as well replace the file writing with whatever API call you need.
# Split a large binary file into fixed-size chunk files on disk.
# Chunk files are named "<filename>.0001", "<filename>.0002", ... so they
# sort back into order; reassemble with e.g. `cat large_file.dat.*`.
filename = "large_file.dat"
chunk_size = 50_000_000  # bytes per chunk; each chunk must fit in memory
chunk_num = 1
with open(filename, "rb") as input_file:
    # read() returns b"" at end of file, which is falsy — the walrus
    # assignment makes that the natural loop-exit condition.
    while chunk := input_file.read(chunk_size):
        # Zero-padded 4-digit suffix keeps lexicographic == numeric order.
        with open(f"{filename}.{chunk_num:04d}", "wb") as output_file:
            output_file.write(chunk)
        chunk_num += 1