You can split the data into batches of 200 and process them one batch at a time, not starting the next batch until the previous one has finished.
Since your question doesn't include the details of the function, I'll assume you are calling a remote function at your_function_url with a POST request:
import requests
import concurrent.futures

def read_array(data):
    # POST a single item to the remote function.
    response = requests.post('your_function_url', json=data)
    print(response.status_code)

def send_requests_to_azure(batch):
    # Leaving the with-block shuts the executor down and waits for
    # every request in the batch, so the caller only moves on once
    # the whole batch has completed.
    with concurrent.futures.ThreadPoolExecutor() as executor:
        # Consuming the iterator re-raises any exception from read_array.
        list(executor.map(read_array, batch))
array_of_objects = [ ... ]  # Your array of data
batch_size = 200

for i in range(0, len(array_of_objects), batch_size):
    current_batch = array_of_objects[i:i + batch_size]
    send_requests_to_azure(current_batch)
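
If you also want to know which requests in a batch failed, a variation that submits each call individually and inspects the results could look like the sketch below. It reuses the read_array function above; the function name and the error message format are just illustrative:

def send_requests_with_results(batch):
    # Submit each item separately so failures can be inspected per request.
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = {executor.submit(read_array, item): item for item in batch}
        for future in concurrent.futures.as_completed(futures):
            item = futures[future]
            try:
                future.result()  # re-raises any exception from read_array
            except requests.RequestException as exc:
                print(f'Request for {item!r} failed: {exc}')

You can then call send_requests_with_results(current_batch) in the loop above in place of send_requests_to_azure.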