def next_requests(self):
    """Yield up to ``self.redis_batch_size`` requests popped from the scheduler queue.

    Pops entries FIFO from ``self.crawler.engine.slot.scheduler.queue``.
    Falsy entries are skipped with a debug log; popping stops as soon as
    the queue is empty or the batch size is reached.

    Yields:
        Request objects taken from the front of the scheduler queue.
    """
    found = 0
    # TODO: Use redis pipeline execution.
    # Hoist the long attribute chain out of the loop.
    queue = self.crawler.engine.slot.scheduler.queue
    while found < self.redis_batch_size:
        # Guard BEFORE popping: list.pop(0) on an empty list raises
        # IndexError, so a falsy-check on the popped value cannot
        # detect emptiness.
        if not queue:
            # Queue empty.
            break
        req = queue.pop(0)
        if req:
            yield req
            found += 1
        else:
            # Falsy entry — nothing to yield. Log the raw value: a falsy
            # ``req`` has no ``.url`` attribute to format.
            self.logger.debug("Request not made from data: %r", req)
    if found:
        self.logger.debug("Read %s requests from '%s'", found, self.redis_key)