def worker():
    # Worker loop inferred from the excerpt's `continue`/`finally`: pull
    # (depth, url) items from the shared queue until the program exits.
    while True:
        depth, url = fetchq.get()
        try:
            if depth > MAX_DEPTH:
                continue
            _, data, found = extract(url)
            # list.append is atomic under the GIL, so no extra lock is needed
            result.append((depth, url, data))
            for url in found:
                fetchq.put((depth + 1, url))
        finally:
            # acknowledge the item even when it is skipped or extract() raises
            fetchq.task_done()
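# The fragment above is only the worker body; the queue, the shared result
# list, and the threads that run it are not shown in the excerpt. Below is a
# minimal sketch of that scaffolding, assuming the worker() wrapper above and
# that extract() and MAX_DEPTH are defined as in the excerpt; the crawl()
# name and the worker count are illustrative, not the original code.
import threading
from queue import Queue

fetchq = Queue()   # (depth, url) work items shared by all worker threads
result = []        # (depth, url, data) triples collected by the workers

def crawl(seed_urls, num_workers=8):
    for url in seed_urls:
        fetchq.put((0, url))        # seed URLs start at depth 0
    for _ in range(num_workers):
        threading.Thread(target=worker, daemon=True).start()
    fetchq.join()                   # block until every queued URL is task_done()
    return result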
def crawl_async(seed_urls):
    # Coroutine-based variant (name and signature assumed from the excerpt's
    # `yield from` / `return`): crawl one depth level at a time.
    results = []
    to_fetch = list(seed_urls)
    for depth in range(MAX_DEPTH + 1):
        # fetch every URL at this depth concurrently
        batch = yield from ex_multi_async(to_fetch)
        to_fetch = []
        for url, data, found in batch:
            results.append((depth, url, data))
            to_fetch.extend(found)  # links found here form the next level
    return results
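# The excerpt leaves ex_multi_async undefined. One plausible reading, sketched
# below as an assumption rather than the original implementation, is a
# coroutine that runs the blocking extract() for each URL of the current
# level in the default thread pool and gathers the results. The generator
# based @asyncio.coroutine style matches the `yield from` syntax above and
# requires a Python version that still supports it (3.4 through 3.10).
import asyncio

@asyncio.coroutine
def ex_multi_async(urls):
    loop = asyncio.get_event_loop()
    # one executor future per URL; extract() is assumed to return
    # (url, data, found) as in the threaded version
    futures = [loop.run_in_executor(None, extract, url) for url in urls]
    batch = yield from asyncio.gather(*futures)
    return list(batch)

# Driving the crawl: run the level-by-level coroutine to completion.
loop = asyncio.get_event_loop()
pages = loop.run_until_complete(crawl_async(["https://example.com"]))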