@alxfordy
Created May 24, 2022 12:51
Async TinyMan Pools Fetch
import asyncio
import math
import multiprocessing

import requests
from aiohttp import ClientSession

# These are methods of a class that provides self.json_pools (a list),
# self._logger, and self.get_pool_details (parses a single pool record).

async def fetch_pool(self, url, session):
    # Fetch one page of pools and append its results to the shared list.
    resp = await session.request(method="GET", url=url)
    data = await resp.json()
    self.json_pools.extend(data.get("results"))

async def make_pools_fetch(self, requests_needed: int):
    # Build one request per page of 10 pools and run them all concurrently.
    async with ClientSession() as session:
        tasks = list()
        for request in range(requests_needed):
            url = f"https://mainnet.analytics.tinyman.org/api/v1/pools/?limit=10&offset={request * 10}&ordering=-liquidity&verified_only=true&with_statistics=False"
            tasks.append(self.fetch_pool(url, session))
        await asyncio.gather(*tasks)

def refresh_pools(self, limit=1000):
    # NOTE: limit is currently unused; the page size is fixed at 10 in the URLs.
    self._logger.info("Fetching TinyMan Pools")
    # Initial synchronous request only reads the total pool count so we know
    # how many pages of 10 to fetch.
    r = requests.get("https://mainnet.analytics.tinyman.org/api/v1/pools/?limit=10&offset=0&ordering=-liquidity&verified_only=true&with_statistics=False")
    data = r.json()
    pool_count = data.get("count")
    requests_needed = math.ceil(float(pool_count) / 10)
    loop = asyncio.get_event_loop()
    loop.run_until_complete(self.make_pools_fetch(requests_needed))
    # Parse the raw pool JSON in parallel, leaving one core free.
    multiprocess_pool = multiprocessing.Pool(processes=multiprocessing.cpu_count() - 1)
    self.pools = multiprocess_pool.map(self.get_pool_details, self.json_pools)
    self._logger.info(f"Asynchronously Collected {len(self.json_pools)} Pools")
    return self.pools
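
For context, here is a minimal sketch of how these methods might be wrapped and called. The class name TinymanPoolFetcher, the logger setup, and the get_pool_details stub are assumptions for illustration, not part of the original gist.

import logging

class TinymanPoolFetcher:
    # Hypothetical wrapper class; only the attribute names match the gist.
    def __init__(self):
        self._logger = logging.getLogger(__name__)
        self.json_pools = []  # raw pool records appended by fetch_pool
        self.pools = []       # parsed results produced by refresh_pools

    def get_pool_details(self, pool_json):
        # Placeholder for the per-pool parsing step the gist maps over.
        return pool_json

    # fetch_pool, make_pools_fetch and refresh_pools from above go here.

if __name__ == "__main__":
    fetcher = TinymanPoolFetcher()
    pools = fetcher.refresh_pools()
    print(f"Fetched {len(pools)} pools")

Note that the multiprocessing step requires the instance and each pool record to be picklable, and the if __name__ == "__main__" guard matters on platforms that spawn worker processes rather than forking.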