Created
May 24, 2022 12:51
Revisions
alxfordy revised this gist
May 24, 2022. 1 changed file with 7 additions and 7 deletions.
alxfordy created this gist
May 24, 2022.
import asyncio
import math
import multiprocessing

import requests
from aiohttp import ClientSession


# Methods from a TinyMan pool-fetching class; `self` refers to an instance that
# owns `json_pools`, `pools`, a `_logger`, and a `get_pool_details` parser.

async def fetch_pool(self, url, session):
    # Fetch one page of pools and append its results to the shared list.
    resp = await session.request(method="GET", url=url)
    json = await resp.json()
    self.json_pools.extend(json.get("results"))

async def make_pools_fetch(self, requests_needed: int):
    # Fire off every page request concurrently over a single aiohttp session.
    async with ClientSession() as session:
        tasks = list()
        for request in range(requests_needed):
            url = f"https://mainnet.analytics.tinyman.org/api/v1/pools/?limit=10&offset={request * 10}&ordering=-liquidity&verified_only=true&with_statistics=False"
            tasks.append(self.fetch_pool(url, session))
        await asyncio.gather(*tasks)

def refresh_pools(self, limit=1000):
    self._logger.info("Fetching TinyMan Pools")
    # One synchronous request to learn the total pool count, then page through
    # the API ten pools at a time.
    r = requests.get("https://mainnet.analytics.tinyman.org/api/v1/pools/?limit=10&offset=0&ordering=-liquidity&verified_only=true&with_statistics=False")
    data = r.json()
    pool_count = data.get("count")
    requests_needed = math.ceil(float(pool_count) / 10)
    loop = asyncio.get_event_loop()
    loop.run_until_complete(self.make_pools_fetch(requests_needed))
    # Parse the collected JSON in parallel, leaving one CPU core free.
    multiprocess_pool = multiprocessing.Pool(processes=multiprocessing.cpu_count() - 1)
    self.pools = multiprocess_pool.map(self.get_pool_details, self.json_pools)
    self._logger.info(f"Asynchronously Collected {len(self.json_pools)} Pools")
    return self.pools
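The three methods above live inside a class that supplies `json_pools`, `pools`, `_logger`, and `get_pool_details`, none of which appear in the gist. Below is a minimal sketch of such a wrapper; the class name `TinymanPoolClient`, the logger setup, and the `get_pool_details` stub are assumptions for illustration, not part of the original code. Pasting the gist's three methods into the class body makes it runnable end to end.

import logging

class TinymanPoolClient:
    # Hypothetical wrapper class (name and structure assumed, not from the gist).

    def __init__(self):
        self._logger = logging.getLogger(__name__)
        self.json_pools = []   # raw pool dicts accumulated by fetch_pool
        self.pools = []        # parsed results produced by refresh_pools

    # fetch_pool, make_pools_fetch and refresh_pools from the gist go here.

    def get_pool_details(self, pool_json):
        # Placeholder parser; the real implementation presumably extracts the
        # fields the caller cares about from each pool dict.
        return pool_json

if __name__ == "__main__":
    # Guard is needed because refresh_pools spawns a multiprocessing.Pool.
    client = TinymanPoolClient()
    pools = client.refresh_pools()
    print(f"Collected {len(pools)} verified TinyMan pools")

The split between aiohttp for the I/O-bound page fetches and a multiprocessing pool for the CPU-bound parsing is the gist's own design; on Python 3.7+ the explicit get_event_loop()/run_until_complete pair could likely be replaced with a single asyncio.run() call.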