Skip to content

Instantly share code, notes, and snippets.

@alxfordy
Created May 24, 2022 12:51
Show Gist options
  • Save alxfordy/f0956eb5f43c59fed3614c78b07c9e65 to your computer and use it in GitHub Desktop.

Revisions

  1. alxfordy revised this gist May 24, 2022. 1 changed file with 7 additions and 7 deletions.
    14 changes: 7 additions & 7 deletions tinyman.py
    Original file line number Diff line number Diff line change
    @@ -3,13 +3,13 @@ async def fetch_pool(self, url, session):
    json = await resp.json()
    self.json_pools.extend(json.get("results"))

    async def make_pools_fetch(self, requests_needed: int):
    async with ClientSession() as session:
    tasks = list()
    for request in range(requests_needed):
    url = f"https://mainnet.analytics.tinyman.org/api/v1/pools/?limit=10&offset={request * 10}&ordering=-liquidity&verified_only=true&with_statistics=False"
    tasks.append(self.fetch_pool(url, session))
    await asyncio.gather(*tasks)
    async def make_pools_fetch(self, requests_needed: int):
    async with ClientSession() as session:
    tasks = list()
    for request in range(requests_needed):
    url = f"https://mainnet.analytics.tinyman.org/api/v1/pools/?limit=10&offset={request * 10}&ordering=-liquidity&verified_only=true&with_statistics=False"
    tasks.append(self.fetch_pool(url, session))
    await asyncio.gather(*tasks)

    def refresh_pools(self, limit=1000):
    self._logger.info("Fetching TinyMan Pools")
  2. alxfordy created this gist May 24, 2022.
    25 changes: 25 additions & 0 deletions tinyman.py
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,25 @@
    async def fetch_pool(self, url, session):
        """Fetch one page of pools from *url* and collect its results.

        Parameters
        ----------
        url : str
            Fully-formed TinyMan pools API URL for a single 10-item page.
        session : aiohttp.ClientSession
            Shared session used to issue the GET request.

        Side effect: extends ``self.json_pools`` with the page's
        ``results`` list; adds nothing when the key is missing or null.
        """
        resp = await session.request(method="GET", url=url)
        # Renamed from `json`, which shadowed the stdlib module name.
        payload = await resp.json()
        # `or []` guards against a missing/None "results" key (e.g. an
        # error payload) — the original would raise TypeError in extend().
        self.json_pools.extend(payload.get("results") or [])

    async def make_pools_fetch(self, requests_needed: int):
        """Fetch *requests_needed* 10-item pool pages concurrently.

        Opens one shared ``ClientSession``, builds the paginated URL for
        each page, and gathers a ``self.fetch_pool`` task per page so
        every request runs in flight at once.
        """
        async with ClientSession() as session:
            page_urls = (
                f"https://mainnet.analytics.tinyman.org/api/v1/pools/?limit=10&offset={page * 10}&ordering=-liquidity&verified_only=true&with_statistics=False"
                for page in range(requests_needed)
            )
            await asyncio.gather(
                *(self.fetch_pool(page_url, session) for page_url in page_urls)
            )

    def refresh_pools(self, limit=1000):
        """Rebuild ``self.pools`` from the TinyMan analytics API.

        Queries the API once for the total pool count, fetches every
        10-item page concurrently via ``make_pools_fetch``, then maps
        ``self.get_pool_details`` over the raw pool dicts in a process
        pool.

        Parameters
        ----------
        limit : int
            NOTE(review): currently unused — the page size is hard-coded
            to 10 and every page is fetched. Kept for caller
            compatibility; confirm intent before wiring it in.

        Returns
        -------
        list
            The freshly built ``self.pools``.
        """
        self._logger.info("Fetching TinyMan Pools")
        # Timeout + status check so a stalled or failing API call cannot
        # hang the refresh or silently yield a bogus count.
        r = requests.get(
            "https://mainnet.analytics.tinyman.org/api/v1/pools/?limit=10&offset=0&ordering=-liquidity&verified_only=true&with_statistics=False",
            timeout=30,
        )
        r.raise_for_status()
        data = r.json()
        # `or 0` tolerates a payload with a missing/null "count".
        pool_count = data.get("count") or 0
        requests_needed = math.ceil(float(pool_count) / 10)
        # asyncio.run() replaces the deprecated
        # get_event_loop()/run_until_complete pair and tears the loop down.
        asyncio.run(self.make_pools_fetch(requests_needed))
        # max(1, ...) keeps the pool valid on single-core hosts (the
        # original could pass processes=0); the context manager closes
        # the worker processes, which the original leaked.
        with multiprocessing.Pool(processes=max(1, multiprocessing.cpu_count() - 1)) as worker_pool:
            self.pools = worker_pool.map(self.get_pool_details, self.json_pools)
        self._logger.info(f"Asynchronously Collected {len(self.json_pools)} Pools")
        return self.pools