
@kalloc
Last active June 9, 2022 20:30
#!/usr/bin/env python
import sys
import csv
import requests

# Optional first CLI argument: a Paras collection contract ID.
# Without it, the script crawls every listed token on the marketplace.
contract_id = None
if len(sys.argv) > 1:
    contract_id = sys.argv[1]
def crawler(contract_id=None, skip=0):
    """Fetch one page of tokens from the Paras API and flatten the fields we keep."""
    url = (
        'https://api-v2-mainnet.paras.id/token?'
        f'exclude_total_burn=true&__skip={skip}&__limit=100'
    )
    if contract_id:
        url = f'{url}&contract_id={contract_id}'
    print(url)
    r = requests.get(url)
    dct = r.json()
    # A full page of 100 results suggests there are more pages to fetch.
    has_more = len(dct["data"]["results"]) == 100
    print(has_more)
    return [
        {
            "owner_id": item["owner_id"],
            "contract_id": item["contract_id"],
            "price": item["price"],
            "creator_id": item["metadata"]["creator_id"],
        }
        for item in dct["data"]["results"]
    ], has_more
offset = 0
with open(f"{contract_id or 'all'}.csv", "w", newline="") as fd:
    dw = csv.DictWriter(fd, ["owner_id", "contract_id", "price", "creator_id"])
    dw.writeheader()
    while True:
        data, has_more = crawler(contract_id, offset)
        # Write before checking has_more so the final partial page
        # (fewer than 100 rows) is not silently dropped.
        if data:
            dw.writerows(data)
        offset += len(data)
        if not has_more:
            break
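
A quick sketch of how the resulting CSV might be consumed, assuming the script above was run without arguments (so the output file is all.csv); the tallying below is illustrative and not part of the gist:

import csv
from collections import Counter

# Count how many listed tokens each owner holds.
counts = Counter()
with open("all.csv", newline="") as fd:
    for row in csv.DictReader(fd):
        counts[row["owner_id"]] += 1

# Print the ten largest holders.
for owner, n in counts.most_common(10):
    print(owner, n)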