#!/usr/bin/env python3
"""Download all gists for a user; pass the GitHub username as the first arg.

If a gist has already been downloaded before it is updated via "git pull".
Each gist directory also gets a description.txt with metadata and, when the
gist has comments, a comments.json dump.
"""
import json
import os
import re
import sys
from subprocess import call

# One page of the paginated "gists for user" endpoint.
API_URL = 'https://api.github.com/users/{0}/gists'


def slugify(value):
    """Return *value* lower-cased, with runs of non-word chars mapped to '_'."""
    # Raw strings so \w and \s reach the regex engine unmangled.
    value = re.sub(r'[^\w\s-]', '_', value).strip().lower()
    value = re.sub(r'[-\s]+', '_', value)
    return value


def _gist_dir(gist):
    """Build the local checkout path 'gists/<slug-of-first-file>-<id>'."""
    fname = next(iter(gist['files']))
    slug = slugify(fname) if fname else ''
    return os.path.join('gists', f'{slug}-{gist["id"]}')


def _sync_repo(gist, gist_dir):
    """Clone the gist repo if absent, otherwise pull the latest changes."""
    if os.path.exists(gist_dir):
        call(['git', '-C', gist_dir, 'pull'])
    else:
        call(['git', 'clone', gist['git_pull_url'], gist_dir])


def _write_metadata(gist, gist_dir, comments):
    """Write description.txt (always) and comments.json (when provided)."""
    if comments is not None:
        with open(os.path.join(gist_dir, 'comments.json'), 'w') as fh_comm:
            fh_comm.write(json.dumps(comments, indent=1, sort_keys=True))
    with open(os.path.join(gist_dir, 'description.txt'), 'w') as fh_desc:
        fh_desc.write(f"{gist['description']}\n")
        fh_desc.write(f"json_url: {gist['url']}\n")
        fh_desc.write(f"html_url: {gist['html_url']}\n")
        fh_desc.write(f"pull_url: {gist['git_pull_url']}\n")
        fh_desc.write(f"id: {gist['id']}\n")
        fh_desc.write(f"date_updated: {gist['updated_at']}\n")
        fh_desc.write(f"files: "
                      f"{json.dumps(gist['files'], indent=1, sort_keys=True)}\n")


def main():
    """Clone or update every gist of the user named on the command line."""
    # Deferred import: lets slugify() be imported without requests installed.
    import requests

    if len(sys.argv) < 2:
        sys.exit(f'usage: {sys.argv[0]} <github-user>')
    user = sys.argv[1]

    page = 1
    while True:
        # The gists endpoint is paginated (30 per page by default); walk
        # every page so users with many gists get all of them, not just
        # the first batch.
        r = requests.get(API_URL.format(user),
                         params={'per_page': 100, 'page': page})
        r.raise_for_status()  # fail loudly on unknown user / rate limiting
        gists = r.json()
        if not gists:
            break
        for gist in gists:
            gist_dir = _gist_dir(gist)
            _sync_repo(gist, gist_dir)
            comments = None
            if gist['comments']:
                resp = requests.get(gist['comments_url'])
                resp.raise_for_status()
                comments = resp.json()
            _write_metadata(gist, gist_dir, comments)
        page += 1


if __name__ == '__main__':
    main()