Skip to content

Instantly share code, notes, and snippets.

@SavvyGuard
Last active September 10, 2024 06:53
Show Gist options
  • Save SavvyGuard/6115006 to your computer and use it in GitHub Desktop.

Revisions

  1. SavvyGuard revised this gist Aug 20, 2013. 1 changed file with 0 additions and 1 deletion.
    1 change: 0 additions & 1 deletion botos3upload.py
    Original file line number Diff line number Diff line change
    @@ -51,7 +51,6 @@ def percent_cb(complete, total):
    fp_num += 1
    print "uploading part %i" %fp_num
    mp.upload_part_from_file(fp, fp_num, cb=percent_cb, num_cb=10, size=PART_SIZE)
    fp.seek(PART_SIZE, 1)

    mp.complete_upload()

  2. SavvyGuard revised this gist Aug 19, 2013. 1 changed file with 2 additions and 2 deletions.
    4 changes: 2 additions & 2 deletions botos3upload.py
    Original file line number Diff line number Diff line change
    @@ -5,8 +5,8 @@
    import sys

    # Fill these in - you get them when you sign up for S3
    AWS_ACCESS_KEY_ID = 'AKIA****************'  # [REDACTED — leaked credential removed from this revision view]
    AWS_ACCESS_KEY_SECRET = '****************************************'  # [REDACTED]
    AWS_ACCESS_KEY_ID = ''
    AWS_ACCESS_KEY_SECRET = ''
    # Fill in info on data to upload
    # destination bucket name
    bucket_name = 'jwu-testbucket'
  3. SavvyGuard revised this gist Aug 19, 2013. 1 changed file with 52 additions and 26 deletions.
    78 changes: 52 additions & 26 deletions botos3upload.py
    Original file line number Diff line number Diff line change
    @@ -1,37 +1,63 @@
    import boto
    import boto.s3

    # Fill these in - you get them when you sign up for S3
    AWS_USER_NAME = ''
    AWS_ACCESS_KEY_ID = ''
    AWS_SECRET_ACCESS_KEY = ''

    bucket_name = 'test'
    dirpath = 'data/Standard/'
    import os.path
    import sys

    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    # Fill these in - you get them when you sign up for S3
    AWS_ACCESS_KEY_ID = 'AKIAI7YMNZS7PJ2GZ6UA'
    AWS_ACCESS_KEY_SECRET = '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'
    # Fill in info on data to upload
    # destination bucket name
    bucket_name = 'jwu-testbucket'
    # source directory
    sourceDir = 'testdata/'
    # destination directory name (on s3)
    destDir = ''

    #max size in bytes before uploading in parts. between 1 and 5 GB recommended
    MAX_SIZE = 20 * 1000 * 1000
    #size of parts when uploading in parts
    PART_SIZE = 6 * 1000 * 1000

    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_ACCESS_KEY_SECRET)

    import boto.s3
    bucket = conn.create_bucket(bucket_name,
    location=boto.s3.connection.Location.DEFAULT)

    from os import walk

    testfilenames = []
    for (dirpath, dirname, filename) in walk(dirpath):
    testfilenames.extend(filename)
    uploadFileNames = []
    for (sourceDir, dirname, filename) in os.walk(sourceDir):
    uploadFileNames.extend(filename)
    break

    for testfile in testfilenames:
    def percent_cb(complete, total):
    sys.stdout.write('.')
    sys.stdout.flush()

    for filename in uploadFileNames:
    sourcepath = os.path.join(sourceDir + filename)
    destpath = os.path.join(destDir, filename)
    print 'Uploading %s to Amazon S3 bucket %s' % \
    (dirpath + testfile, bucket_name)

    import sys
    def percent_cb(complete, total):
    sys.stdout.write('.')
    sys.stdout.flush()

    from boto.s3.key import Key
    k = Key(bucket)
    k.key = 'data/Standard/' + testfile
    k.set_contents_from_filename(dirpath + testfile,
    cb=percent_cb, num_cb=10)
    (sourcepath, bucket_name)

    filesize = os.path.getsize(sourcepath)
    if filesize > MAX_SIZE:
    print "multipart upload"
    mp = bucket.initiate_multipart_upload(destpath)
    fp = open(sourcepath,'rb')
    fp_num = 0
    while (fp.tell() < filesize):
    fp_num += 1
    print "uploading part %i" %fp_num
    mp.upload_part_from_file(fp, fp_num, cb=percent_cb, num_cb=10, size=PART_SIZE)
    fp.seek(PART_SIZE, 1)

    mp.complete_upload()

    else:
    print "singlepart upload"
    k = boto.s3.key.Key(bucket)
    k.key = destpath
    k.set_contents_from_filename(sourcepath,
    cb=percent_cb, num_cb=10)
  4. SavvyGuard revised this gist Aug 19, 2013. 1 changed file with 2 additions and 2 deletions.
    4 changes: 2 additions & 2 deletions botos3upload.py
    Original file line number Diff line number Diff line change
    @@ -17,8 +17,8 @@
    from os import walk

    testfilenames = []
    for (dirpath, dirnames, filenames) in walk(dirpath):
    testfilenames.extend(filenames)
    for (dirpath, dirname, filename) in walk(dirpath):
    testfilenames.extend(filename)
    break

    for testfile in testfilenames:
  5. James revised this gist Jul 30, 2013. 1 changed file with 1 addition and 1 deletion.
    2 changes: 1 addition & 1 deletion botos3upload.py
    Original file line number Diff line number Diff line change
    @@ -5,7 +5,7 @@
    AWS_ACCESS_KEY_ID = ''
    AWS_SECRET_ACCESS_KEY = ''

    bucket_name = 'dvargas'
    bucket_name = 'test'
    dirpath = 'data/Standard/'

    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
  6. James renamed this gist Jul 30, 2013. 1 changed file with 0 additions and 0 deletions.
    File renamed without changes.
  7. James created this gist Jul 30, 2013.
    37 changes: 37 additions & 0 deletions gistfile1.txt
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,37 @@
    import boto

    # Fill these in - you get them when you sign up for S3
    AWS_USER_NAME = ''
    AWS_ACCESS_KEY_ID = ''
    AWS_SECRET_ACCESS_KEY = ''

    bucket_name = 'dvargas'
    dirpath = 'data/Standard/'

    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)

    import boto.s3
    bucket = conn.create_bucket(bucket_name,
    location=boto.s3.connection.Location.DEFAULT)

    from os import walk

    testfilenames = []
    for (dirpath, dirnames, filenames) in walk(dirpath):
    testfilenames.extend(filenames)
    break

    for testfile in testfilenames:
    print 'Uploading %s to Amazon S3 bucket %s' % \
    (dirpath + testfile, bucket_name)

    import sys
    def percent_cb(complete, total):
    sys.stdout.write('.')
    sys.stdout.flush()

    from boto.s3.key import Key
    k = Key(bucket)
    k.key = 'data/Standard/' + testfile
    k.set_contents_from_filename(dirpath + testfile,
    cb=percent_cb, num_cb=10)