
I am trying to upload a large file to my bucket using a parallel multipart upload. I came across the code from here and decided to use it since it is simple and easy to follow. However, running the program gives me a boto S3 response error: Access Denied.

boto.exception.S3ResponseError: S3ResponseError: 403 Forbidden 
<?xml version="1.0" encoding="UTF-8"?> 
<Error><Code>AccessDenied</Code><Message>Access Denied</Message><RequestId>BF24672A4459F15E</RequestId><HostId>SN94E8Sg3QeiNQdOoB0CNZmAKZkVSrae8ORBOcjN9mKl07LjYV8hHhNG5Ox2f2bC</HostId></Error> 

I have gone through all the different solutions for this error given here on Stack Overflow, and none of them turned out to be the problem. I have full access to the bucket and can read, write, and delete files in it without issue; the error shows up only when I run this code. s3cmd also works fine and reports no errors. Any help would be appreciated. The code and stack trace are pasted below:

Code:

import math 
from multiprocessing.dummy import Pool #using dummy for debugging 
import os 

from boto.s3.connection import S3Connection 
from filechunkio import FileChunkIO 
from ConfigParser import RawConfigParser, NoOptionError 

config = RawConfigParser() 
config.read('tm/aws.cfg') 

#conn = S3Connection(config.get('prodAws', 'aws_access_key_id'), config.get('prodAws', 'aws_secret_access_key')) 
acs_key = config.get('prodAws', 'aws_access_key_id') 
sec_key = config.get('prodAws', 'aws_secret_access_key') 
try: 
    default_bucket = config.get('prodAws', 'bucket') 
except NoOptionError, e: 
    print("Configuration error({0})".format(e.message)) 
    exit() 

def _upload_part(bucketname, aws_key, aws_secret, multipart_id, part_num,
                 keyname, offset, bytes, amount_of_retries=5):
    """
    Uploads a part with retries.
    """
    def _upload(retries_left=amount_of_retries):
        try:
            print('Start uploading part #%d ...' % part_num)
            conn = S3Connection(aws_key, aws_secret)
            bucket = conn.get_bucket(bucketname, validate=False)
            # Look up the in-progress multipart upload by id, then send
            # this chunk of the source file as one part.
            for mp in bucket.get_all_multipart_uploads():
                if mp.id == multipart_id:
                    with FileChunkIO(keyname, 'r', offset=offset, bytes=bytes) as fp:
                        mp.upload_part_from_file(fp=fp, part_num=part_num)
                    break
        except Exception as e:
            print e
            if retries_left:
                _upload(retries_left=retries_left - 1)
            else:
                print('Failed uploading part #%d' % part_num)
                raise e
        else:
            print('Uploaded part #%d' % part_num)

    _upload()


def upload(bucketname, aws_key, aws_secret, keyname, parallel_processes=5):
    """
    Parallel multipart upload.
    """
    conn = S3Connection(aws_key, aws_secret)
    bucket = conn.get_bucket(bucketname, validate=False)

    mp = bucket.initiate_multipart_upload(keyname)

    # Chunk size scales with the square root of the file size but never
    # drops below 5242880 bytes, the S3 minimum for every part but the last.
    source_size = os.stat(keyname).st_size
    bytes_per_chunk = max(int(math.sqrt(5242880) * math.sqrt(source_size)), 5242880)
    chunk_amount = int(math.ceil(source_size / float(bytes_per_chunk)))

    pool = Pool(processes=parallel_processes)
    for i in range(chunk_amount):
        offset = i * bytes_per_chunk
        remaining_bytes = source_size - offset
        bytes = min([bytes_per_chunk, remaining_bytes])
        part_num = i + 1
        #_upload_part(bucketname, aws_key, aws_secret, mp.id, part_num,
        #             keyname, offset, bytes)
        pool.apply_async(_upload_part, [bucketname, aws_key, aws_secret, mp.id,
                                        part_num, keyname, offset, bytes])
    pool.close()
    pool.join()

    # Complete the upload only if every part made it; otherwise abort it.
    if len(mp.get_all_parts()) == chunk_amount:
        mp.complete_upload()
        key = bucket.get_key(keyname)
    else:
        mp.cancel_upload()

upload(default_bucket, acs_key, sec_key, 'bigfile.txt')

Stack trace:

File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/threading.py", line 783, in __bootstrap 
    self.__bootstrap_inner() 
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/threading.py", line 810, in __bootstrap_inner 
    self.run() 
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/threading.py", line 763, in run 
    self.__target(*self.__args, **self.__kwargs) 
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/multiprocessing/pool.py", line 113, in worker 
    result = (True, func(*args, **kwds)) 
 File "/home/desktop/s3/parup.py", line 46, in _upload_part 
    _upload() 
 File "/home/desktop/s3/parup.py", line 39, in _upload 
    _upload(retries_left = retries_left - 1) 
 File "/home/desktop/s3/parup.py", line 39, in _upload 
    _upload(retries_left = retries_left - 1) 
 File "/home/desktop/s3/parup.py", line 39, in _upload 
    _upload(retries_left = retries_left - 1) 
 File "/home/desktop/s3/parup.py", line 39, in _upload 
    _upload(retries_left = retries_left - 1) 
 File "/home/desktop/s3/parup.py", line 39, in _upload 
    _upload(retries_left = retries_left - 1) 
 File "/home/desktop/s3/parup.py", line 42, in _upload 
    raise e 

The code appears to pull credentials from a file called 'tm/aws.cfg'. Did you put your credentials there? Alternatively, if you run the code from an EC2 instance with a role assigned, or from a machine where the AWS CLI already works, you can simply use 'S3Connection()' instead of passing credentials into the API call; it will (presumably) find the same credentials that 's3cmd' uses. – 2015-03-05 05:59:59
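
For illustration, a minimal sketch of that suggestion; the bucket name here is a placeholder, and it assumes boto can find credentials on its own:

from boto.s3.connection import S3Connection

# With no arguments, boto looks for credentials in the environment
# (AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY), then in ~/.boto or
# /etc/boto.cfg, and finally in an EC2 instance role, if one exists.
conn = S3Connection()
bucket = conn.get_bucket('my-bucket', validate=False)  # placeholder name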

Answer


Fixed the problem:

The issue was with the for mp in bucket.get_all_multipart_uploads() loop. Listing a bucket's multipart uploads is a separate bucket-level operation, and lacking permission for it would explain why this code returned 403 while plain reads, writes, and deletes worked. I removed the loop and replaced the if mp.id == multipart_id check by using the known multipart_id directly.
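
For reference, a minimal sketch of what the fixed worker might look like, assuming a boto.s3.multipart.MultiPartUpload handle is constructed by hand from the known upload id (the retry wrapper from the original code is omitted for brevity):

from boto.s3.connection import S3Connection
from boto.s3.multipart import MultiPartUpload
from filechunkio import FileChunkIO

def _upload_part(bucketname, aws_key, aws_secret, multipart_id, part_num,
                 keyname, offset, bytes):
    conn = S3Connection(aws_key, aws_secret)
    bucket = conn.get_bucket(bucketname, validate=False)
    # Rebuild a handle to the existing upload from its id instead of
    # calling get_all_multipart_uploads(), which requires bucket-level
    # list permission and was the apparent source of the 403.
    mp = MultiPartUpload(bucket)
    mp.id = multipart_id
    mp.key_name = keyname
    with FileChunkIO(keyname, 'r', offset=offset, bytes=bytes) as fp:
        mp.upload_part_from_file(fp=fp, part_num=part_num)

Because nothing is listed, only the per-object upload permission is needed, which would match the access the poster already had.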