I have created a word2vec model with the gensim library (Python 3) and saved it to local disk. I want to upload that file to my S3 bucket. The model itself was created successfully, but while uploading it to the bucket I get this error:

UnicodeDecodeError: 'utf-8' codec can't decode byte 0x80 in position 0: invalid start byte
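
For reference, the model was created and saved roughly along these lines (a simplified sketch; the actual corpus and training parameters are not shown here):

from os.path import expanduser
from gensim.models import Word2Vec

sentences = [["some", "tokenised", "sentence"], ["another", "tokenised", "sentence"]]
model = Word2Vec(sentences, min_count=1)
# save() writes the model as a binary file on disk
model.save(expanduser("~") + '/modelsdata/job_vectors/237091_model')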

Some links suggest specifying an encoding to avoid this kind of error. Does that apply to the word2vec model file that gensim produces? If so, what encoding should I use? If not, is there any other way I could upload the file? Here is the code I use to upload a file to my S3 bucket:

import os
import boto
from boto.s3.key import Key
from os.path import expanduser

def upload_file(aws_access_key_id, aws_secret_access_key, bucket_name, bucket_folders, path_to_file, file_name, job_id):
    try: 
        conn = boto.connect_s3(aws_access_key_id, aws_secret_access_key)
    except Exception as error: 
        #LOGGER.info( "Cannot upload %s job vector to aws due to connection error in aws")
        #LOGGER.exception(error)
        print("connection error")

    if conn:
        bucket = conn.get_bucket(bucket_name)
        check_file_in_bucket = bucket_folders + file_name
        if bucket.lookup(check_file_in_bucket): 
            # deleting the existing file on server
            (bucket.lookup(check_file_in_bucket)).delete()
        k = Key(bucket)
        k.key = check_file_in_bucket

        upload_file = path_to_file + file_name
        try:

            if os.path.isfile(upload_file):
                print("file present")
                upload_file = open(upload_file, 'r+')
                try:
                    size = os.fstat(upload_file.fileno()).st_size
                except:
                    # Not all file objects implement fileno(),
                    # so we fall back on this
                    upload_file.seek(0, os.SEEK_END)
                    size = upload_file.tell()
                sent = k.set_contents_from_file(upload_file, rewind=True)
                # Rewind for later use
                upload_file.seek(0)
                if sent == size:
                    #LOGGER.info("jobvector model for %s has been sucessfully uploaded", job_id)
                    print(" It worked")
                else:
                    #LOGGER.info("job vector model for %s has not sucessfully uploaded", job_id)
                    print("Try again")
        except Exception as error: 
            #LOGGER.info("Cannot upload %s job vector model as file not found in local disk")
            #LOGGER.exception(error)
            print("file not found in local disk")
    return 0

if __name__ == '__main__':
    MODEL_FOLDER = expanduser("~") + '/modelsdata/job_vectors/'
    BUCKET_FOLDER = 'w2v_model/jobvectors/'
    BUCKET_NAME = 'test-voip'
    aws_access_key_id = CONFIG["aws-s3"]["key_id"]
    aws_secret_access_key = CONFIG["aws-s3"]["key_access"]
    upload_file(aws_access_key_id,aws_secret_access_key,\
        BUCKET_NAME, BUCKET_FOLDER, MODEL_FOLDER, '237091_model', 6789)

I have tried uploading a .wav file to my S3 bucket with the above code and it was successful. The problem occurs only with the word2vec model file.
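
I suspect the problem is tied to how the model file is being read rather than to S3 itself. For instance, just reading the saved model file in the same mode seems to produce a similar error (a minimal check, with the path assumed from the code above):

from os.path import expanduser

model_path = expanduser("~") + '/modelsdata/job_vectors/237091_model'
with open(model_path, 'r+') as f:   # same mode used in upload_file()
    data = f.read()                 # raises UnicodeDecodeError here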
