I am currently uploading files with angular2-http-file-upload
and sending them to a Django back-end. This works fine with small files (here is how I did it), but when uploading large files (35 MB - 600 MB) Django throws an exception. Sometimes it runs fine on the local server, but on the cloud it always throws an exception.
Edit: I have already set client_max_body_size in the nginx.conf file.
my-upload-item.ts
import { UploadItem } from 'angular2-http-file-upload';

export class MyUploadItem extends UploadItem {
    constructor(file: any) {
        super();
        this.url = 'http://127.0.0.1:8000/training/save/';
        this.file = file;
    }
}
my-component.ts
submit2() {
    // ///////////////
    // Upload File
    // ///////////////
    let uploadTrainingFile = (<HTMLInputElement>window.document.getElementById('fileInput1')).files[0];
    console.log('training file ', uploadTrainingFile);

    let myUploadItem1 = new MyUploadItem(uploadTrainingFile);
    myUploadItem1.formData = { FormDataKey: 'Form Data Value', 'category': this.category, 'type': 'Training File' }; // (optional) form data can be sent with the file

    this.uploaderService.onSuccessUpload = (item, response, status, headers) => {
        // success callback
        console.log('success ', response.path, response.type);
        console.log('seed step');
    };
    this.uploaderService.onErrorUpload = (item, response, status, headers) => {
        // error callback
        console.log('error ', response);
    };
    this.uploaderService.onCompleteUpload = (item, response, status, headers) => {
        // complete callback, called regardless of success or failure
        console.log('callback ', response);
    };
    this.uploaderService.onProgressUpload = (item, percentComplete) => {
        // progress callback
        console.log('progress ', percentComplete);
    };

    if (typeof uploadTrainingFile != 'undefined') {
        this.uploaderService.upload(myUploadItem1);
        console.log('uploaded');
        // if (this.uploaderService.onProgressUpload)
    }
    console.log('end func submit 2');
}
Django View to Save File
import os

from django.views.decorators.csrf import csrf_exempt
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status


@csrf_exempt
@api_view(['POST'])
def save_file(request):
    try:
        # print('Save File ', request.FILES)
        category = request.data['category']
        type = request.data['type']
        file = request.FILES['file']
        save_file_address = './training_files/' + category + '/' + file.name
        os.makedirs(os.path.dirname(save_file_address), exist_ok=True)
        # append the upload line by line, decoding each line as UTF-8 text
        with open(save_file_address, 'a+') as sv_file:
            for line in file:
                sv_file.write(line.decode("utf-8").rstrip() + '\n')
        return Response({'path': save_file_address, 'type': type}, status=status.HTTP_200_OK)
    except:
        # bare except: the real exception is swallowed and only 'fail' is returned
        return Response({'msg': 'fail'}, status=status.HTTP_400_BAD_REQUEST)
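For comparison, here is a minimal sketch (not my current code) of the same view rewritten to stream the upload to disk in binary chunks via Django's UploadedFile.chunks() and to log the real exception instead of swallowing it; the name save_file_chunked and the logging setup are only illustrative:

import logging
import os

from django.views.decorators.csrf import csrf_exempt
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status

logger = logging.getLogger(__name__)


@csrf_exempt
@api_view(['POST'])
def save_file_chunked(request):
    # illustrative variant: same inputs as save_file above
    try:
        category = request.data['category']
        file_type = request.data['type']
        uploaded = request.FILES['file']
        save_path = os.path.join('./training_files', category, uploaded.name)
        os.makedirs(os.path.dirname(save_path), exist_ok=True)
        # write raw binary chunks instead of decoding line by line;
        # chunks() never loads the whole file into memory at once
        with open(save_path, 'wb') as sv_file:
            for chunk in uploaded.chunks():
                sv_file.write(chunk)
        return Response({'path': save_path, 'type': file_type}, status=status.HTTP_200_OK)
    except Exception:
        # log the actual error instead of returning a silent 'fail'
        logger.exception('save_file_chunked failed')
        return Response({'msg': 'fail'}, status=status.HTTP_400_BAD_REQUEST)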
Edit: I am using nginx, and its configuration (nginx.conf) is:
server {
    listen 80;
    server_name reviewsai.com www.reviewsai.com;

    location / {
        proxy_pass http://150.60.20.100:8100;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        client_max_body_size 500M;
    }
}
The gunicorn start script is:
#!/bin/bash
# Start Gunicorn processes
echo Starting Gunicorn.
exec gunicorn ReviewsAI.wsgi:application \
    --bind 0.0.0.0:8000 \
    --workers 3 \
    --timeout 3600
Django settings are:
FILE_UPLOAD_HANDLERS = [
    'django.core.files.uploadhandler.MemoryFileUploadHandler',
    'django.core.files.uploadhandler.TemporaryFileUploadHandler',
]

FILE_UPLOAD_MAX_MEMORY_SIZE = 618659840
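For reference, a sketch of how these settings could be tuned so that large uploads are handled by TemporaryFileUploadHandler (streamed to a temporary file on disk) rather than buffered in RAM by MemoryFileUploadHandler; 618659840 bytes is roughly 590 MB, so the current value keeps even the largest uploads in memory. The threshold and temp-dir values below are assumptions, not my current settings:

# settings.py (sketch)
FILE_UPLOAD_HANDLERS = [
    'django.core.files.uploadhandler.MemoryFileUploadHandler',
    'django.core.files.uploadhandler.TemporaryFileUploadHandler',
]

# Keep only small uploads in memory; anything larger is handed to
# TemporaryFileUploadHandler, which writes it to a temporary file on disk.
# 2621440 (2.5 MB) is Django's default; the exact value here is an assumption.
FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440

# Optional: where the temporary files go (defaults to the system temp dir).
# FILE_UPLOAD_TEMP_DIR = '/path/to/tmp'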