I'm new to the Python and Spark world, and I'm trying to write PySpark code to send an email from Databricks with an attachment taken from a mount-point location. I'm using the code below to do this:
import smtplib
from pathlib import Path
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
from email.utils import COMMASPACE, formatdate
from email import encoders
def send_mail(send_from, send_to, subject="Test", message="Test",
              files=None, server="localhost", port=587,
              username="", password="", use_tls=True):
    """Send an email with optional file attachments via SMTP.

    Parameters
    ----------
    send_from : str
        Sender address.
    send_to : list[str]
        Recipient addresses (a list, even for a single recipient).
    subject, message : str
        Subject line and plain-text body.
    files : list[str] | None
        Paths of files to attach. NOTE (Databricks): local Python file
        APIs such as open() cannot read "/mnt/..." directly — a mounted
        path must be accessed through the FUSE mount as "/dbfs/mnt/...".
        Passing "/mnt/<dir>/" (a directory, and without the /dbfs prefix)
        is exactly what raises "No such file or directory". Pass full
        file paths like "/dbfs/mnt/<dir>/<file>".
    server, port : str, int
        SMTP host and port.
    username, password : str
        SMTP credentials used for login.
    use_tls : bool
        Upgrade the connection with STARTTLS before logging in.

    Raises
    ------
    FileNotFoundError
        If an attachment path does not exist (see `files` note above).
    smtplib.SMTPException
        On login or delivery failure.
    """
    # None default avoids the shared-mutable-default-argument pitfall.
    files = files or []

    msg = MIMEMultipart()
    msg['From'] = send_from
    msg['To'] = COMMASPACE.join(send_to)
    msg['Date'] = formatdate(localtime=True)
    msg['Subject'] = subject
    msg.attach(MIMEText(message))

    for path in files:
        part = MIMEBase('application', "octet-stream")
        # Must be a readable *file* path; on Databricks use /dbfs/mnt/...
        with open(path, 'rb') as file:
            part.set_payload(file.read())
        encoders.encode_base64(part)
        part.add_header('Content-Disposition',
                        'attachment; filename="{}"'.format(Path(path).name))
        msg.attach(part)

    smtp = smtplib.SMTP(server, port)
    try:
        if use_tls:
            smtp.starttls()
        smtp.login(username, password)
        smtp.sendmail(send_from, send_to, msg.as_string())
    finally:
        # Always close the connection, even if login/sendmail fails.
        smtp.quit()
But for some reason the code raises a "No such file or directory" (FileNotFoundError) exception.
Am I missing anything here?
Thanks