"""
Django Helpdesk - A Django powered ticket tracker for small enterprise.
(c) Copyright 2008 Jutda. Copyright 2018 Timothy Hobbs. All Rights Reserved.
See LICENSE for details.
"""
# import base64
from bs4 import BeautifulSoup
from datetime import timedelta
from django.conf import settings as django_settings
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db.models import Q
from django.utils import encoding, timezone
from django.utils.translation import gettext as _
import email
from email.message import Message
from email.utils import getaddresses
from email_reply_parser import EmailReplyParser
from helpdesk import settings
from helpdesk.exceptions import DeleteIgnoredTicketException, IgnoreTicketException
from helpdesk.lib import process_attachments, safe_template_context
from helpdesk.models import FollowUp, IgnoreEmail, Queue, Ticket
import imaplib
import logging
import mimetypes
import oauthlib.oauth2 as oauth2lib
import os
from os.path import isfile, join
import poplib
import re
import requests_oauthlib
import socket
import ssl
import sys
from time import ctime
import typing
from typing import List, Tuple
# import User model, which may be a custom model
User = get_user_model()
STRIPPED_SUBJECT_STRINGS = [
"Re: ",
"Fw: ",
"RE: ",
"FW: ",
"Automatic reply: ",
]


def process_email(quiet=False):
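    """
    Iterate over all queues that allow email submission and have an email
    box type configured, and poll each one for new mail.

    Logging is configured per queue (level and optional log file), and a
    queue is only polled once its ``email_box_interval`` has elapsed since
    ``email_box_last_check``.
    """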
for q in Queue.objects.filter(
email_box_type__isnull=False,
allow_email_submission=True):
logger = logging.getLogger('django.helpdesk.queue.' + q.slug)
logging_types = {
'info': logging.INFO,
'warn': logging.WARN,
'error': logging.ERROR,
'crit': logging.CRITICAL,
'debug': logging.DEBUG,
}
if q.logging_type in logging_types:
logger.setLevel(logging_types[q.logging_type])
elif not q.logging_type or q.logging_type == 'none':
# disable all handlers so messages go to nowhere
logger.handlers = []
logger.propagate = False
if quiet:
logger.propagate = False # do not propagate to root logger that would log to console
# Log messages to specific file only if the queue has it configured
if (q.logging_type in logging_types) and q.logging_dir: # if it's enabled and the dir is set
log_file_handler = logging.FileHandler(
join(q.logging_dir, q.slug + '_get_email.log'))
logger.addHandler(log_file_handler)
else:
log_file_handler = None
try:
if not q.email_box_last_check:
q.email_box_last_check = timezone.now() - timedelta(minutes=30)
queue_time_delta = timedelta(minutes=q.email_box_interval or 0)
if (q.email_box_last_check + queue_time_delta) < timezone.now():
process_queue(q, logger=logger)
q.email_box_last_check = timezone.now()
q.save()
finally:
# we must close the file handler correctly if it's created
try:
if log_file_handler:
log_file_handler.close()
except Exception as e:
logging.exception(e)
try:
if log_file_handler:
logger.removeHandler(log_file_handler)
except Exception as e:
logging.exception(e)


def pop3_sync(q, logger, server):
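    """
    Fetch and process messages from a POP3 mailbox for the given queue.

    Attempts STARTTLS first (falling back to an unencrypted connection),
    logs in, hands each message to ``object_from_message`` and deletes it
    from the server once a ticket/follow-up was created or an ignore rule
    requested deletion.
    """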
server.getwelcome()
try:
server.stls()
except Exception:
logger.warning(
"POP3 StartTLS failed or unsupported. Connection will be unencrypted.")
server.user(q.email_box_user or settings.QUEUE_EMAIL_BOX_USER)
server.pass_(q.email_box_pass or settings.QUEUE_EMAIL_BOX_PASSWORD)
messagesInfo = server.list()[1]
logger.info("Received %d messages from POP3 server" % len(messagesInfo))
for msgRaw in messagesInfo:
if type(msgRaw) is bytes:
try:
msg = msgRaw.decode("utf-8")
except UnicodeError:
# if couldn't decode easily, just leave it raw
msg = msgRaw
else:
# already a str
msg = msgRaw
msgNum = msg.split(" ")[0]
logger.info("Processing message %s" % msgNum)
raw_content = server.retr(msgNum)[1]
if type(raw_content[0]) is bytes:
full_message = "\n".join([elm.decode('utf-8')
for elm in raw_content])
else:
full_message = encoding.force_str(
"\n".join(raw_content), errors='replace')
try:
ticket = object_from_message(message=full_message, queue=q, logger=logger)
except IgnoreTicketException:
            logger.warning(
                "Message %s was ignored and will be left on POP3 server" % msgNum)
        except DeleteIgnoredTicketException:
            logger.warning(
                "Message %s was ignored and deleted from POP3 server" % msgNum)
            server.dele(msgNum)
else:
if ticket:
server.dele(msgNum)
logger.info(
"Successfully processed message %s, deleted from POP3 server" % msgNum)
else:
                logger.warning(
"Message %s was not successfully processed, and will be left on POP3 server" % msgNum)
server.quit()


def imap_sync(q, logger, server):
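    """
    Fetch and process messages from an IMAP mailbox for the given queue.

    Attempts STARTTLS, logs in and selects ``q.email_box_imap_folder``,
    then processes every message that is not already flagged as deleted.
    Messages that produce a ticket/follow-up (or that an ignore rule asks
    to delete) are flagged as deleted and expunged before logout.
    """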
try:
try:
server.starttls()
except Exception:
logger.warning(
"IMAP4 StartTLS unsupported or failed. Connection will be unencrypted.")
server.login(q.email_box_user or
settings.QUEUE_EMAIL_BOX_USER,
q.email_box_pass or
settings.QUEUE_EMAIL_BOX_PASSWORD)
server.select(q.email_box_imap_folder)
except imaplib.IMAP4.abort:
logger.error(
"IMAP login failed. Check that the server is accessible and that "
"the username and password are correct."
)
server.logout()
sys.exit()
except ssl.SSLError:
logger.error(
"IMAP login failed due to SSL error. This is often due to a timeout. "
"Please check your connection and try again."
)
server.logout()
sys.exit()
try:
data = server.search(None, 'NOT', 'DELETED')[1]
if data:
msgnums = data[0].split()
logger.info("Received %d messages from IMAP server" % len(msgnums))
for num in msgnums:
logger.info("Processing message %s" % num)
data = server.fetch(num, '(RFC822)')[1]
full_message = encoding.force_str(data[0][1], errors='replace')
try:
ticket = object_from_message(message=full_message, queue=q, logger=logger)
except IgnoreTicketException:
                    logger.warning("Message %s was ignored and will be left on IMAP server" % num)
                except DeleteIgnoredTicketException:
                    server.store(num, '+FLAGS', '\\Deleted')
                    logger.warning("Message %s was ignored and deleted from IMAP server" % num)
except TypeError as te:
# Log the error with stacktrace to help identify what went wrong
logger.error(f"Unexpected error processing message: {te}", exc_info=True)
else:
if ticket:
server.store(num, '+FLAGS', '\\Deleted')
logger.info(
"Successfully processed message %s, deleted from IMAP server" % num)
else:
                        logger.warning(
                            "Message %s was not successfully processed, and will be left on IMAP server" % num)
except imaplib.IMAP4.error:
logger.error(
"IMAP retrieve failed. Is the folder '%s' spelled correctly, and does it exist on the server?",
q.email_box_imap_folder
)
server.expunge()
server.close()
server.logout()


def imap_oauth_sync(q, logger, server):
"""
    IMAP email server with OAuth 2.0 authentication.
    Only tested against the O365 implementation.

    Uses the HELPDESK_OAUTH dict in settings.
    """
try:
logger.debug("Start Mailbox polling via IMAP OAUTH")
client = oauth2lib.BackendApplicationClient(
client_id=settings.HELPDESK_OAUTH["client_id"],
scope=settings.HELPDESK_OAUTH["scope"],
)
oauth = requests_oauthlib.OAuth2Session(client=client)
token = oauth.fetch_token(
token_url=settings.HELPDESK_OAUTH["token_url"],
client_id=settings.HELPDESK_OAUTH["client_id"],
client_secret=settings.HELPDESK_OAUTH["secret"],
include_client_id=True,
)
server.debug = settings.HELPDESK_IMAP_DEBUG_LEVEL
# TODO: Perhaps store the authentication string template externally? Settings? Queue Table?
server.authenticate(
"XOAUTH2",
lambda x: f"user={q.email_box_user}\x01auth=Bearer {token['access_token']}\x01\x01".encode(),
)
# Select the Inbound Mailbox folder
server.select(q.email_box_imap_folder)
except imaplib.IMAP4.abort as e1:
logger.error(f"IMAP authentication failed in OAUTH: {e1}", exc_info=True)
server.logout()
sys.exit()
except ssl.SSLError as e2:
logger.error(
f"IMAP login failed due to SSL error. (This is often due to a timeout): {e2}", exc_info=True
)
server.logout()
sys.exit()
try:
data = server.search(None, 'NOT', 'DELETED')[1]
if data:
msgnums = data[0].split()
logger.info(f"Found {len(msgnums)} message(s) on IMAP server")
for num in msgnums:
logger.info(f"Processing message {num}")
data = server.fetch(num, '(RFC822)')[1]
full_message = encoding.force_str(data[0][1], errors='replace')
try:
ticket = object_from_message(message=full_message, queue=q, logger=logger)
except IgnoreTicketException as itex:
                    logger.warning(f"Message {num} was ignored. {itex}")
except DeleteIgnoredTicketException:
server.store(num, '+FLAGS', '\\Deleted')
                    logger.warning("Message %s was ignored and deleted from IMAP server" % num)
except TypeError as te:
# Log the error with stacktrace to help identify what went wrong
logger.error(f"Unexpected error processing message: {te}", exc_info=True)
else:
if ticket:
server.store(num, '+FLAGS', '\\Deleted')
logger.info(
"Successfully processed message %s, deleted from IMAP server" % num)
else:
                        logger.warning(
"Message %s was not successfully processed, and will be left on IMAP server" % num)
except imaplib.IMAP4.error:
logger.error(
"IMAP retrieve failed. Is the folder '%s' spelled correctly, and does it exist on the server?",
q.email_box_imap_folder
)
# Purged Flagged Messages & Logout
server.expunge()
server.close()
server.logout()


def process_queue(q, logger):
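    """
    Poll a single queue's mailbox.

    Sets up an optional SOCKS proxy, chooses the transport (POP3, IMAP,
    IMAP with OAuth, or a local mail directory) from the queue and settings,
    and dispatches to the matching sync function.
    """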
logger.info("***** %s: Begin processing mail for django-helpdesk" % ctime())
if q.socks_proxy_type and q.socks_proxy_host and q.socks_proxy_port:
try:
import socks
except ImportError:
no_socks_msg = "Queue has been configured with proxy settings, " \
"but no socks library was installed. Try to " \
"install PySocks via PyPI."
logger.error(no_socks_msg)
raise ImportError(no_socks_msg)
proxy_type = {
'socks4': socks.SOCKS4,
'socks5': socks.SOCKS5,
}.get(q.socks_proxy_type)
socks.set_default_proxy(proxy_type=proxy_type,
addr=q.socks_proxy_host,
port=q.socks_proxy_port)
socket.socket = socks.socksocket
email_box_type = settings.QUEUE_EMAIL_BOX_TYPE or q.email_box_type
mail_defaults = {
'pop3': {
'ssl': {
'port': 995,
'init': poplib.POP3_SSL,
},
'insecure': {
'port': 110,
'init': poplib.POP3,
},
'sync': pop3_sync,
},
'imap': {
'ssl': {
'port': 993,
'init': imaplib.IMAP4_SSL,
},
'insecure': {
'port': 143,
'init': imaplib.IMAP4,
},
'sync': imap_sync
},
'oauth': {
'ssl': {
'port': 993,
'init': imaplib.IMAP4_SSL,
},
'insecure': {
'port': 143,
'init': imaplib.IMAP4,
},
'sync': imap_oauth_sync
},
}
if email_box_type in mail_defaults:
encryption = 'insecure'
if q.email_box_ssl or settings.QUEUE_EMAIL_BOX_SSL:
encryption = 'ssl'
if not q.email_box_port:
q.email_box_port = mail_defaults[email_box_type][encryption]['port']
server = mail_defaults[email_box_type][encryption]['init'](
q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST,
int(q.email_box_port)
)
logger.info("Attempting %s server login" % email_box_type.upper())
mail_defaults[email_box_type]['sync'](q, logger, server)
elif email_box_type == 'local':
mail_dir = q.email_box_local_dir or '/var/lib/mail/helpdesk/'
mail = [join(mail_dir, f)
for f in os.listdir(mail_dir) if isfile(join(mail_dir, f))]
        logger.info("Found %d messages in local mailbox directory" % len(mail))
for i, m in enumerate(mail, 1):
logger.info("Processing message %d" % i)
with open(m, 'r') as f:
full_message = encoding.force_str(f.read(), errors='replace')
try:
ticket = object_from_message(message=full_message, queue=q, logger=logger)
except IgnoreTicketException:
                    logger.warning("Message %d was ignored and will be left in local directory", i)
except DeleteIgnoredTicketException:
os.unlink(m)
                    logger.warning("Message %d was ignored and deleted from local directory", i)
else:
if ticket:
logger.info(
"Successfully processed message %d, ticket/comment created.", i)
try:
# delete message file if ticket was successful
os.unlink(m)
except OSError as e:
logger.error(
"Unable to delete message %d (%s).", i, str(e))
else:
logger.info("Successfully deleted message %d.", i)
else:
                        logger.warning(
"Message %d was not successfully processed, and will be left in local directory", i)


def decodeUnknown(charset, string):
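    """
    Decode ``string`` (bytes) using ``charset`` when one is given; otherwise
    try UTF-8 and fall back to ISO-8859-1. Plain strings are returned
    unchanged.
    """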
if type(string) is not str:
if not charset:
try:
return str(string, encoding='utf-8', errors='replace')
except UnicodeError:
return str(string, encoding='iso8859-1', errors='replace')
return str(string, encoding=charset, errors='replace')
return string


def decode_mail_headers(string):
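    """
    Decode an RFC 2047 encoded header value into a single text string.
    """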
decoded = email.header.decode_header(string)
return u' '.join([
str(msg, encoding=charset, errors='replace') if charset else str(msg)
for msg, charset
in decoded
])


def is_autoreply(message):
    """
    Accepts any message-like object with a ``.get(header_name)`` method.
    Returns True if the message is likely an auto-reply, False otherwise,
    so that we do not answer it and start a mail loop.
    """
any_if_this = [
False if not message.get(
"Auto-Submitted") else message.get("Auto-Submitted").lower() != "no",
True if message.get("X-Auto-Response-Suppress") in ("DR",
"AutoReply", "All") else False,
message.get("List-Id"),
message.get("List-Unsubscribe"),
]
return any(any_if_this)


def create_ticket_cc(ticket, cc_list):
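    """
    Subscribe every address in ``cc_list`` (a list of ``(name, email)``
    tuples) to updates on ``ticket``, skipping the queue's own address, and
    return the created CC subscriptions.
    """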
if not cc_list:
return []
# Local import to deal with non-defined / circular reference problem
from helpdesk.views.staff import subscribe_to_ticket_updates, User
new_ticket_ccs = []
for __, cced_email in cc_list:
cced_email = cced_email.strip()
if cced_email == ticket.queue.email_address:
continue
user = None
try:
user = User.objects.get(email=cced_email) # @UndefinedVariable
except User.DoesNotExist:
pass
try:
ticket_cc = subscribe_to_ticket_updates(
ticket=ticket, user=user, email=cced_email)
new_ticket_ccs.append(ticket_cc)
except ValidationError:
pass
return new_ticket_ccs


def create_object_from_email_message(message, ticket_id, payload, files, logger):
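    """
    Create or update a Ticket (and a FollowUp) from a parsed email message.

    Matches an existing ticket via the In-Reply-To header or the ticket id
    parsed from the subject, follows merged tickets, re-opens closed
    tickets, attaches any files, and finally sends notification emails
    unless the message looks like an auto-reply.
    """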
ticket, previous_followup, new = None, None, False
now = timezone.now()
queue = payload['queue']
sender_email = payload['sender_email']
to_list = getaddresses(message.get_all('To', []))
cc_list = getaddresses(message.get_all('Cc', []))
message_id = message.get('Message-Id')
in_reply_to = message.get('In-Reply-To')
if message_id:
message_id = message_id.strip()
if in_reply_to:
in_reply_to = in_reply_to.strip()
if in_reply_to is not None:
try:
queryset = FollowUp.objects.filter(
message_id=in_reply_to).order_by('-date')
if queryset.count() > 0:
previous_followup = queryset.first()
ticket = previous_followup.ticket
except FollowUp.DoesNotExist:
pass # play along. The header may be wrong
if previous_followup is None and ticket_id is not None:
try:
ticket = Ticket.objects.get(id=ticket_id)
except Ticket.DoesNotExist:
ticket = None
else:
new = False
# Check if the ticket has been merged to another ticket
if ticket.merged_to:
logger.info("Ticket has been merged to %s" %
ticket.merged_to.ticket)
# Use the ticket in which it was merged to for next operations
ticket = ticket.merged_to
# New issue, create a new <Ticket> instance
if ticket is None:
if not settings.QUEUE_EMAIL_BOX_UPDATE_ONLY:
ticket = Ticket.objects.create(
title=payload['subject'],
queue=queue,
submitter_email=sender_email,
created=now,
description=payload['body'],
priority=payload['priority'],
)
ticket.save()
logger.debug("Created new ticket %s-%s" %
(ticket.queue.slug, ticket.id))
new = True
# Old issue being re-opened
elif ticket.status == Ticket.CLOSED_STATUS:
ticket.status = Ticket.REOPENED_STATUS
ticket.save()
f = FollowUp(
ticket=ticket,
title=_('E-Mail Received from %(sender_email)s' %
{'sender_email': sender_email}),
date=now,
public=True,
comment=payload.get('full_body', payload['body']) or "",
message_id=message_id
)
if ticket.status == Ticket.REOPENED_STATUS:
f.new_status = Ticket.REOPENED_STATUS
f.title = _('Ticket Re-Opened by E-Mail Received from %(sender_email)s' %
{'sender_email': sender_email})
f.save()
logger.debug("Created new FollowUp for Ticket")
logger.info("[%s-%s] %s" % (ticket.queue.slug, ticket.id, ticket.title,))
try:
attached = process_attachments(f, files)
except ValidationError as e:
logger.error(str(e))
else:
for att_file in attached:
logger.info(
"Attachment '%s' (with size %s) successfully added to ticket from email.",
att_file[0], att_file[1].size
)
context = safe_template_context(ticket)
new_ticket_ccs = []
new_ticket_ccs.append(create_ticket_cc(ticket, to_list + cc_list))
autoreply = is_autoreply(message)
if autoreply:
logger.info(
"Message seems to be auto-reply, not sending any emails back to the sender")
else:
send_info_email(message_id, f, ticket, context, queue, new)
return ticket


def send_info_email(message_id: str, f: FollowUp, ticket: Ticket, context: dict, queue: Queue, new: bool):
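    """
    Send the notification emails for a new or updated ticket.
    """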
    # Send mail to the appropriate people depending on which objects were
    # created and who was CC'd.
    # Add auto-reply headers so that recipients' auto-responders do not
    # answer back and create a mail loop.
extra_headers = {
'In-Reply-To': message_id,
"Auto-Submitted": "auto-replied",
"X-Auto-Response-Suppress": "All",
"Precedence": "auto_reply",
}
if new:
ticket.send(
{'submitter': ('newticket_submitter', context),
'new_ticket_cc': ('newticket_cc', context),
'ticket_cc': ('newticket_cc', context)},
fail_silently=True,
extra_headers=extra_headers,
)
else:
context.update(comment=f.comment)
ticket.send(
{'submitter': ('updated_submitter', context),
'assigned_to': ('updated_owner', context)},
fail_silently=True,
extra_headers=extra_headers,
)
if queue.enable_notifications_on_email_events:
ticket.send(
{'ticket_cc': ('updated_cc', context)},
fail_silently=True,
extra_headers=extra_headers,
)


def get_ticket_id_from_subject_slug(
queue_slug: str,
subject: str,
logger: logging.Logger
) -> typing.Optional[int]:
"""Get a ticket id from the subject string
Performs a match on the subject using the queue_slug as reference,
returning the ticket id if a match is found.
"""
matchobj = re.match(r".*\[" + queue_slug + r"-(?P<id>\d+)\]", subject)
ticket_id = None
if matchobj:
# This is a reply or forward.
ticket_id = matchobj.group('id')
logger.info("Matched tracking ID %s-%s" % (queue_slug, ticket_id))
else:
logger.info("No tracking ID matched.")
return ticket_id


def add_file_if_always_save_incoming_email_message(
files_,
message: str
) -> None:
"""When `settings.HELPDESK_ALWAYS_SAVE_INCOMING_EMAIL_MESSAGE` is `True`
add a file to the files_ list"""
if getattr(django_settings, 'HELPDESK_ALWAYS_SAVE_INCOMING_EMAIL_MESSAGE', False):
# save message as attachment in case of some complex markup renders
# wrong
files_.append(
SimpleUploadedFile(
_("original_message.eml").replace(
".eml",
timezone.localtime().strftime("_%d-%m-%Y_%H:%M") + ".eml"
),
str(message).encode("utf-8"),
'text/plain'
)
)


def get_encoded_body(body: str) -> str:
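    """
    Workaround to get unicode text out rather than escaped text: round-trip
    the body through ``unicode_escape`` and fall back to the original body
    if it contains non-ASCII characters.
    """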
try:
return body.encode('ascii').decode('unicode_escape')
except UnicodeEncodeError:
return body


def get_body_from_fragments(body) -> str:
"""Gets a body from the fragments, joined by a double line break"""
return "\n\n".join(f.content for f in EmailReplyParser.read(body).fragments)


def get_email_body_from_part_payload(part) -> str:
    """Gets a decoded body from the payload part; if decoding fails,
    returns the payload without decoding."""
try:
return encoding.smart_str(
part.get_payload(decode=True)
)
except UnicodeDecodeError:
return encoding.smart_str(
part.get_payload(decode=False)
)


def attempt_body_extract_from_html(message: str) -> Tuple[str, str]:
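    """
    Fall back to extracting the body text from the raw message with
    BeautifulSoup when no text MIME part yielded one.
    Returns a ``(body, full_body)`` tuple.
    """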
mail = BeautifulSoup(str(message), "html.parser")
beautiful_body = mail.find('body')
body = None
full_body = None
if beautiful_body:
try:
body = beautiful_body.text
full_body = body
except AttributeError:
pass
if not body:
body = ""
return body, full_body


def extract_part_data(
part: Message,
counter: int,
ticket_id: int,
files: List,
logger: logging.Logger
) -> Tuple[str, str]:
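    """
    Extract the data from a single, non-multipart MIME part.

    text/plain parts are reply-parsed into the plain body (the full text is
    kept as the formatted body for the first message of a ticket when
    ``HELPDESK_FULL_FIRST_MESSAGE_FROM_EMAIL`` is enabled), text/html parts
    are saved as an ``email_html_body.html`` attachment, and any other part
    is appended to ``files`` as a regular attachment.
    Returns a ``(part_body, formatted_body)`` tuple.
    """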
name = part.get_filename()
if name:
name = email.utils.collapse_rfc2231_value(name)
part_body = None
formatted_body = None
if part.get_content_maintype() == 'text' and name is None:
if part.get_content_subtype() == 'plain':
part_body = part.get_payload(decode=True)
# https://github.com/django-helpdesk/django-helpdesk/issues/732
if part['Content-Transfer-Encoding'] == '8bit' and part.get_content_charset() == 'utf-8':
part_body = part_body.decode('unicode_escape')
part_body = decodeUnknown(part.get_content_charset(), part_body)
# have to use django_settings here so overwriting it works in tests
# the default value is False anyway
if ticket_id is None and getattr(django_settings, 'HELPDESK_FULL_FIRST_MESSAGE_FROM_EMAIL', False):
# first message in thread, we save full body to avoid
# losing forwards and things like that
formatted_body = get_body_from_fragments(part_body)
part_body = EmailReplyParser.parse_reply(part_body)
else:
# second and other reply, save only first part of the
# message
part_body = EmailReplyParser.parse_reply(part_body)
formatted_body = part_body
# workaround to get unicode text out rather than escaped text
part_body = get_encoded_body(part_body)
logger.debug("Discovered plain text MIME part")
else:
email_body = get_email_body_from_part_payload(part)
if not part_body and not formatted_body:
# no text has been parsed so far - try such deep parsing
# for some messages
altered_body = email_body.replace(
"</p>", "</p>\n").replace("<br", "\n<br")
mail = BeautifulSoup(str(altered_body), "html.parser")
formatted_body = mail.get_text()
if "<body" not in email_body:
email_body = f"<body>{email_body}</body>"
payload = (
'<html>'
'<head>'
'<meta charset="utf-8" />'
'</head>'
'%s'
'</html>'
) % email_body
files.append(
SimpleUploadedFile(
_("email_html_body.html"), payload.encode("utf-8"), 'text/html')
)
logger.debug("Discovered HTML MIME part")
else:
if not name:
ext = mimetypes.guess_extension(part.get_content_type())
name = f"part-{counter}{ext}"
else:
name = f"part-{counter}_{name}"
        payload = part.as_string() if part.is_multipart() else part.get_payload(decode=True)
        files.append(SimpleUploadedFile(name, payload, mimetypes.guess_type(name)[0]))
logger.debug("Found MIME attachment %s", name)
return part_body, formatted_body


def recurse_multipart(
multipart: Message,
counter: int,
ticket_id: int,
files: List,
logger: logging.Logger
) -> Tuple[str, str]:
'''
The received MIME part could be a multipart with embedded multiparts and therefore requires recursion.
Recurse through the multipart structures trying to find the 1st body part that
provides the message body. It will try to find an HTML formatted part (contentType=text/html)
and a TEXT formatted part (contentType=text/plain) and return both
:param multipart:
:param counter:
:param ticket_id:
:param files:
:param logger:
'''
plain_msg = None
formatted_msg = None
for part in multipart.walk():
if part.get_content_maintype() == 'multipart':
continue
        # See email.message.Message.get_filename()
        plain_body, formatted_body = (
            recurse_multipart(part, counter, ticket_id, files, logger)
            if part.get_content_maintype() == 'multipart'
            else extract_part_data(part, counter, ticket_id, files, logger)
        )
# Only update the message variables if they are still empty to handle attached messages overriding the core message
if plain_msg is None and plain_body:
plain_msg = plain_body
if formatted_msg is None and formatted_body:
formatted_msg = formatted_body
counter += 1
return plain_msg, formatted_msg


def object_from_message(message: str,
queue: Queue,
logger: logging.Logger
) -> Ticket:
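    """
    Parse a raw email message string and turn it into a new ticket or a
    follow-up on an existing one.

    Strips reply/forward prefixes from the subject, applies the queue's
    ignore list, extracts body text and attachments from the MIME parts,
    derives the priority from the SMTP priority/importance headers, and
    delegates the database work to ``create_object_from_email_message``.
    """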
# 'message' must be an RFC822 formatted message to correctly parse.
message_obj = email.message_from_string(message)
subject = message_obj.get('subject', _('Comment from e-mail'))
subject = decode_mail_headers(
decodeUnknown(message_obj.get_charset(), subject))
for affix in STRIPPED_SUBJECT_STRINGS:
subject = subject.replace(affix, "")
subject = subject.strip()
# TODO: Should really be assigning a properly formatted fake email.
# Check if anything relies on this being a "real name" formatted string if no sender is found on message_obj.
# Also not sure it should be accepting emails from unknown senders
sender_email = _('Unknown Sender')
sender_hdr = message_obj.get('from')
if sender_hdr:
# Parse the header which extracts the first email address in the list if more than one
# The parseaddr method returns a tuple in the form <real name> <email address>
# Only need the actual email address from the tuple not the "real name"
# Since the spec requires that all email addresses are ASCII, they will not be encoded
sender_email = email.utils.parseaddr(sender_hdr)[1]
for ignore in IgnoreEmail.objects.filter(Q(queues=queue) | Q(queues__isnull=True)):
if ignore.test(sender_email):
raise IgnoreTicketException() if ignore.keep_in_mailbox else DeleteIgnoredTicketException()
ticket_id: typing.Optional[int] = get_ticket_id_from_subject_slug(
queue.slug,
subject,
logger
)
body = None
full_body = None
counter = 0
files = []
for part in message_obj.walk():
        if part.get_content_maintype() == 'multipart':
            continue
        # See email.message.Message.get_filename()
plain_body, formatted_body = extract_part_data(part, counter, ticket_id, files, logger)
if plain_body:
body = plain_body
if formatted_body:
full_body = formatted_body
counter += 1
if not body:
body, full_body = attempt_body_extract_from_html(message_obj)
add_file_if_always_save_incoming_email_message(files, message_obj)
smtp_priority = message_obj.get('priority', '')
smtp_importance = message_obj.get('importance', '')
high_priority_types = {'high', 'important', '1', 'urgent'}
priority = 2 if high_priority_types & {
smtp_priority, smtp_importance} else 3
payload = {
'body': body,
'full_body': full_body or body,
'subject': subject,
'queue': queue,
'sender_email': sender_email,
'priority': priority,
'files': files,
}
return create_object_from_email_message(message_obj, ticket_id, payload, files, logger=logger)