2018-10-24 18:20:12 +02:00
|
|
|
"""
|
|
|
|
Django Helpdesk - A Django powered ticket tracker for small enterprise.
|
|
|
|
|
|
|
|
(c) Copyright 2008 Jutda. Copyright 2018 Timothy Hobbs. All Rights Reserved.
|
|
|
|
See LICENSE for details.
|
|
|
|
"""
|
2022-07-22 03:26:41 +02:00
|
|
|
|
2020-11-25 10:20:36 +01:00
|
|
|
# import base64
|
2018-10-24 18:20:12 +02:00
|
|
|
from bs4 import BeautifulSoup
|
2022-07-22 03:26:41 +02:00
|
|
|
from datetime import timedelta
|
2021-04-13 14:04:08 +02:00
|
|
|
from django.conf import settings as django_settings
|
2020-11-25 10:20:36 +01:00
|
|
|
from django.contrib.auth import get_user_model
|
|
|
|
from django.core.exceptions import ValidationError
|
|
|
|
from django.core.files.uploadedfile import SimpleUploadedFile
|
|
|
|
from django.db.models import Q
|
|
|
|
from django.utils import encoding, timezone
|
2022-03-17 03:29:09 +01:00
|
|
|
from django.utils.translation import gettext as _
|
2022-07-22 03:26:41 +02:00
|
|
|
import email
|
2022-09-09 00:40:49 +02:00
|
|
|
from email.message import Message
|
2023-07-23 07:31:29 +02:00
|
|
|
from email.mime.text import MIMEText
|
2022-07-22 03:26:41 +02:00
|
|
|
from email.utils import getaddresses
|
2018-10-24 18:20:12 +02:00
|
|
|
from email_reply_parser import EmailReplyParser
|
2020-11-25 10:20:36 +01:00
|
|
|
from helpdesk import settings
|
2022-09-09 00:40:49 +02:00
|
|
|
from helpdesk.exceptions import DeleteIgnoredTicketException, IgnoreTicketException
|
2022-07-22 03:26:41 +02:00
|
|
|
from helpdesk.lib import process_attachments, safe_template_context
|
2022-07-25 01:23:12 +02:00
|
|
|
from helpdesk.models import FollowUp, IgnoreEmail, Queue, Ticket
|
2022-07-22 03:26:41 +02:00
|
|
|
import imaplib
|
|
|
|
import logging
|
|
|
|
import mimetypes
|
2023-04-19 07:12:13 +02:00
|
|
|
import oauthlib.oauth2 as oauth2lib
|
2022-07-22 03:26:41 +02:00
|
|
|
import os
|
|
|
|
from os.path import isfile, join
|
|
|
|
import poplib
|
|
|
|
import re
|
2023-04-19 07:12:13 +02:00
|
|
|
import requests_oauthlib
|
2022-07-22 03:26:41 +02:00
|
|
|
import socket
|
|
|
|
import ssl
|
|
|
|
import sys
|
|
|
|
from time import ctime
|
2022-07-25 01:56:13 +02:00
|
|
|
import typing
|
2023-07-23 07:12:32 +02:00
|
|
|
from typing import List
|
2020-11-25 10:20:36 +01:00
|
|
|
|
2018-10-24 18:20:12 +02:00
|
|
|
|
2019-08-06 01:40:38 +02:00
|
|
|
# Import the active User model, which may be a project-specific custom model
# (django.contrib.auth respects settings.AUTH_USER_MODEL).
User = get_user_model()
|
|
|
|
|
2018-10-24 18:20:12 +02:00
|
|
|
# Prefixes removed from incoming email subjects before they are used as
# ticket titles (reply/forward markers plus the Outlook auto-reply banner).
STRIPPED_SUBJECT_STRINGS = [
    "Re: ",
    "Fw: ",
    "RE: ",
    "FW: ",
    "Automatic reply: ",
]
|
|
|
|
|
2023-07-23 07:12:32 +02:00
|
|
|
HTML_EMAIL_ATTACHMENT_FILENAME = _("email_html_body.html")
|
|
|
|
|
2018-10-31 16:24:57 +01:00
|
|
|
|
2018-10-24 18:20:12 +02:00
|
|
|
def process_email(quiet=False):
    """Poll the inbound mailbox of every queue that accepts email submission.

    For each matching queue a per-queue logger is configured (level and an
    optional file handler), then :func:`process_queue` is called when the
    queue's polling interval has elapsed. The file handler is always closed
    and detached afterwards, even if processing raises.

    :param quiet: when True, stop propagation to the root logger so nothing
        is printed to the console.
    """
    for q in Queue.objects.filter(
            email_box_type__isnull=False,
            allow_email_submission=True):

        logger = logging.getLogger('django.helpdesk.queue.' + q.slug)
        # Map the queue's configured logging_type string to a logging level.
        logging_types = {
            'info': logging.INFO,
            'warn': logging.WARN,
            'error': logging.ERROR,
            'crit': logging.CRITICAL,
            'debug': logging.DEBUG,
        }
        if q.logging_type in logging_types:
            logger.setLevel(logging_types[q.logging_type])
        elif not q.logging_type or q.logging_type == 'none':
            # disable all handlers so messages go to nowhere
            logger.handlers = []
            logger.propagate = False
        if quiet:
            logger.propagate = False  # do not propagate to root logger that would log to console
        # Log messages to specific file only if the queue has it configured
        if (q.logging_type in logging_types) and q.logging_dir:  # if it's enabled and the dir is set
            log_file_handler = logging.FileHandler(
                join(q.logging_dir, q.slug + '_get_email.log'))
            logger.addHandler(log_file_handler)
        else:
            log_file_handler = None

        try:
            # First-ever poll: pretend the last check was 30 minutes ago so
            # the interval comparison below can run.
            if not q.email_box_last_check:
                q.email_box_last_check = timezone.now() - timedelta(minutes=30)

            queue_time_delta = timedelta(minutes=q.email_box_interval or 0)
            # Only poll when the configured interval has elapsed.
            if (q.email_box_last_check + queue_time_delta) < timezone.now():
                process_queue(q, logger=logger)
                q.email_box_last_check = timezone.now()
                q.save()
        finally:
            # we must close the file handler correctly if it's created
            try:
                if log_file_handler:
                    log_file_handler.close()
            except Exception as e:
                logging.exception(e)
            try:
                if log_file_handler:
                    logger.removeHandler(log_file_handler)
            except Exception as e:
                logging.exception(e)
|
2018-10-24 18:20:12 +02:00
|
|
|
|
|
|
|
|
|
|
|
def pop3_sync(q, logger, server):
    """Fetch and process messages from a POP3 ``server`` for queue ``q``.

    Messages that produce a ticket — or that are explicitly flagged for
    deletion — are removed from the server; ignored or unprocessable
    messages are left in place for the next poll.

    :param q: the helpdesk Queue being polled
    :param logger: queue-specific logger
    :param server: a connected ``poplib.POP3``/``POP3_SSL`` instance
    """
    server.getwelcome()
    try:
        # Opportunistic upgrade to TLS; fall back to plaintext when the
        # server does not support STLS.
        server.stls()
    except Exception:
        logger.warning(
            "POP3 StartTLS failed or unsupported. Connection will be unencrypted.")
    server.user(q.email_box_user or settings.QUEUE_EMAIL_BOX_USER)
    server.pass_(q.email_box_pass or settings.QUEUE_EMAIL_BOX_PASSWORD)

    messagesInfo = server.list()[1]
    logger.info("Received %d messages from POP3 server" % len(messagesInfo))

    for msgRaw in messagesInfo:
        if type(msgRaw) is bytes:
            try:
                msg = msgRaw.decode("utf-8")
            except UnicodeError:
                # if couldn't decode easily, just leave it raw
                msg = msgRaw
        else:
            # already a str
            msg = msgRaw
        # LIST lines look like "<num> <octets>"; the first token is the id.
        msgNum = msg.split(" ")[0]
        logger.info("Processing message %s" % msgNum)

        raw_content = server.retr(msgNum)[1]
        if type(raw_content[0]) is bytes:
            full_message = "\n".join([elm.decode('utf-8')
                                      for elm in raw_content])
        else:
            full_message = encoding.force_str(
                "\n".join(raw_content), errors='replace')
        try:
            ticket = extract_email_metadata(message=full_message, queue=q, logger=logger)
        except IgnoreTicketException:
            # Logger.warn is a deprecated alias; use warning() instead.
            logger.warning(
                "Message %s was ignored and will be left on POP3 server" % msgNum)
        except DeleteIgnoredTicketException:
            logger.warning(
                "Message %s was ignored and deleted from POP3 server" % msgNum)
            server.dele(msgNum)
        else:
            if ticket:
                server.dele(msgNum)
                logger.info(
                    "Successfully processed message %s, deleted from POP3 server" % msgNum)
            else:
                logger.warning(
                    "Message %s was not successfully processed, and will be left on POP3 server" % msgNum)

    server.quit()
|
|
|
|
|
|
|
|
|
|
|
|
def imap_sync(q, logger, server):
    """Fetch and process messages from an IMAP ``server`` for queue ``q``.

    Messages that produce a ticket (or are explicitly flagged for deletion)
    are marked ``\\Deleted`` and expunged at the end; ignored or failed
    messages remain on the server. Exits the process on unrecoverable
    login/SSL errors.

    :param q: the helpdesk Queue being polled
    :param logger: queue-specific logger
    :param server: a connected ``imaplib.IMAP4``/``IMAP4_SSL`` instance
    """
    try:
        try:
            # Opportunistic upgrade to TLS; fall back to plaintext.
            server.starttls()
        except Exception:
            logger.warning(
                "IMAP4 StartTLS unsupported or failed. Connection will be unencrypted.")
        server.login(q.email_box_user or
                     settings.QUEUE_EMAIL_BOX_USER,
                     q.email_box_pass or
                     settings.QUEUE_EMAIL_BOX_PASSWORD)
        server.select(q.email_box_imap_folder)
    except imaplib.IMAP4.abort:
        logger.error(
            "IMAP login failed. Check that the server is accessible and that "
            "the username and password are correct."
        )
        server.logout()
        sys.exit()
    except ssl.SSLError:
        logger.error(
            "IMAP login failed due to SSL error. This is often due to a timeout. "
            "Please check your connection and try again."
        )
        server.logout()
        sys.exit()

    try:
        data = server.search(None, 'NOT', 'DELETED')[1]
        if data:
            msgnums = data[0].split()
            logger.info("Received %d messages from IMAP server" % len(msgnums))
            for num in msgnums:
                logger.info("Processing message %s" % num)
                data = server.fetch(num, '(RFC822)')[1]
                full_message = encoding.force_str(data[0][1], errors='replace')
                try:
                    ticket = extract_email_metadata(message=full_message, queue=q, logger=logger)
                except IgnoreTicketException:
                    # Logger.warn is a deprecated alias; use warning() instead.
                    logger.warning("Message %s was ignored and will be left on IMAP server" % num)
                except DeleteIgnoredTicketException:
                    server.store(num, '+FLAGS', '\\Deleted')
                    logger.warning("Message %s was ignored and deleted from IMAP server" % num)
                except TypeError as te:
                    # Log the error with stacktrace to help identify what went wrong
                    logger.error(f"Unexpected error processing message: {te}", exc_info=True)
                else:
                    if ticket:
                        server.store(num, '+FLAGS', '\\Deleted')
                        logger.info(
                            "Successfully processed message %s, deleted from IMAP server" % num)
                    else:
                        logger.warning(
                            "Message %s was not successfully processed, and will be left on IMAP server" % num)
    except imaplib.IMAP4.error:
        logger.error(
            "IMAP retrieve failed. Is the folder '%s' spelled correctly, and does it exist on the server?",
            q.email_box_imap_folder
        )

    server.expunge()
    server.close()
    server.logout()
|
|
|
|
|
|
|
|
|
2023-04-15 14:11:41 +02:00
|
|
|
def imap_oauth_sync(q, logger, server):
    """
    IMAP eMail server with OAUTH authentication.
    Only tested against O365 implementation

    Uses HELPDESK OAUTH Dict in Settings.

    Fetches an OAuth2 token via the client-credentials flow, authenticates
    with XOAUTH2 and then processes the selected folder like
    :func:`imap_sync`. Exits the process on unrecoverable auth/SSL errors.
    """
    try:
        logger.debug("Start Mailbox polling via IMAP OAUTH")

        client = oauth2lib.BackendApplicationClient(
            client_id=settings.HELPDESK_OAUTH["client_id"],
            scope=settings.HELPDESK_OAUTH["scope"],
        )

        oauth = requests_oauthlib.OAuth2Session(client=client)
        token = oauth.fetch_token(
            token_url=settings.HELPDESK_OAUTH["token_url"],
            client_id=settings.HELPDESK_OAUTH["client_id"],
            client_secret=settings.HELPDESK_OAUTH["secret"],
            include_client_id=True,
        )

        server.debug = settings.HELPDESK_IMAP_DEBUG_LEVEL
        # TODO: Perhaps store the authentication string template externally? Settings? Queue Table?
        server.authenticate(
            "XOAUTH2",
            lambda x: f"user={q.email_box_user}\x01auth=Bearer {token['access_token']}\x01\x01".encode(),
        )
        # Select the Inbound Mailbox folder
        server.select(q.email_box_imap_folder)

    except imaplib.IMAP4.abort as e1:
        logger.error(f"IMAP authentication failed in OAUTH: {e1}", exc_info=True)
        server.logout()
        sys.exit()

    except ssl.SSLError as e2:
        logger.error(
            f"IMAP login failed due to SSL error. (This is often due to a timeout): {e2}", exc_info=True
        )
        server.logout()
        sys.exit()

    try:
        data = server.search(None, 'NOT', 'DELETED')[1]
        if data:
            msgnums = data[0].split()
            logger.info(f"Found {len(msgnums)} message(s) on IMAP server")
            for num in msgnums:
                logger.info(f"Processing message {num}")
                data = server.fetch(num, '(RFC822)')[1]
                full_message = encoding.force_str(data[0][1], errors='replace')

                try:
                    ticket = extract_email_metadata(message=full_message, queue=q, logger=logger)

                except IgnoreTicketException as itex:
                    # Logger.warn is a deprecated alias; use warning() instead.
                    logger.warning(f"Message {num} was ignored. {itex}")

                except DeleteIgnoredTicketException:
                    server.store(num, '+FLAGS', '\\Deleted')
                    logger.warning("Message %s was ignored and deleted from IMAP server" % num)

                except TypeError as te:
                    # Log the error with stacktrace to help identify what went wrong
                    logger.error(f"Unexpected error processing message: {te}", exc_info=True)

                else:
                    if ticket:
                        server.store(num, '+FLAGS', '\\Deleted')
                        logger.info(
                            "Successfully processed message %s, deleted from IMAP server" % num)
                    else:
                        logger.warning(
                            "Message %s was not successfully processed, and will be left on IMAP server" % num)

    except imaplib.IMAP4.error:
        logger.error(
            "IMAP retrieve failed. Is the folder '%s' spelled correctly, and does it exist on the server?",
            q.email_box_imap_folder
        )

    # Purged Flagged Messages & Logout
    server.expunge()
    server.close()
    server.logout()
|
|
|
|
|
|
|
|
|
2018-10-24 18:20:12 +02:00
|
|
|
def process_queue(q, logger):
    """Poll a single queue's mailbox for new mail and create tickets.

    Configures an optional SOCKS proxy, then dispatches to the transport
    selected by the queue configuration: POP3, IMAP, OAuth IMAP, or a local
    maildir-style directory of message files.

    :param q: the helpdesk Queue to poll
    :param logger: queue-specific logger
    :raises ImportError: when a SOCKS proxy is configured but PySocks is
        not installed
    """
    logger.info("***** %s: Begin processing mail for django-helpdesk" % ctime())

    if q.socks_proxy_type and q.socks_proxy_host and q.socks_proxy_port:
        try:
            import socks
        except ImportError:
            no_socks_msg = "Queue has been configured with proxy settings, " \
                           "but no socks library was installed. Try to " \
                           "install PySocks via PyPI."
            logger.error(no_socks_msg)
            raise ImportError(no_socks_msg)

        proxy_type = {
            'socks4': socks.SOCKS4,
            'socks5': socks.SOCKS5,
        }.get(q.socks_proxy_type)

        # Route all subsequent socket traffic through the proxy.
        socks.set_default_proxy(proxy_type=proxy_type,
                                addr=q.socks_proxy_host,
                                port=q.socks_proxy_port)
        socket.socket = socks.socksocket

    email_box_type = settings.QUEUE_EMAIL_BOX_TYPE or q.email_box_type

    # Per-transport defaults: port/constructor per encryption mode, plus the
    # sync routine that does the actual fetching.
    mail_defaults = {
        'pop3': {
            'ssl': {
                'port': 995,
                'init': poplib.POP3_SSL,
            },
            'insecure': {
                'port': 110,
                'init': poplib.POP3,
            },
            'sync': pop3_sync,
        },
        'imap': {
            'ssl': {
                'port': 993,
                'init': imaplib.IMAP4_SSL,
            },
            'insecure': {
                'port': 143,
                'init': imaplib.IMAP4,
            },
            'sync': imap_sync
        },
        'oauth': {
            'ssl': {
                'port': 993,
                'init': imaplib.IMAP4_SSL,
            },
            'insecure': {
                'port': 143,
                'init': imaplib.IMAP4,
            },
            'sync': imap_oauth_sync
        },
    }
    if email_box_type in mail_defaults:
        encryption = 'insecure'
        if q.email_box_ssl or settings.QUEUE_EMAIL_BOX_SSL:
            encryption = 'ssl'
        if not q.email_box_port:
            q.email_box_port = mail_defaults[email_box_type][encryption]['port']

        server = mail_defaults[email_box_type][encryption]['init'](
            q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST,
            int(q.email_box_port)
        )
        logger.info("Attempting %s server login" % email_box_type.upper())
        mail_defaults[email_box_type]['sync'](q, logger, server)

    elif email_box_type == 'local':
        mail_dir = q.email_box_local_dir or '/var/lib/mail/helpdesk/'
        mail = [join(mail_dir, f)
                for f in os.listdir(mail_dir) if isfile(join(mail_dir, f))]
        # Fix: this message was previously logged twice in a row; log once.
        logger.info("Found %d messages in local mailbox directory" % len(mail))
        for i, m in enumerate(mail, 1):
            logger.info("Processing message %d" % i)
            with open(m, 'r') as f:
                full_message = encoding.force_str(f.read(), errors='replace')
                try:
                    ticket = extract_email_metadata(message=full_message, queue=q, logger=logger)
                except IgnoreTicketException:
                    # Logger.warn is a deprecated alias; use warning() instead.
                    logger.warning("Message %d was ignored and will be left in local directory", i)
                except DeleteIgnoredTicketException:
                    os.unlink(m)
                    logger.warning("Message %d was ignored and deleted local directory", i)
                else:
                    if ticket:
                        logger.info(
                            "Successfully processed message %d, ticket/comment created.", i)
                        try:
                            # delete message file if ticket was successful
                            os.unlink(m)
                        except OSError as e:
                            logger.error(
                                "Unable to delete message %d (%s).", i, str(e))
                        else:
                            logger.info("Successfully deleted message %d.", i)
                    else:
                        logger.warning(
                            "Message %d was not successfully processed, and will be left in local directory", i)
|
2018-10-24 18:20:12 +02:00
|
|
|
|
|
|
|
|
|
|
|
def decodeUnknown(charset, string):
    """Best-effort decode of *string* to ``str``.

    A value that is already ``str`` is returned untouched. Bytes are decoded
    with *charset* when one is given; otherwise UTF-8 is attempted first with
    an ISO-8859-1 fallback. Undecodable bytes are replaced, never raised.
    """
    if type(string) is str:
        return string
    if charset:
        return str(string, encoding=charset, errors='replace')
    try:
        return str(string, encoding='utf-8', errors='replace')
    except UnicodeError:
        return str(string, encoding='iso8859-1', errors='replace')
|
|
|
|
|
|
|
|
|
|
|
|
def decode_mail_headers(string):
    """Decode an RFC 2047 encoded mail header into a plain unicode string.

    Each decoded fragment is converted with its declared charset (bad bytes
    replaced); fragments without a charset are stringified as-is. Fragments
    are joined with a single space.
    """
    pieces = []
    for fragment, charset in email.header.decode_header(string):
        if charset:
            pieces.append(str(fragment, encoding=charset, errors='replace'))
        else:
            pieces.append(str(fragment))
    return u' '.join(pieces)
|
2018-10-24 18:20:12 +02:00
|
|
|
|
|
|
|
|
2021-04-15 10:39:55 +02:00
|
|
|
def is_autoreply(message):
    """
    Accepting message as something with .get(header_name) method
    Returns True if it's likely to be auto-reply or False otherwise
    So we don't start mail loops
    """
    auto_submitted = message.get("Auto-Submitted")
    suppress = message.get("X-Auto-Response-Suppress")
    # Any one of these headers is taken as evidence of an automated sender.
    indicators = [
        bool(auto_submitted) and auto_submitted.lower() != "no",
        suppress in ("DR", "AutoReply", "All"),
        message.get("List-Id"),
        message.get("List-Unsubscribe"),
    ]
    return any(indicators)
|
|
|
|
|
|
|
|
|
2018-12-28 16:53:28 +01:00
|
|
|
def create_ticket_cc(ticket, cc_list):
    """Subscribe every address in *cc_list* to updates on *ticket*.

    Addresses equal to the queue's own email address are skipped. When a
    User account matches the address it is attached to the subscription.
    Returns the list of subscriptions that were successfully created.
    """
    if not cc_list:
        return []

    # Local import to deal with non-defined / circular reference problem
    from helpdesk.views.staff import subscribe_to_ticket_updates, User

    new_ticket_ccs = []
    for __, cced_email in cc_list:
        cced_email = cced_email.strip()
        # Never CC the queue's own inbound address — that would loop mail.
        if cced_email == ticket.queue.email_address:
            continue

        try:
            user = User.objects.get(email=cced_email)  # @UndefinedVariable
        except User.DoesNotExist:
            user = None

        try:
            subscription = subscribe_to_ticket_updates(
                ticket=ticket, user=user, email=cced_email)
            new_ticket_ccs.append(subscription)
        except ValidationError:
            # Invalid/duplicate subscriptions are silently skipped.
            pass

    return new_ticket_ccs
|
|
|
|
|
|
|
|
|
|
|
|
def create_object_from_email_message(message, ticket_id, payload, files, logger):
    """Create or update a Ticket (plus a FollowUp) from a parsed email.

    Resolution order for the target ticket: the In-Reply-To header (matched
    against stored FollowUp message ids), then the explicit *ticket_id*
    (following any merge redirect), else a brand-new ticket is created
    (unless QUEUE_EMAIL_BOX_UPDATE_ONLY forbids it). Closed tickets are
    re-opened. Attachments are processed, CC subscriptions created, and —
    unless the message looks like an auto-reply — notification emails sent.

    :param message: the parsed email message object (supports get/get_all)
    :param ticket_id: ticket id extracted from the subject, or None
    :param payload: dict with 'queue', 'sender_email', 'subject', 'body',
        'full_body' and 'priority' entries
    :param files: attachment files to add to the follow-up
    :param logger: queue-specific logger
    :return: the created or updated Ticket (None when update-only and no
        existing ticket matched)
    """
    ticket, previous_followup, new = None, None, False
    now = timezone.now()

    queue = payload['queue']
    sender_email = payload['sender_email']

    to_list = getaddresses(message.get_all('To', []))
    cc_list = getaddresses(message.get_all('Cc', []))

    message_id = message.get('Message-Id')
    in_reply_to = message.get('In-Reply-To')

    if message_id:
        message_id = message_id.strip()

    if in_reply_to:
        in_reply_to = in_reply_to.strip()

    # Prefer threading by In-Reply-To: find the follow-up we sent earlier.
    if in_reply_to is not None:
        try:
            queryset = FollowUp.objects.filter(
                message_id=in_reply_to).order_by('-date')
            if queryset.count() > 0:
                previous_followup = queryset.first()
                ticket = previous_followup.ticket
        except FollowUp.DoesNotExist:
            pass  # play along. The header may be wrong

    # Fall back to the ticket id parsed from the subject line.
    if previous_followup is None and ticket_id is not None:
        try:
            ticket = Ticket.objects.get(id=ticket_id)
        except Ticket.DoesNotExist:
            ticket = None
        else:
            new = False
            # Check if the ticket has been merged to another ticket
            if ticket.merged_to:
                logger.info("Ticket has been merged to %s" %
                            ticket.merged_to.ticket)
                # Use the ticket in which it was merged to for next operations
                ticket = ticket.merged_to

    # New issue, create a new <Ticket> instance
    if ticket is None:
        if not settings.QUEUE_EMAIL_BOX_UPDATE_ONLY:
            ticket = Ticket.objects.create(
                title=payload['subject'],
                queue=queue,
                submitter_email=sender_email,
                created=now,
                description=payload['body'],
                priority=payload['priority'],
            )
            ticket.save()
            logger.debug("Created new ticket %s-%s" %
                         (ticket.queue.slug, ticket.id))

            new = True
    # Old issue being re-opened
    elif ticket.status == Ticket.CLOSED_STATUS:
        ticket.status = Ticket.REOPENED_STATUS
        ticket.save()

    f = FollowUp(
        ticket=ticket,
        title=_('E-Mail Received from %(sender_email)s' %
                {'sender_email': sender_email}),
        date=now,
        public=True,
        comment=payload.get('full_body', payload['body']) or "",
        message_id=message_id
    )

    if ticket.status == Ticket.REOPENED_STATUS:
        f.new_status = Ticket.REOPENED_STATUS
        f.title = _('Ticket Re-Opened by E-Mail Received from %(sender_email)s' %
                    {'sender_email': sender_email})

    f.save()
    logger.debug("Created new FollowUp for Ticket")

    logger.info("[%s-%s] %s" % (ticket.queue.slug, ticket.id, ticket.title,))

    # Attachment failures are logged but do not abort ticket creation.
    try:
        attached = process_attachments(f, files)
    except ValidationError as e:
        logger.error(str(e))
    else:
        for att_file in attached:
            logger.info(
                "Attachment '%s' (with size %s) successfully added to ticket from email.",
                att_file[0], att_file[1].size
            )

    context = safe_template_context(ticket)

    new_ticket_ccs = []
    new_ticket_ccs.append(create_ticket_cc(ticket, to_list + cc_list))

    # Never auto-respond to an auto-responder — that creates mail loops.
    autoreply = is_autoreply(message)
    if autoreply:
        logger.info(
            "Message seems to be auto-reply, not sending any emails back to the sender")
    else:
        send_info_email(message_id, f, ticket, context, queue, new)
    return ticket
|
|
|
|
|
|
|
|
|
|
|
|
def send_info_email(message_id: str, f: FollowUp, ticket: Ticket, context: dict, queue: dict, new: bool):
    """Send notification emails after a ticket was created/updated by email.

    New tickets notify the submitter and CC lists; updates notify the
    submitter and assignee (plus CCs when the queue enables email-event
    notifications). All outgoing mail carries auto-reply headers so other
    auto-responders do not answer back.
    """
    # send mail to appropriate people now depending on what objects
    # were created and who was CC'd
    # Add auto-reply headers because it's an auto-reply and we must
    extra_headers = {
        'In-Reply-To': message_id,
        "Auto-Submitted": "auto-replied",
        "X-Auto-Response-Suppress": "All",
        "Precedence": "auto_reply",
    }
    if new:
        recipients = {
            'submitter': ('newticket_submitter', context),
            'new_ticket_cc': ('newticket_cc', context),
            'ticket_cc': ('newticket_cc', context),
        }
        ticket.send(recipients, fail_silently=True, extra_headers=extra_headers)
        return

    context.update(comment=f.comment)
    ticket.send(
        {'submitter': ('updated_submitter', context),
         'assigned_to': ('updated_owner', context)},
        fail_silently=True,
        extra_headers=extra_headers,
    )
    if queue.enable_notifications_on_email_events:
        ticket.send(
            {'ticket_cc': ('updated_cc', context)},
            fail_silently=True,
            extra_headers=extra_headers,
        )
|
2018-12-28 16:53:28 +01:00
|
|
|
|
|
|
|
|
2022-07-25 01:56:13 +02:00
|
|
|
def get_ticket_id_from_subject_slug(
    queue_slug: str,
    subject: str,
    logger: logging.Logger
) -> typing.Optional[str]:
    """Get a ticket id from the subject string

    Performs a match on the subject using the queue_slug as reference,
    returning the ticket id if a match is found.

    Note: the id is returned exactly as matched — a string of digits, not an
    int (the previous ``Optional[int]`` annotation was incorrect; callers
    receive ``matchobj.group('id')`` unchanged).
    """
    matchobj = re.match(r".*\[" + queue_slug + r"-(?P<id>\d+)\]", subject)
    ticket_id = None
    if matchobj:
        # This is a reply or forward.
        ticket_id = matchobj.group('id')
        logger.info("Matched tracking ID %s-%s" % (queue_slug, ticket_id))
    else:
        logger.info("No tracking ID matched.")
    return ticket_id
|
|
|
|
|
|
|
|
|
2022-07-25 02:22:32 +02:00
|
|
|
def add_file_if_always_save_incoming_email_message(
    files_,
    message: str
) -> None:
    """When `settings.HELPDESK_ALWAYS_SAVE_INCOMING_EMAIL_MESSAGE` is `True`
    add a file to the files_ list"""
    if not getattr(django_settings, 'HELPDESK_ALWAYS_SAVE_INCOMING_EMAIL_MESSAGE', False):
        return
    # save message as attachment in case of some complex markup renders
    # wrong
    timestamp_suffix = timezone.localtime().strftime("_%d-%m-%Y_%H:%M") + ".eml"
    attachment_name = _("original_message.eml").replace(".eml", timestamp_suffix)
    files_.append(
        SimpleUploadedFile(
            attachment_name,
            str(message).encode("utf-8"),
            'text/plain'
        )
    )
|
|
|
|
|
|
|
|
|
|
|
|
def get_encoded_body(body: str) -> str:
    """Interpret backslash escape sequences in *body*.

    The text is round-tripped through ASCII and ``unicode_escape`` so that
    literal escapes like ``\\n`` become real characters. Non-ASCII input is
    returned unchanged since it cannot be ASCII-encoded.
    """
    try:
        ascii_bytes = body.encode('ascii')
    except UnicodeEncodeError:
        return body
    return ascii_bytes.decode('unicode_escape')
|
2022-07-25 02:22:32 +02:00
|
|
|
|
|
|
|
|
|
|
|
def get_body_from_fragments(body) -> str:
    """Gets a body from the fragments, joined by a double line break"""
    fragments = EmailReplyParser.read(body).fragments
    return "\n\n".join(fragment.content for fragment in fragments)
|
|
|
|
|
|
|
|
|
|
|
|
def get_email_body_from_part_payload(part) -> str:
    """Gets an decoded body from the payload part, if the decode fails,
    returns without encoding"""
    try:
        return encoding.smart_str(part.get_payload(decode=True))
    except UnicodeDecodeError:
        # Decoding failed — fall back to the raw (undecoded) payload.
        return encoding.smart_str(part.get_payload(decode=False))
|
|
|
|
|
2022-09-08 23:11:24 +02:00
|
|
|
|
2022-09-04 23:01:32 +02:00
|
|
|
def attempt_body_extract_from_html(message: str) -> typing.Tuple[str, typing.Optional[str]]:
    """Extract a plain-text body from an HTML email message.

    Note: the previous ``-> str`` annotation was wrong — this function
    returns a ``(body, full_body)`` tuple: the text content of the
    ``<body>`` element (empty string when nothing could be extracted) and
    the same text as the full body, or ``None`` when no ``<body>`` tag was
    found or its text could not be read.
    """
    mail = BeautifulSoup(str(message), "html.parser")
    beautiful_body = mail.find('body')
    body = None
    full_body = None
    if beautiful_body:
        try:
            body = beautiful_body.text
            full_body = body
        except AttributeError:
            pass
    if not body:
        body = ""
    return body, full_body
|
|
|
|
|
2022-09-04 23:01:32 +02:00
|
|
|
|
2023-07-23 07:31:29 +02:00
|
|
|
def extract_mime_content(part: Message,) -> str:
    '''
    Extract the content from the MIME body part

    :param part: the MIME part to extract the content from
    '''
    payload_bytes = part.get_payload(decode=True)
    charset = part.get_content_charset()
    # The default for MIME email is 7bit which requires special decoding to utf-8 so make sure
    # we handle the decoding correctly
    transfer_encoding = part['Content-Transfer-Encoding']
    if transfer_encoding in (None, '8bit', '7bit') and charset in ('utf-8', None):
        charset = "unicode_escape"
    return decodeUnknown(charset, payload_bytes)
|
|
|
|
|
|
|
|
|
|
|
|
def extract_email_message(mime_content: str, is_plain_content_type: bool, is_extract_full_email_msg: bool) -> str:
    """Return the ticket text for *mime_content*.

    Non-plain content is passed through untouched. For text/plain content,
    either the full fragment chain (including quoted "forwarded"/"reply"
    sections) or just the primary reply text is returned, depending on
    *is_extract_full_email_msg*.
    """
    if not is_plain_content_type:
        return mime_content
    if is_extract_full_email_msg:
        # Take the full content including encapsulated "forwarded" and "reply" sections
        return get_body_from_fragments(mime_content)
    # Just get the primary part of the email and drop off any text below the actually response text
    return EmailReplyParser.parse_reply(mime_content)
|
|
|
|
|
|
|
|
|
|
|
|
def process_as_attachment(
        part: Message,
        counter: int,
        files: List,
        logger: logging.Logger
):
    """
    Store a MIME part as an uploaded-file attachment.

    :param part: the MIME part to store
    :param counter: index of this part within the message, used to build a
        unique attachment filename
    :param files: list to which the resulting SimpleUploadedFile is appended
    :param logger: the logger to be used
    """
    name = part.get_filename()
    if name:
        name = f"part-{counter}_{email.utils.collapse_rfc2231_value(name)}"
    else:
        ext = mimetypes.guess_extension(part.get_content_type())
        # guess_extension() returns None for unknown content types; fall back
        # to a generic extension rather than embedding the literal "None"
        if ext is None:
            ext = ".bin"
        name = f"part-{counter}{ext}"
    # Extract payload accounting for attached multiparts
    payload = part.as_string() if part.is_multipart() else part.get_payload(decode=True)
    files.append(SimpleUploadedFile(name, payload, mimetypes.guess_type(name)[0]))
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Processed MIME as attachment: %s", name)
    return
|
|
|
|
|
|
|
|
|
|
|
|
def extract_email_subject(email_msg: Message,) -> str:
    """
    Return the cleaned-up subject of the given email message.

    Falls back to a default localized subject when the header is absent,
    decodes the header value, removes every substring listed in
    STRIPPED_SUBJECT_STRINGS, and strips surrounding whitespace.

    :param email_msg: the parsed email message
    """
    raw_subject = email_msg.get('subject', _('Comment from e-mail'))
    decoded = decodeUnknown(email_msg.get_charset(), raw_subject)
    cleaned = decode_mail_headers(decoded)
    for unwanted in STRIPPED_SUBJECT_STRINGS:
        cleaned = cleaned.replace(unwanted, "")
    return cleaned.strip()
|
2022-07-25 02:22:32 +02:00
|
|
|
|
2022-09-08 23:11:24 +02:00
|
|
|
|
2023-07-23 07:12:32 +02:00
|
|
|
def extract_email_metadata(message: str,
                           queue: Queue,
                           logger: logging.Logger
                           ) -> Ticket:
    '''
    Extracts the text/plain mime part if there is one as the ticket description and
    stores the text/html part as an attachment if it is present.
    If no text/plain part is present then it will try to use the text/html part if
    it is present as the ticket description by removing the HTML formatting.
    If neither a text/plain or text/html is present then it will use the first text/*
    MIME part that it finds as the ticket description.
    By default it will always take only the actual message and drop any chained messages
    from replies.
    The HELPDESK_FULL_FIRST_MESSAGE_FROM_EMAIL settings can force the entire message to be
    stored in the ticket if it is a new ticket by setting it to True.
    In this scenario, if it is a reply that is a forwarded message with no actual message,
    then the description will be sourced from the text/html part and the forwarded message
    will be in the FollowUp record associated with the ticket.
    It will iterate over every MIME part and store all MIME parts as attachments apart
    from the text/plain part.
    There may be a case for trying to exclude repeated signature images by checking if an
    attachment of the same name already exists as an attachment on the ticket but that is
    not implemented.

    :param message: the raw email message received
    :param queue: the queue that the email was sent to
    :param logger: the logger to be used
    :raises IgnoreTicketException: when the sender matches an IgnoreEmail rule that keeps mail in the mailbox
    :raises DeleteIgnoredTicketException: when the sender matches an IgnoreEmail rule that deletes mail
    '''
    # 'message' must be an RFC822 formatted message to correctly parse.
    message_obj: Message = email.message_from_string(message)

    subject = extract_email_subject(message_obj)

    sender_email = _('Unknown Sender')
    sender_hdr = message_obj.get('from')
    if sender_hdr:
        # Parse the header which extracts the first email address in the list if more than one
        # The parseaddr method returns a tuple in the form <real name> <email address>
        # Only need the actual email address from the tuple not the "real name"
        # Since the spec requires that all email addresses are ASCII, they will not be encoded
        sender_email = email.utils.parseaddr(sender_hdr)[1]

    # Honour ignore rules scoped to this queue (or to all queues) before doing any work
    for ignore in IgnoreEmail.objects.filter(Q(queues=queue) | Q(queues__isnull=True)):
        if ignore.test(sender_email):
            raise IgnoreTicketException() if ignore.keep_in_mailbox else DeleteIgnoredTicketException()

    # None when this is a brand-new ticket; an existing ticket id when the subject
    # carries a "[queue-slug-1234]" style reference
    ticket_id: typing.Optional[int] = get_ticket_id_from_subject_slug(
        queue.slug,
        subject,
        logger
    )
    # Decoded text/plain part content, once found
    plain_body: typing.Optional[str] = None
    # Decoded text/html part content, once found
    formatted_body: typing.Optional[str] = None
    counter = 0
    files = []
    # Fallback body source: content of the first non-multipart MIME part encountered
    # (NOTE(review): despite the original MIMEText annotation, this holds the decoded
    # str returned by extract_mime_content)
    first_mime_non_multipart_content: typing.Optional[str] = None
    # Cycle through all MIME parts in the email extracting the plain and formatted messages
    # Algorithm uses the first text parts found as the actual email content and subsequent text parts
    # are made into attachments so they do not get lost
    for part in message_obj.walk():
        part_main_type = part.get_content_maintype()
        if part_main_type == 'multipart':
            # Containers carry no payload of their own; walk() yields their children
            continue
        if part.get_content_disposition() in ['inline', 'attachment']:
            process_as_attachment(part, counter, files, logger)
        else:
            # Get the content then assign to plain for formatted email message otherwise store the
            # content as an attachment
            mime_content = extract_mime_content(part)
            if first_mime_non_multipart_content is None:
                first_mime_non_multipart_content = mime_content
            if part_main_type == 'text':
                # Could be the body of the email
                part_sub_type = part.get_content_subtype()
                if plain_body is None and part_sub_type == "plain":
                    plain_body = mime_content
                elif formatted_body is None and part_sub_type == "html":
                    formatted_body = mime_content
                    # Wrap a bare HTML fragment in a <body> so the stored
                    # attachment is a renderable document
                    if "<body" not in formatted_body:
                        email_body = f"<body>{formatted_body}</body>"
                    else:
                        email_body = formatted_body

                    payload = (
                        '<html>'
                        '<head>'
                        '<meta charset="utf-8" />'
                        '</head>'
                        '%s'
                        '</html>'
                    ) % email_body
                    # Preserve the original HTML rendering as an attachment
                    files.append(
                        SimpleUploadedFile(
                            HTML_EMAIL_ATTACHMENT_FILENAME, payload.encode("utf-8"), 'text/html')
                    )
                else:
                    # Theoretically should not happen to properly structured emails but process anything
                    # else as an attachment
                    process_as_attachment(part, counter, files, logger)
                    logger.debug(f"Text MIME part added as attachment: {part.get_content_type()}")
            else:
                # process anything else as an attachment
                process_as_attachment(part, counter, files, logger)
        counter += 1
    # Check if we have at least the plain body
    if not plain_body:
        if formatted_body:
            # We have a formatted body but no plain text body
            plain_body, _x = attempt_body_extract_from_html(formatted_body)
        else:
            # Something wrong with email or a processing issue so try first part or save full email message
            if first_mime_non_multipart_content:
                plain_body = extract_email_message(first_mime_non_multipart_content, True, True)
            else:
                plain_body = message
    # first message in thread, we save full body to avoid losing forwards and things like that
    include_chained_msgs = True if ticket_id is None and getattr(
        django_settings, 'HELPDESK_FULL_FIRST_MESSAGE_FROM_EMAIL', False) else False
    message_body = extract_email_message(plain_body, True, include_chained_msgs)
    # Only need the full message if the message_body excludes the chained messages
    chained_email_message = None if include_chained_msgs else plain_body
    # Not sure this is valid but a unit test uses a DIFFERENT plain text to html text body
    # where plain text has blank message with forwarded message so.... hack away to support it
    if message_body is not None and len(message_body) == 0 and formatted_body and len(formatted_body) > 0:
        message_body, _x = attempt_body_extract_from_html(formatted_body)
        # Set the chained message to the orignal plain text full message so it is stored in a FollowUp comments field
        if len(plain_body) > 0:
            chained_email_message = plain_body

    add_file_if_always_save_incoming_email_message(files, message)

    # Map SMTP priority/importance headers onto the helpdesk priority scale
    # (2 = high, 3 = normal)
    smtp_priority = message_obj.get('priority', '')
    smtp_importance = message_obj.get('importance', '')
    high_priority_types = {'high', 'important', '1', 'urgent'}
    priority = 2 if high_priority_types & {
        smtp_priority, smtp_importance} else 3

    payload = {
        'body': message_body,
        'full_body': chained_email_message,
        'subject': subject,
        'queue': queue,
        'sender_email': sender_email,
        'priority': priority,
        'files': files,
    }

    return create_object_from_email_message(message_obj, ticket_id, payload, files, logger=logger)
|