"""
Django Helpdesk - A Django powered ticket tracker for small enterprise.

(c) Copyright 2008 Jutda. Copyright 2018 Timothy Hobbs. All Rights Reserved.

See LICENSE for details.
"""
# import base64

from bs4 import BeautifulSoup
from datetime import timedelta
from django.conf import settings as django_settings
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db.models import Q
from django.utils import encoding, timezone
from django.utils.translation import gettext as _
import email
from email.utils import getaddresses
from email_reply_parser import EmailReplyParser
from helpdesk import settings
from helpdesk.lib import process_attachments, safe_template_context
from helpdesk.models import FollowUp, IgnoreEmail, Queue, Ticket
import imaplib
import logging
import mimetypes
import os
from os.path import isfile, join
import poplib
import re
import socket
import ssl
import sys
from time import ctime
import typing


# import User model, which may be a custom model
User = get_user_model()


STRIPPED_SUBJECT_STRINGS = [
    "Re: ",
    "Fw: ",
    "RE: ",
    "FW: ",
    "Automatic reply: ",
]
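
# Illustrative note (added commentary, not original logic): these prefixes are
# removed verbatim from incoming subjects before any tracking-ID matching, so a
# hypothetical subject such as
#     "RE: [support-42] Printer broken"
# becomes
#     "[support-42] Printer broken"
# before get_ticket_id_from_subject_slug() below looks for the "[support-42]" marker.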


def process_email(quiet=False):
    for q in Queue.objects.filter(
            email_box_type__isnull=False,
            allow_email_submission=True):

        logger = logging.getLogger('django.helpdesk.queue.' + q.slug)
        logging_types = {
            'info': logging.INFO,
            'warn': logging.WARN,
            'error': logging.ERROR,
            'crit': logging.CRITICAL,
            'debug': logging.DEBUG,
        }
        if q.logging_type in logging_types:
            logger.setLevel(logging_types[q.logging_type])
        elif not q.logging_type or q.logging_type == 'none':
            # disable all handlers so messages go to nowhere
            logger.handlers = []
            logger.propagate = False
        if quiet:
            logger.propagate = False  # do not propagate to root logger that would log to console

        # Log messages to a specific file only if the queue has it configured
        if (q.logging_type in logging_types) and q.logging_dir:  # if it's enabled and the dir is set
            log_file_handler = logging.FileHandler(
                join(q.logging_dir, q.slug + '_get_email.log'))
            logger.addHandler(log_file_handler)
        else:
            log_file_handler = None

        try:
            if not q.email_box_last_check:
                q.email_box_last_check = timezone.now() - timedelta(minutes=30)

            queue_time_delta = timedelta(minutes=q.email_box_interval or 0)
            if (q.email_box_last_check + queue_time_delta) < timezone.now():
                process_queue(q, logger=logger)
                q.email_box_last_check = timezone.now()
                q.save()
        finally:
            # we must close the file handler correctly if it was created
            try:
                if log_file_handler:
                    log_file_handler.close()
            except Exception as e:
                logging.exception(e)
            try:
                if log_file_handler:
                    logger.removeHandler(log_file_handler)
            except Exception as e:
                logging.exception(e)
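
# Usage sketch (assumption, not taken from this file): process_email() is the
# periodic entry point, normally driven by django-helpdesk's ``get_email``
# management command or a cron job. Assuming this module lives at
# helpdesk/email.py, a manual run from a Django shell would look roughly like:
#
#     from helpdesk.email import process_email
#     process_email(quiet=True)   # quiet=True keeps log output off the console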


def pop3_sync(q, logger, server):
    server.getwelcome()
    try:
        server.stls()
    except Exception:
        logger.warning(
            "POP3 StartTLS failed or unsupported. Connection will be unencrypted.")
    server.user(q.email_box_user or settings.QUEUE_EMAIL_BOX_USER)
    server.pass_(q.email_box_pass or settings.QUEUE_EMAIL_BOX_PASSWORD)

    messagesInfo = server.list()[1]
    logger.info("Received %d messages from POP3 server" % len(messagesInfo))

    for msgRaw in messagesInfo:
        if type(msgRaw) is bytes:
            try:
                msg = msgRaw.decode("utf-8")
            except UnicodeError:
                # if we couldn't decode it easily, just leave it raw
                msg = msgRaw
        else:
            # already a str
            msg = msgRaw
        msgNum = msg.split(" ")[0]
        logger.info("Processing message %s" % msgNum)

        raw_content = server.retr(msgNum)[1]
        if type(raw_content[0]) is bytes:
            full_message = "\n".join([elm.decode('utf-8')
                                      for elm in raw_content])
        else:
            full_message = encoding.force_str(
                "\n".join(raw_content), errors='replace')
        ticket = object_from_message(
            message=full_message, queue=q, logger=logger)

        if ticket:
            server.dele(msgNum)
            logger.info(
                "Successfully processed message %s, deleted from POP3 server" % msgNum)
        else:
            logger.warning(
                "Message %s was not successfully processed, and will be left on POP3 server" % msgNum)

    server.quit()


def imap_sync(q, logger, server):
    try:
        try:
            server.starttls()
        except Exception:
            logger.warning(
                "IMAP4 StartTLS unsupported or failed. Connection will be unencrypted.")
        server.login(q.email_box_user or
                     settings.QUEUE_EMAIL_BOX_USER,
                     q.email_box_pass or
                     settings.QUEUE_EMAIL_BOX_PASSWORD)
        server.select(q.email_box_imap_folder)
    except imaplib.IMAP4.abort:
        logger.error(
            "IMAP login failed. Check that the server is accessible and that "
            "the username and password are correct."
        )
        server.logout()
        sys.exit()
    except ssl.SSLError:
        logger.error(
            "IMAP login failed due to SSL error. This is often due to a timeout. "
            "Please check your connection and try again."
        )
        server.logout()
        sys.exit()

    try:
        data = server.search(None, 'NOT', 'DELETED')[1]
        if data:
            msgnums = data[0].split()
            logger.info("Received %d messages from IMAP server" % len(msgnums))
            for num in msgnums:
                logger.info("Processing message %s" % num)
                data = server.fetch(num, '(RFC822)')[1]
                full_message = encoding.force_str(data[0][1], errors='replace')
                try:
                    ticket = object_from_message(
                        message=full_message, queue=q, logger=logger)
                except TypeError:
                    ticket = None  # hotfix. Need to work out WHY.
                if ticket:
                    server.store(num, '+FLAGS', '\\Deleted')
                    logger.info(
                        "Successfully processed message %s, deleted from IMAP server" % num)
                else:
                    logger.warning(
                        "Message %s was not successfully processed, and will be left on IMAP server" % num)
    except imaplib.IMAP4.error:
        logger.error(
            "IMAP retrieve failed. Is the folder '%s' spelled correctly, and does it exist on the server?",
            q.email_box_imap_folder
        )

    server.expunge()
    server.close()
    server.logout()


def process_queue(q, logger):
    logger.info("***** %s: Begin processing mail for django-helpdesk" % ctime())

    if q.socks_proxy_type and q.socks_proxy_host and q.socks_proxy_port:
        try:
            import socks
        except ImportError:
            no_socks_msg = "Queue has been configured with proxy settings, " \
                           "but no socks library was installed. Try to " \
                           "install PySocks via PyPI."
            logger.error(no_socks_msg)
            raise ImportError(no_socks_msg)

        proxy_type = {
            'socks4': socks.SOCKS4,
            'socks5': socks.SOCKS5,
        }.get(q.socks_proxy_type)

        socks.set_default_proxy(proxy_type=proxy_type,
                                addr=q.socks_proxy_host,
                                port=q.socks_proxy_port)
        socket.socket = socks.socksocket

    email_box_type = settings.QUEUE_EMAIL_BOX_TYPE or q.email_box_type

    mail_defaults = {
        'pop3': {
            'ssl': {
                'port': 995,
                'init': poplib.POP3_SSL,
            },
            'insecure': {
                'port': 110,
                'init': poplib.POP3,
            },
            'sync': pop3_sync,
        },
        'imap': {
            'ssl': {
                'port': 993,
                'init': imaplib.IMAP4_SSL,
            },
            'insecure': {
                'port': 143,
                'init': imaplib.IMAP4,
            },
            'sync': imap_sync
        }
    }
    if email_box_type in mail_defaults:
        encryption = 'insecure'
        if q.email_box_ssl or settings.QUEUE_EMAIL_BOX_SSL:
            encryption = 'ssl'
        if not q.email_box_port:
            q.email_box_port = mail_defaults[email_box_type][encryption]['port']

        server = mail_defaults[email_box_type][encryption]['init'](
            q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST,
            int(q.email_box_port)
        )
        logger.info("Attempting %s server login" % email_box_type.upper())
        mail_defaults[email_box_type]['sync'](q, logger, server)

    elif email_box_type == 'local':
        mail_dir = q.email_box_local_dir or '/var/lib/mail/helpdesk/'
        mail = [join(mail_dir, f)
                for f in os.listdir(mail_dir) if isfile(join(mail_dir, f))]
        logger.info("Found %d messages in local mailbox directory" % len(mail))

        for i, m in enumerate(mail, 1):
            logger.info("Processing message %d" % i)
            with open(m, 'r') as f:
                full_message = encoding.force_str(f.read(), errors='replace')
                ticket = object_from_message(
                    message=full_message, queue=q, logger=logger)
            if ticket:
                logger.info(
                    "Successfully processed message %d, ticket/comment created.", i)
                try:
                    # delete message file if ticket was successful
                    os.unlink(m)
                except OSError as e:
                    logger.error(
                        "Unable to delete message %d (%s).", i, str(e))
                else:
                    logger.info("Successfully deleted message %d.", i)
            else:
                logger.warning(
                    "Message %d was not successfully processed, and will be left in local directory", i)
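
# Configuration sketch (hypothetical values, for illustration only): process_queue()
# pulls its connection details from the Queue fields referenced above. A minimal
# IMAP queue might be created roughly like this; required fields not used by this
# module are omitted:
#
#     q = Queue.objects.create(
#         title="Support", slug="support",
#         allow_email_submission=True,
#         email_box_type="imap",             # 'pop3', 'imap' or 'local'
#         email_box_host="mail.example.com",
#         email_box_ssl=True,                # default port becomes 993 (IMAP) / 995 (POP3)
#         email_box_imap_folder="INBOX",
#         email_box_user="helpdesk@example.com",
#         email_box_pass="secret",
#     )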


def decodeUnknown(charset, string):
    if type(string) is not str:
        if not charset:
            try:
                return str(string, encoding='utf-8', errors='replace')
            except UnicodeError:
                return str(string, encoding='iso8859-1', errors='replace')
        return str(string, encoding=charset, errors='replace')
    return string
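
# Examples (illustrative): decodeUnknown() only touches byte strings; it decodes
# with the declared charset, or guesses UTF-8 (falling back to ISO-8859-1) when no
# charset is given, and returns str input unchanged:
#
#     decodeUnknown('utf-8', b'caf\xc3\xa9')   # -> 'café'
#     decodeUnknown(None, b'caf\xc3\xa9')      # -> 'café' (UTF-8 guess)
#     decodeUnknown('utf-8', 'already str')    # -> 'already str'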


def decode_mail_headers(string):
    decoded = email.header.decode_header(string)
    return u' '.join([
        str(msg, encoding=charset, errors='replace') if charset else str(msg)
        for msg, charset
        in decoded
    ])


def is_autoreply(message):
    """
    Accepts any object with a .get(header_name) method, such as an
    email.message.Message instance.

    Returns True if the message is likely to be an auto-reply, False otherwise,
    so that we do not start mail loops by answering it.
    """
    any_if_this = [
        False if not message.get(
            "Auto-Submitted") else message.get("Auto-Submitted").lower() != "no",
        True if message.get("X-Auto-Response-Suppress") in ("DR",
                                                            "AutoReply", "All") else False,
        message.get("List-Id"),
        message.get("List-Unsubscribe"),
    ]
    return any(any_if_this)
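
# Examples (illustrative, header values taken from the checks above): a message
# carrying any of these headers is treated as automated and gets no reply:
#
#     Auto-Submitted: auto-replied        (any value other than "no")
#     X-Auto-Response-Suppress: All       (also "DR" or "AutoReply")
#     List-Id: <announce.example.com>     (mailing-list traffic)
#     List-Unsubscribe: <mailto:...>
#
#     is_autoreply(email.message_from_string("Auto-Submitted: auto-replied\n\nhi"))  # -> True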


def create_ticket_cc(ticket, cc_list):

    if not cc_list:
        return []

    # Local import to deal with non-defined / circular reference problem
    from helpdesk.views.staff import subscribe_to_ticket_updates, User

    new_ticket_ccs = []
    for __, cced_email in cc_list:

        cced_email = cced_email.strip()
        if cced_email == ticket.queue.email_address:
            continue

        user = None

        try:
            user = User.objects.get(email=cced_email)  # @UndefinedVariable
        except User.DoesNotExist:
            pass

        try:
            ticket_cc = subscribe_to_ticket_updates(
                ticket=ticket, user=user, email=cced_email)
            new_ticket_ccs.append(ticket_cc)
        except ValidationError:
            pass

    return new_ticket_ccs


def create_object_from_email_message(message, ticket_id, payload, files, logger):

    ticket, previous_followup, new = None, None, False
    now = timezone.now()

    queue = payload['queue']
    sender_email = payload['sender_email']

    to_list = getaddresses(message.get_all('To', []))
    cc_list = getaddresses(message.get_all('Cc', []))

    message_id = message.get('Message-Id')
    in_reply_to = message.get('In-Reply-To')

    if message_id:
        message_id = message_id.strip()

    if in_reply_to:
        in_reply_to = in_reply_to.strip()

    if in_reply_to is not None:
        try:
            queryset = FollowUp.objects.filter(
                message_id=in_reply_to).order_by('-date')
            if queryset.count() > 0:
                previous_followup = queryset.first()
                ticket = previous_followup.ticket
        except FollowUp.DoesNotExist:
            pass  # play along. The header may be wrong

    if previous_followup is None and ticket_id is not None:
        try:
            ticket = Ticket.objects.get(id=ticket_id)
        except Ticket.DoesNotExist:
            ticket = None
        else:
            new = False
            # Check if the ticket has been merged to another ticket
            if ticket.merged_to:
                logger.info("Ticket has been merged to %s" %
                            ticket.merged_to.ticket)
                # Use the ticket it was merged into for the next operations
                ticket = ticket.merged_to

    # New issue, create a new <Ticket> instance
    if ticket is None:
        if not settings.QUEUE_EMAIL_BOX_UPDATE_ONLY:
            ticket = Ticket.objects.create(
                title=payload['subject'],
                queue=queue,
                submitter_email=sender_email,
                created=now,
                description=payload['body'],
                priority=payload['priority'],
            )
            ticket.save()
            logger.debug("Created new ticket %s-%s" %
                         (ticket.queue.slug, ticket.id))

            new = True

    # Old issue being re-opened
    elif ticket.status == Ticket.CLOSED_STATUS:
        ticket.status = Ticket.REOPENED_STATUS
        ticket.save()

    f = FollowUp(
        ticket=ticket,
        title=_('E-Mail Received from %(sender_email)s' %
                {'sender_email': sender_email}),
        date=now,
        public=True,
        comment=payload.get('full_body', payload['body']) or "",
        message_id=message_id
    )

    if ticket.status == Ticket.REOPENED_STATUS:
        f.new_status = Ticket.REOPENED_STATUS
        f.title = _('Ticket Re-Opened by E-Mail Received from %(sender_email)s' %
                    {'sender_email': sender_email})

    f.save()
    logger.debug("Created new FollowUp for Ticket")

    logger.info("[%s-%s] %s" % (ticket.queue.slug, ticket.id, ticket.title,))

    attached = process_attachments(f, files)
    for att_file in attached:
        logger.info(
            "Attachment '%s' (with size %s) successfully added to ticket from email.",
            att_file[0], att_file[1].size
        )

    context = safe_template_context(ticket)

    new_ticket_ccs = []
    new_ticket_ccs.append(create_ticket_cc(ticket, to_list + cc_list))

    autoreply = is_autoreply(message)
    if autoreply:
        logger.info(
            "Message seems to be auto-reply, not sending any emails back to the sender")
    else:
        # send mail to appropriate people now depending on what objects
        # were created and who was CC'd.
        # Add auto-reply headers so our own notifications are flagged as
        # automated and do not trigger further auto-replies (mail loops).
        extra_headers = {
            'In-Reply-To': message_id,
            "Auto-Submitted": "auto-replied",
            "X-Auto-Response-Suppress": "All",
            "Precedence": "auto_reply",
        }
        if new:
            ticket.send(
                {'submitter': ('newticket_submitter', context),
                 'new_ticket_cc': ('newticket_cc', context),
                 'ticket_cc': ('newticket_cc', context)},
                fail_silently=True,
                extra_headers=extra_headers,
            )
        else:
            context.update(comment=f.comment)
            ticket.send(
                {'submitter': ('newticket_submitter', context),
                 'assigned_to': ('updated_owner', context)},
                fail_silently=True,
                extra_headers=extra_headers,
            )
            if queue.enable_notifications_on_email_events:
                ticket.send(
                    {'ticket_cc': ('updated_cc', context)},
                    fail_silently=True,
                    extra_headers=extra_headers,
                )

    return ticket


def get_ticket_id_from_subject_slug(
    queue_slug: str,
    subject: str,
    logger: logging.Logger
) -> typing.Optional[int]:
    """Get a ticket id from the subject string

    Performs a match on the subject using the queue_slug as reference,
    returning the ticket id if a match is found.
    """
    matchobj = re.match(r".*\[" + queue_slug + r"-(?P<id>\d+)\]", subject)
    ticket_id = None
    if matchobj:
        # This is a reply or forward.
        ticket_id = matchobj.group('id')
        logger.info("Matched tracking ID %s-%s" % (queue_slug, ticket_id))
    else:
        logger.info("No tracking ID matched.")
    return ticket_id
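
# Example (illustrative): with queue_slug "support", a subject that carries the
# tracking marker matches like this:
#
#     get_ticket_id_from_subject_slug("support", "[support-42] Printer broken", logger)
#     # -> "42"
#
# Note that the captured group is returned as a string despite the Optional[int]
# annotation; Django's ORM coerces it when it is later used as a primary-key lookup.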


def add_file_if_always_save_incoming_email_message(
    files_,
    message: str
) -> None:
    """When `settings.HELPDESK_ALWAYS_SAVE_INCOMING_EMAIL_MESSAGE` is `True`,
    append the original message to the files_ list."""
    if getattr(django_settings, 'HELPDESK_ALWAYS_SAVE_INCOMING_EMAIL_MESSAGE', False):
        # save the original message as an attachment, in case complex markup
        # renders incorrectly
        files_.append(
            SimpleUploadedFile(
                _("original_message.eml").replace(
                    ".eml",
                    timezone.localtime().strftime("_%d-%m-%Y_%H:%M") + ".eml"
                ),
                str(message).encode("utf-8"),
                'text/plain'
            )
        )


def get_encoded_body(body: str) -> str:
    try:
        return body.encode('ascii').decode('unicode_escape')
    except UnicodeEncodeError:
        return body
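
# Examples (illustrative): get_encoded_body() un-escapes bodies that arrived as
# ASCII text containing literal backslash escapes, and leaves real unicode alone:
#
#     get_encoded_body('caf\\xe9')   # -> 'café'  (the 7-character string caf\xe9)
#     get_encoded_body('café')       # -> 'café'  (encode('ascii') fails, body returned as-is)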


def get_body_from_fragments(body) -> str:
    """Gets a body from the fragments, joined by a double line break"""
    return "\n\n".join(f.content for f in EmailReplyParser.read(body).fragments)


def get_email_body_from_part_payload(part) -> str:
    """Gets a decoded body from the payload part; if decoding fails,
    returns the payload without decoding."""
    try:
        return encoding.smart_str(
            part.get_payload(decode=True)
        )
    except UnicodeDecodeError:
        return encoding.smart_str(
            part.get_payload(decode=False)
        )


def object_from_message(message: str,
                        queue: Queue,
                        logger: logging.Logger
                        ) -> Ticket:
    # 'message' must be an RFC822 formatted message.
    message = email.message_from_string(message)

    subject = message.get('subject', _('Comment from e-mail'))
    subject = decode_mail_headers(
        decodeUnknown(message.get_charset(), subject))
    for affix in STRIPPED_SUBJECT_STRINGS:
        subject = subject.replace(affix, "")
    subject = subject.strip()

    sender = message.get('from', _('Unknown Sender'))
    sender = decode_mail_headers(decodeUnknown(message.get_charset(), sender))

    # to address bug #832, we wrap all the text in front of the email address in
    # double quotes by using replace() on the email string. Then,
    # take first item of list, second item of tuple is the actual email address.
    # Note that the replace won't work on just an email with no real name,
    # but the getaddresses() function seems to be able to handle just unclosed quotes
    # correctly. Not ideal, but this seems to work for now.
    sender_email = email.utils.getaddresses(
        ['\"' + sender.replace('<', '\" <')])[0][1]

    for ignore in IgnoreEmail.objects.filter(Q(queues=queue) | Q(queues__isnull=True)):
        if ignore.test(sender_email):
            # By returning 'False' the message will be kept in the mailbox,
            # and the 'True' will cause the message to be deleted.
            return not ignore.keep_in_mailbox

    ticket_id: typing.Optional[int] = get_ticket_id_from_subject_slug(
        queue.slug,
        subject,
        logger
    )

    body = None
    full_body = None
    counter = 0
    files = []

    for part in message.walk():
        if part.get_content_maintype() == 'multipart':
            continue

        name = part.get_param("name")
        if name:
            name = email.utils.collapse_rfc2231_value(name)

        if part.get_content_maintype() == 'text' and name is None:
            if part.get_content_subtype() == 'plain':
                body = part.get_payload(decode=True)
                # https://github.com/django-helpdesk/django-helpdesk/issues/732
                if part['Content-Transfer-Encoding'] == '8bit' and part.get_content_charset() == 'utf-8':
                    body = body.decode('unicode_escape')
                body = decodeUnknown(part.get_content_charset(), body)
                # have to use django_settings here so overwriting it works in tests
                # the default value is False anyway
                if ticket_id is None and getattr(django_settings, 'HELPDESK_FULL_FIRST_MESSAGE_FROM_EMAIL', False):
                    # first message in thread, we save full body to avoid
                    # losing forwards and things like that
                    full_body = get_body_from_fragments(body)
                    body = EmailReplyParser.parse_reply(body)
                else:
                    # second and other reply, save only first part of the
                    # message
                    body = EmailReplyParser.parse_reply(body)
                    full_body = body
                # workaround to get unicode text out rather than escaped text
                body = get_encoded_body(body)
                logger.debug("Discovered plain text MIME part")
            else:
                email_body = get_email_body_from_part_payload(part)

                if not body and not full_body:
                    # no text has been parsed so far - try such deep parsing
                    # for some messages
                    altered_body = email_body.replace(
                        "</p>", "</p>\n").replace("<br", "\n<br")
                    mail = BeautifulSoup(str(altered_body), "html.parser")
                    full_body = mail.get_text()

                if "<body" not in email_body:
                    email_body = f"<body>{email_body}</body>"

                payload = (
                    '<html>'
                    '<head>'
                    '<meta charset="utf-8" />'
                    '</head>'
                    '%s'
                    '</html>'
                ) % email_body
                files.append(
                    SimpleUploadedFile(
                        _("email_html_body.html"), payload.encode("utf-8"), 'text/html')
                )
                logger.debug("Discovered HTML MIME part")
        else:
            if not name:
                ext = mimetypes.guess_extension(part.get_content_type())
                name = "part-%i%s" % (counter, ext)
            else:
                name = ("part-%i_" % counter) + name

            # # FIXME: this code gets the payloads, then does something with it and then completely ignores it
            # # writing the part.get_payload(decode=True) instead; and then the payload variable is
            # # replaced by some dict later.
            # # the `payloadToWrite` has been also ignored so was commented
            # payload = part.get_payload()
            # if isinstance(payload, list):
            #     payload = payload.pop().as_string()
            # # payloadToWrite = payload
            # # check version of python to ensure use of only the correct error type
            # non_b64_err = TypeError
            # try:
            #     logger.debug("Try to base64 decode the attachment payload")
            #     # payloadToWrite = base64.decodebytes(payload)
            # except non_b64_err:
            #     logger.debug("Payload was not base64 encoded, using raw bytes")
            #     # payloadToWrite = payload
            files.append(SimpleUploadedFile(name, part.get_payload(
                decode=True), mimetypes.guess_type(name)[0]))
            logger.debug("Found MIME attachment %s" % name)

        counter += 1

    if not body:
        mail = BeautifulSoup(str(message), "html.parser")
        beautiful_body = mail.find('body')
        if beautiful_body:
            try:
                body = beautiful_body.text
                full_body = body
            except AttributeError:
                pass
        if not body:
            body = ""

    add_file_if_always_save_incoming_email_message(files, message)

    smtp_priority = message.get('priority', '')
    smtp_importance = message.get('importance', '')
    high_priority_types = {'high', 'important', '1', 'urgent'}
    priority = 2 if high_priority_types & {
        smtp_priority, smtp_importance} else 3

    payload = {
        'body': body,
        'full_body': full_body or body,
        'subject': subject,
        'queue': queue,
        'sender_email': sender_email,
        'priority': priority,
        'files': files,
    }

    return create_object_from_email_message(message, ticket_id, payload, files, logger=logger)
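
# End-to-end sketch (illustrative only; the queue slug, file path and module path
# helpdesk.email are assumptions): feeding a raw RFC822 message into a queue by
# hand, e.g. from a one-off script or a test:
#
#     import logging
#     from helpdesk.models import Queue
#     from helpdesk.email import object_from_message
#
#     queue = Queue.objects.get(slug="support")
#     logger = logging.getLogger("django.helpdesk.queue.support")
#     with open("/tmp/sample.eml") as fh:
#         ticket = object_from_message(message=fh.read(), queue=queue, logger=logger)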