aaP_erikator
Make a backup first
cp /www/server/panel/plugin/mail_sys/script/cut_maillog.py /www/server/panel/plugin/mail_sys/script/cut_maillog.py.bak
Then create the new script:
nano /www/server/panel/plugin/mail_sys/script/cut_maillog.py
Copy and paste the code below (the same code that I used to fix another issue here: https://www.aapanel.com/forum/d/22452-bt-python-reaching-100-cpu-usage-solution).
# coding: utf-8
# -----------------------------
# Mail Log Processing Script
# -----------------------------
from datetime import datetime, timedelta
from dateutil.parser import parse
import os, sys, time, re
import logging
from itertools import islice
os.chdir('/www/server/panel')
sys.path.insert(0, './')
sys.path.insert(0, 'class/')
sys.path.insert(0, 'BTPanel/')
import public
class Cut:
    """Rotate the postfix mail log into per-day files and harvest
    hourly delivery failures into the panel's ``mail_errlog`` table.

    Intended to run periodically (e.g. from cron) via the
    ``__main__`` guard at the bottom of this script.
    """

    # Patterns run once per candidate log line, so compile them once
    # at class level instead of re-searching inside the loop.
    _RE_RECIPIENT = re.compile(r'to=<([^>]+)>')
    _RE_STATUS = re.compile(r'status=([^ ]+)')
    _RE_DELAY = re.compile(r'delay=(\d+(\.\d+)?)')
    _RE_DELAYS = re.compile(r'delays=([\d./*]+)')
    _RE_DSN = re.compile(r'dsn=([\d.]+)')
    _RE_RELAY = re.compile(r'relay=(.*?)(?=,| )')
    _RE_ERRINFO = re.compile(r'\((.*?)\)')

    def __init__(self):
        # Directory holding the per-day copies of the mail log.
        self.back_log_path = '/www/server/panel/data/mail/back_log'
        # Kept for backward compatibility with any external reader of
        # this attribute; iteration below streams line-by-line anyway.
        self.chunk_size = 1000
        self.setup_logging()

    def setup_logging(self):
        """Route this script's messages to a dedicated panel log file."""
        logging.basicConfig(
            filename='/www/server/panel/logs/cut_maillog.log',
            level=logging.INFO,
            format='%(asctime)s - %(levelname)s - %(message)s'
        )

    def M2(self, table_name):
        """Return a panel ``db.Sql`` helper bound to the postfix
        mail-log SQLite database, targeting *table_name*."""
        import db
        sql = db.Sql()
        sql._Sql__DB_FILE = '/www/vmail/postfixmaillog.db'
        sql._Sql__encrypt_keys = []
        return sql.table(table_name)

    def parse_log_time(self, line):
        """Best-effort parse of a log line's leading timestamp.

        Supports ISO-8601 lines (starting with a 4-digit year) and the
        classic syslog format ("Jan  2 03:04:05"), which lacks a year,
        so the current year is assumed.  Returns an int epoch seconds;
        falls back to "now" on any parse failure so callers never have
        to handle an exception.

        NOTE(review): around New Year the syslog branch can stamp
        Dec 31 lines with the new year — acceptable for log rotation.
        """
        try:
            if line[:4].isdigit():  # ISO format starts with the year
                return int(parse(line[:31]).timestamp())
            # Syslog format: prepend the current year so strptime works.
            stamp = f"{line[:15]} {datetime.now().year}"
            return int(datetime.strptime(stamp, '%b %d %H:%M:%S %Y').timestamp())
        except (ValueError, OverflowError, OSError):
            # Narrowed from a bare ``except:`` (which also swallowed
            # KeyboardInterrupt/SystemExit); still best-effort.
            return int(datetime.now().timestamp())

    def day_log_cut(self):
        """Copy today's entries from the system mail log into a
        dated file under ``back_log_path``.

        Returns True on success, False when the source log is missing
        or an unexpected error occurs (details go to the script log).
        """
        try:
            midnight = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
            time0 = int(midnight.timestamp())
            # Debian/Ubuntu name the file mail.log; RHEL-alikes use maillog.
            path = '/var/log/mail.log' if "ubuntu" in public.get_linux_distribution().lower() else '/var/log/maillog'
            if not os.path.exists(path):
                logging.error(f"Log file not found: {path}")
                return False
            today = datetime.now().strftime('%Y-%m-%d')
            day_log = f"{self.back_log_path}/{today}_mail.log"
            # Iterating the file object already streams line-by-line,
            # so memory stays bounded without manual islice chunking.
            with open(path, 'r') as source, open(day_log, 'w') as target:
                for line in source:
                    try:
                        if self.parse_log_time(line) >= time0:
                            target.write(line)
                    except Exception as e:
                        logging.error(f"Error processing line: {str(e)}")
                        continue
            return True
        except Exception as e:
            logging.error(f"Error in day_log_cut: {str(e)}")
            return False

    def _opt(self, pattern, line):
        """Return *pattern*'s first capture group in *line*, or ''.

        Single search per field (the original searched each pattern
        twice: once to test, once to extract)."""
        m = pattern.search(line)
        return m.group(1) if m else ''

    def get_hour_errinfo(self, timestamp):
        """Scan today's cut log for failed deliveries within the hour
        starting at *timestamp* and batch-insert them into
        ``mail_errlog`` (one row per recipient per hour).

        Results are marked done in the panel cache: a fully elapsed
        hour is cached for 24h, the still-running hour for 30min.
        """
        try:
            start = int(timestamp)
            end = start + 3599  # inclusive last second of the hour
            current_time = int(time.time())
            if current_time < start:
                return  # hour is still in the future
            cache_key = f'mail_sys:get_hour_errinfo_{timestamp}'
            if public.cache_get(cache_key):
                return  # this hour was already processed
            today = datetime.now().strftime('%Y-%m-%d')
            day_log = f"{self.back_log_path}/{today}_mail.log"
            if not os.path.exists(day_log):
                return
            seen_recipients = set()
            batch_data = []
            batch_size = 100
            with open(day_log, 'r') as f:
                for line in f:
                    try:
                        log_time = self.parse_log_time(line)
                        if not (start <= log_time <= end):
                            continue
                        # Cheap substring pre-filters before any regex.
                        if 'status=sent' in line or 'postmaster@' in line:
                            continue
                        recipient_m = self._RE_RECIPIENT.search(line)
                        status_m = self._RE_STATUS.search(line)
                        if not (recipient_m and status_m) or status_m.group(1) == 'sent':
                            continue
                        recipient = recipient_m.group(1)
                        if recipient in seen_recipients:
                            continue  # dedupe: one row per recipient
                        seen_recipients.add(recipient)
                        # Guarded split: a malformed address without
                        # '@' no longer raises IndexError.
                        addr_parts = recipient.split('@')
                        err_data = {
                            'recipient': recipient,
                            'domain': addr_parts[1] if len(addr_parts) > 1 else '',
                            'status': status_m.group(1),
                            'delay': self._opt(self._RE_DELAY, line),
                            'delays': self._opt(self._RE_DELAYS, line),
                            'dsn': self._opt(self._RE_DSN, line),
                            'relay': self._opt(self._RE_RELAY, line),
                            'err_info': self._opt(self._RE_ERRINFO, line),
                            'created': log_time
                        }
                        batch_data.append(err_data)
                        if len(batch_data) >= batch_size:
                            self.M2('mail_errlog').insert_many(batch_data)
                            batch_data = []
                    except Exception as e:
                        logging.error(f"Error processing line in get_hour_errinfo: {str(e)}")
                        continue
            # Insert remaining data
            if batch_data:
                self.M2('mail_errlog').insert_many(batch_data)
            # Set cache with appropriate expiration
            cache_duration = 24*60*60 if current_time > end else 30*60
            public.cache_set(cache_key, True, cache_duration)
        except Exception as e:
            logging.error(f"Error in get_hour_errinfo: {str(e)}")

    def get_data_info(self):
        """Process all 24 hours of the current day; future hours are
        skipped inside get_hour_errinfo."""
        try:
            start_of_day = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
            for hour in range(24):
                self.get_hour_errinfo(int((start_of_day + timedelta(hours=hour)).timestamp()))
        except Exception as e:
            logging.error(f"Error in get_data_info: {str(e)}")
if __name__ == '__main__':
    # Entry point: rotate today's log first, then harvest the
    # per-hour error information from the freshly cut file.
    runner = Cut()
    runner.day_log_cut()
    runner.get_data_info()
    logging.info("Process completed successfully")
Then create the logs directory and log file:
mkdir -p /www/server/panel/logs
mkdir -p /www/server/panel/data/mail/back_log
chmod 755 /www/server/panel/data/mail/back_log
touch /www/server/panel/logs/cut_maillog.log
chmod 644 /www/server/panel/logs/cut_maillog.log
Then set up the necessary symlink (on Ubuntu):
ln -s /www/server/panel/vmail/log/maillog /var/log/mail.log
You can test-run the script:
python /www/server/panel/plugin/mail_sys/script/cut_maillog.py
and check the log /www/server/panel/logs/cut_maillog.log