#! /opt/imh-python/bin/python3
''' Apache Log Parser - Parse Apache domain access logs '''

import os
import sys
import logging
import re
import json
from argparse import ArgumentParser
from time import time
from collections import defaultdict
from platform import node as hostname

import envinfo
from dns import resolver, reversename, exception

from rads import setup_logging, color

__maintainer__ = "Daniel K"
__email__ = "danielk@inmotionhosting.com"
__version__ = "1.0.2"
__date__ = "2016-09-16"


# Location of Apache domain logs for users.
# The bit at the end is for str.format() to allow users to be added there
USER_DOMLOG_DIR = envinfo.get_data()['apache_domlogs'] + "/{0!s}/"
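# For illustration only: the base path comes from envinfo, so on a typical
# cPanel host (assumed layout such as /usr/local/apache/domlogs) a call like
# USER_DOMLOG_DIR.format("someuser") would yield ".../domlogs/someuser/".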
# Maximum number of log files on shared servers
MAX_LOGS_SHARED = 50

LOGGER = logging.getLogger(__name__)


def ptr_lookup(ip_addr):
    """Return PTR for IP address"""
    try:
        myresolver = resolver.Resolver()
        myresolver.lifetime = 1.0
        myresolver.timeout = 1.0

        question_name = reversename.from_address(ip_addr)
        answers = myresolver.query(question_name, "PTR")
        return str(answers[0])

    except resolver.NXDOMAIN:
        return "No Record Found"
    except exception.Timeout:
        LOGGER.debug("Query Timed out looking for %s", ip_addr)
        return "Query Timed Out"
    except resolver.NoNameservers:
        LOGGER.debug("No nameservers found for %s", ip_addr)
        return "No nameservers found"
    except resolver.NoAnswer:
        LOGGER.debug("No answer for %s", ip_addr)
        return "No Answer"


def domlog_lines(source):
    '''Yield log lines from STDIN or from a log file'''

    if source == "-":
        LOGGER.info("Processing from STDIN.")
        yield from sys.stdin
    else:
        filename = source
        LOGGER.info("Processing file %s", source)
        if os.path.exists(filename):
            with open(filename, encoding='utf-8') as file_handle:
                try:
                    yield from file_handle
                except OSError:
                    LOGGER.error("Error reading file %s", filename)


def trim_dict(dictionary, entries):
    '''Trim dictionary to top entries ordered by value'''
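    # For example: trim_dict({'a': 3, 'b': 1, 'c': 7}, 2) returns
    # {'c': 7, 'a': 3}, i.e. the two entries with the largest values.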

    trimmed_dict = {}

    count = 0

    for item in sorted(dictionary, key=lambda x: dictionary[x], reverse=True):
        count = count + 1
        trimmed_dict[item] = dictionary[item]
        if count >= entries:
            return trimmed_dict

    return trimmed_dict


def parse_domlogs(source, numlines=10, add_ptr=False):
    '''Parse log lines from source and aggregate hit statistics'''

    results = {
        'status_codes': defaultdict(int),
        'daily_hourly': defaultdict(lambda: defaultdict(int)),
        'requests': defaultdict(int),
        'user_agents': defaultdict(int),
        'top_ips': defaultdict(int),
        'linecount': 0,
    }

    # Single regex to match all log lines.
    # It stores each entry in named groups, even though not all groups
    # are used by this script. You can see the names listed below
    # as (?P<name>...).
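    # An illustrative line this pattern matches (a typical combined-format
    # entry, shown wrapped across two lines here):
    #   203.0.113.5 - - [16/Sep/2016:10:32:01 -0400]
    #     "GET /index.html HTTP/1.1" 200 2326 "-" "Mozilla/5.0"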
    rx_logline = re.compile(
        r'^(?P<ips>(?P<ip>[0-9.]+|[a-fA-F0-9:]+)' # Could handle multiple IPs
        r'(,\s*[0-9.]+|[a-fA-F0-9:]+)*)\s+'
        r'(?P<logname>\S+)\s+(?P<user>\S+)\s+' # Could find logged in users
        r'\[(?P<date>[0-9]+/[a-zA-Z]+/[0-9]+):'
        r'(?P<time>(?P<hour>[0-9]+):[0-9]+:[0-9]+ [0-9-+]+)\]\s+'
        r'"(?P<request>(?P<type>[A-Z]+)\s+(?P<uri>\S+)) [^"]*"\s+'
        r'(?P<status>[0-9]+|-)\s+(?P<size>[0-9]+|-)\s+'
        r'"(?P<referrer>[^"]*)"\s+'
        r'"(?P<useragent>.*)"$'
    )

    for line in domlog_lines(source):
        results['linecount'] = results['linecount'] + 1
        match_logline = rx_logline.search(line)
        if match_logline is not None:
            results['status_codes'][match_logline.group('status')] = (
                results['status_codes'][match_logline.group('status')] + 1
            )
            request = "{: <4} {}".format(
                match_logline.group('status'), match_logline.group('request')
            )
            results['requests'][request] = results['requests'][request] + 1
            results['top_ips'][match_logline.group('ip')] = (
                results['top_ips'][match_logline.group('ip')] + 1
            )
            results['user_agents'][match_logline.group('useragent')] = (
                results['user_agents'][match_logline.group('useragent')] + 1
            )
            date = match_logline.group('date')
            hour = match_logline.group('hour')
            results['daily_hourly'][date][hour] = (
                results['daily_hourly'][date][hour] + 1
            )
        else:
            LOGGER.warning("Missed log line: %s", line)

    results['requests'] = trim_dict(results['requests'], numlines)
    results['user_agents'] = trim_dict(results['user_agents'], numlines)

    results['top_ips'] = trim_dict(results['top_ips'], numlines)

    if add_ptr:
        ip_ptr = defaultdict(int)
        for ip_addr in results['top_ips']:
            ptr_record = ptr_lookup(ip_addr)

            ip_with_ptr = f"{ip_addr: <15} {ptr_record}"
            ip_ptr[ip_with_ptr] = results['top_ips'][ip_addr]
        results['top_ips_with_ptr'] = ip_ptr

    return results


def logs_for_user(cpuser):
    '''List of domlogs for cpuser. If cpuser is None, return all domlogs.'''

    if cpuser is None:
        LOGGER.info("Choosing domlog for all users")
        cpuser = '.'
    else:
        LOGGER.info("Choosing domlog for %s", cpuser)

    logfile_list = []
    for filename in os.listdir(USER_DOMLOG_DIR.format(cpuser)):
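        # Skip anything that is not a plain access log. A cPanel domlogs
        # directory typically also contains *-bytes_log, *-ssl_log, and
        # ftpxferlog files (assumed layout), which are filtered out below.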
        if ("_log" not in filename) and ("-ssl" not in filename):
            if "ftpxferlog" in filename:
                continue
            logfile = os.path.join(USER_DOMLOG_DIR.format(cpuser), filename)
            if os.path.isfile(logfile):
                logfile_list.append(logfile)

    return logfile_list


def choose_logfile(cpuser):
    '''
    Determine which log file to use for a cPanel user.

    If there is only one log file, use it. Otherwise, if exactly one log
    file was updated recently, use that. Failing that, prefer the largest
    of the remaining files.

    If cpuser is None, search all users' logs.
    '''

    recentlog_list = []

    logfile_list = logs_for_user(cpuser)

    if len(logfile_list) == 0:
        LOGGER.warning("Could not find valid log file for %s", cpuser)
        return None
    if len(logfile_list) == 1:
        LOGGER.debug("Only one log file for %s: %s", cpuser, logfile_list[0])
        return logfile_list[0]

    for logfile in logfile_list:
        if os.path.getmtime(logfile) > (time() - 86400):
            # File is newer than 24 hours
            recentlog_list.append(logfile)

    if len(recentlog_list) == 1:
        LOGGER.debug(
            "Only one recent log file for %s: %s", cpuser, recentlog_list[0]
        )
        return recentlog_list[0]

    if len(recentlog_list) == 0:
        # If there are no recent files, choose from all files.
        LOGGER.debug("No recent logs for %s", cpuser)
    else:
        logfile_list = recentlog_list

    largest = 0
    domlog = None

    for logfile in logfile_list:
        if os.path.getsize(logfile) > largest:
            largest = os.path.getsize(logfile)
            domlog = logfile

    return domlog


def print_title(title, width):
    '''Print pretty header'''

    header_format = "~~ {0!s} ~~{1}"
    base_header_size = 8

    # If there is not enough room for the title, truncate it
    title = title[: width - base_header_size]

    head_length = len(title) + base_header_size

    long_bar = "~" * (width - head_length)
    print(
        color.green(
            header_format.format(
                title,
                long_bar,
            )
        )
    )


def print_tall(title, array, numlines, width):
    '''Print pretty data in a tall format, with one entry per line'''
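    # Each printed line is the count right-aligned in six columns, a space,
    # and the item itself, truncated to the requested width.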

    print_title(title, width)
    line_count = 0
    for item in sorted(array, key=lambda x: array[x], reverse=True):
        line_count = line_count + 1
        print(f"{array[item]: 6} {item}"[:width])
        if line_count == numlines:
            return


def print_wide(title, array, numlines, width):
    '''Print pretty data in a wide format, with many entries per line'''
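    # Entries are printed as "item: count" pairs packed onto lines of roughly
    # the requested width, e.g. a status-code row like "200: 941  404: 13".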

    print_title(title, width)
    line_count = 0
    current_width = 0
    for item in array:
        next_item = f"{item}: {array[item]} "
        if current_width + len(next_item) >= width:
            line_count = line_count + 1
            print()
            current_width = 0
            if line_count == numlines:
                return
        current_width = current_width + len(next_item)
        print(next_item, end=' ')

    print()


def parse_args():
    '''
    Parse command line arguments
    '''

    parser = ArgumentParser(description=__doc__)

    parser.add_argument(
        "-a",
        "--all",
        action='store_true',
        help=(
            "Search all users. Do not limit search to single user. "
            "Overrides any usernames or paths given."
        ),
    )

    parser.add_argument(
        "-m",
        "--multilogs",
        action='store_true',
        help="Return results for all log files, rather than just one.",
    )

    ptr_group = parser.add_mutually_exclusive_group()

    ptr_group.add_argument(
        "-p",
        "--with-ptr",
        action='store_true',
        help="Get PTR records for IPs. This is the default.",
    )

    ptr_group.add_argument(
        "-P",
        "--no-ptr",
        action='store_true',
        help="Do not resolve PTRs for IPs. Overrides -p.",
    )

    parser.add_argument(
        "-V",
        "--version",
        action='store_true',
        help="Print version information and exit.",
    )

    output_group = parser.add_argument_group("Output options")

    output_group.add_argument(
        "-n",
        "--numlines",
        action='store',
        type=int,
        default=10,
        help="Number of lines to display in each section. The default is 10.",
    )

    output_group.add_argument(
        "-w",
        "--width",
        action='store',
        type=int,
        default=110,
        help="Width of output in characters. The default is 110.",
    )

    output_group.add_argument(
        "-j", "--json", action='store_true', help="Output data as JSON instead."
    )

    logging_parser_group = parser.add_argument_group("Error logging options")
    logging_group = logging_parser_group.add_mutually_exclusive_group()

    logging_group.add_argument(
        '-v',
        '--verbose',
        dest='loglevel',
        action='store_const',
        const='debug',
        help="Use verbose logging.",
    )

    logging_group.add_argument(
        '-q',
        '--quiet',
        dest='loglevel',
        action='store_const',
        const='critical',
        help='Log only critical errors',
    )

    logging_group.add_argument(
        '--loglevel',
        dest='loglevel',
        type=str,
        choices=['error', 'info', 'debug', 'warning', 'critical'],
        help=(
            "Specify the verbosity of logging output. "
            "The default is 'warning'."
        ),
    )

    logging_parser_group.add_argument(
        "-o",
        "--output",
        action='store',
        type=str,
        default='',
        help="Output logging to the specified file.",
    )

    parser.add_argument(
        'sources',
        metavar='(USER|LOG)',
        type=str,
        nargs='*',
        help=(
            "Either a cPanel user or an Apache domain log file. "
            "'-' will be handled as STDIN. "
            "If none are given, then the script will attempt to gather "
            "data from STDIN."
        ),
    )

    args = parser.parse_args()

    if args.version:
        print(f"Apache Log Parser version {__version__}")
        print(f"Last modified on {__date__}.")
        sys.exit(0)

    if args.loglevel is None:
        logging_level = logging.WARNING
    else:
        logging_level = getattr(logging, args.loglevel.upper())

    if args.output == '':
        setup_logging(
            path='/var/log/messages',
            loglevel=logging_level,
            print_out=sys.stderr,
        )
    else:
        setup_logging(path=args.output, loglevel=logging_level, print_out=False)

    if args.no_ptr:
        show_ptr = False
    else:
        show_ptr = True

    if len(args.sources) == 0:
        LOGGER.info("No sources. Using STDIN.")
        args.sources.append("-")

    return (
        args.sources,
        show_ptr,
        args.numlines,
        args.width,
        args.json,
        args.all,
        args.multilogs,
    )


def print_results(results, numlines, width):
    '''Print out results to terminal'''

    for (source, result) in results:
        if result['linecount'] < 1:
            print(f"{source} is empty.")
            continue

        print(color.yellow(f"Results for {source}:"))
        for day in result['daily_hourly']:
            print_wide(
                f"Hourly hits ({day})",
                result['daily_hourly'][day],
                numlines,
                width,
            )
        print_wide(
            "HTTP response codes", result['status_codes'], numlines, width
        )
        print_tall("Top Requests", result['requests'], numlines, width)
        print_tall("Top user agents", result['user_agents'], numlines, width)
        # 'top_ips_with_ptr' is only present when PTR lookups were requested
        if result.get('top_ips_with_ptr') is not None:
            print_tall(
                "Top IPs with PTRs", result['top_ips_with_ptr'], numlines, width
            )
        else:
            print_tall("Top IPs", result['top_ips'], numlines, width)

        print("\n")


def main():
    '''Main function for script'''

    (
        sources,
        show_ptr,
        numlines,
        width,
        show_json,
        all_users,
        multilogs,
    ) = parse_args()

    # On shared servers, limit the number of log files searched
    if any(shared_type in hostname() for shared_type in ["biz", "hub", "res"]):
        log_limit = MAX_LOGS_SHARED
    else: