mirror of
https://github.com/lucaspalomodevelop/core.git
synced 2026-03-13 08:09:41 +00:00
configd: extend log query mechanism with streaming capability
This commit is contained in:
parent
21df35228c
commit
c522ecac65
2
plist
2
plist
@ -1171,9 +1171,11 @@
|
||||
/usr/local/opnsense/scripts/syslog/list_applications.php
|
||||
/usr/local/opnsense/scripts/syslog/lockout_handler
|
||||
/usr/local/opnsense/scripts/syslog/log_archive
|
||||
/usr/local/opnsense/scripts/syslog/log_matcher.py
|
||||
/usr/local/opnsense/scripts/syslog/logformats/__init__.py
|
||||
/usr/local/opnsense/scripts/syslog/logformats/syslog.py
|
||||
/usr/local/opnsense/scripts/syslog/queryLog.py
|
||||
/usr/local/opnsense/scripts/syslog/streamLog.py
|
||||
/usr/local/opnsense/scripts/system/activity.py
|
||||
/usr/local/opnsense/scripts/system/certctl.py
|
||||
/usr/local/opnsense/scripts/system/cpu.py
|
||||
|
||||
137
src/opnsense/scripts/syslog/log_matcher.py
Normal file
137
src/opnsense/scripts/syslog/log_matcher.py
Normal file
@ -0,0 +1,137 @@
|
||||
#!/usr/local/bin/python3
|
||||
|
||||
"""
|
||||
Copyright (c) 2024 Deciso B.V.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
|
||||
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||
AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGE.
|
||||
"""
|
||||
|
||||
import os
|
||||
import glob
|
||||
import re
|
||||
import sys
|
||||
import subprocess
|
||||
from logformats import FormatContainer, BaseLogFormat
|
||||
sys.path.insert(0, "/usr/local/opnsense/site-python")
|
||||
from log_helper import reverse_log_reader
|
||||
|
||||
class LogMatcher:
    """Match and parse log records from (rotated) syslog-ng log files.

    Bundles a case-insensitive wildcard filter, an optional severity whitelist
    and the known log file locations for a module, and offers two generators:
    match_records() for historic records (newest first) and
    live_match_records() to follow the latest log file.
    """

    def __init__(self, filter, filename, module, severity):
        """
        :param filter: wildcard filter; '*' expands to '.*', matching is case-insensitive,
                       a filter without wildcards is treated as a partial match
        :param filename: log file name (excluding .log extension)
        :param module: module name, 'core' maps directly into /var/log
        :param severity: comma separated severity whitelist, empty string matches all
        """
        try:
            self.filter = filter.replace('*', '.*').lower()
            if self.filter.find('*') == -1:
                # no wildcard operator, assume partial match
                # BUG FIX: wrap the lowered filter (self.filter), not the raw
                # parameter, otherwise mixed-case filters never match the
                # lowercased input lines
                self.filter = ".*%s.*" % self.filter
            self.filter_regexp = re.compile(self.filter)
        except re.error:
            # remove illegal expression, match everything instead
            self.filter_regexp = re.compile('.*')

        self.filename = filename
        self.log_filenames = self.fetch_log_filenames(filename, module)
        self.severity = severity.split(',') if severity.strip() != '' else []
        self.row_number = 0

    def live_match_records(self):
        """Follow the most recent log file (tail -f) and yield matching records as they arrive."""
        # row number does not make sense anymore, set it to 0
        self.row_number = 0
        latest = "/var/log/%s/latest.log" % os.path.basename(self.filename)
        if not os.path.exists(latest):
            latest = self.log_filenames[0] if len(self.log_filenames) > 0 else None
        # BUG FIX: guard against an empty candidate list; os.path.exists(None)
        # raises TypeError (only OSError/ValueError are suppressed)
        if latest is not None and os.path.exists(latest):
            format_container = FormatContainer(latest)
            p = subprocess.Popen(
                ['tail', '-f', '-n 0', latest],
                stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, bufsize=0
            )
            try:
                for line in iter(p.stdout.readline, b''):
                    line = line.decode()
                    if line != "" and self.filter_regexp.match(('%s' % line).lower()):
                        record = self.parse_line(line, format_container)
                        if len(self.severity) == 0 or record['severity'] is None or record['severity'] in self.severity:
                            yield record
            except KeyboardInterrupt:
                # stop streaming gracefully on interrupt; cleanup happens below
                pass
            finally:
                p.terminate()

    def match_records(self):
        """Yield matching records from all known log files, newest record first."""
        for filename in self.log_filenames:
            if os.path.exists(filename):
                format_container = FormatContainer(filename)
                for rec in reverse_log_reader(filename):
                    self.row_number += 1
                    if rec['line'] != "" and self.filter_regexp.match(('%s' % rec['line']).lower()):
                        record = self.parse_line(rec['line'], format_container)
                        if len(self.severity) == 0 or record['severity'] is None or record['severity'] in self.severity:
                            yield record

    def parse_line(self, line, format_container):
        """Parse a single raw log line into a record dict using the detected log format.

        :param line: raw log line
        :param format_container: FormatContainer bound to the file the line came from
        :return: dict with timestamp/parser/facility/severity/process_name/pid/rnum/line
        """
        frmt = format_container.get_format(line)
        record = {
            'timestamp': None,
            'parser': None,
            'facility': 1,
            'severity': None,
            'process_name': '',
            'pid': None,
            'rnum': self.row_number
        }
        if frmt:
            if issubclass(frmt.__class__, BaseLogFormat):
                # backwards compatibility, old style log handler
                record['timestamp'] = frmt.timestamp(line)
                record['process_name'] = frmt.process_name(line)
                record['line'] = frmt.line(line)
                record['parser'] = frmt.name
            else:
                record['timestamp'] = frmt.timestamp
                record['process_name'] = frmt.process_name
                record['pid'] = frmt.pid
                record['facility'] = frmt.facility
                record['severity'] = frmt.severity_str
                record['line'] = frmt.line
                record['parser'] = frmt.name
        else:
            # unknown format, pass the raw line through
            record['line'] = line

        return record

    @staticmethod
    def fetch_log_filenames(filename, module):
        """Resolve all candidate log file locations for a module/filename pair.

        :param filename: log file name (excluding .log extension)
        :param module: module name, 'core' maps directly into /var/log
        :return: list of filenames, rotated files first (newest first), legacy last
        """
        log_filenames = list()
        if module == 'core':
            log_basename = "/var/log/%s" % os.path.basename(filename)
        else:
            log_basename = "/var/log/%s/%s" % (
                os.path.basename(module), os.path.basename(filename)
            )
        if os.path.isdir(log_basename):
            # new syslog-ng local targets use an extra directory level
            rotated = glob.glob("%s/%s_*.log" % (log_basename, log_basename.split('/')[-1].split('.')[0]))
            for rotated_filename in sorted(rotated, reverse=True):
                log_filenames.append(rotated_filename)
        # legacy log output is always stashed last
        log_filenames.append("%s.log" % log_basename)
        if module != 'core':
            log_filenames.append("/var/log/%s_%s.log" % (module, os.path.basename(filename)))

        return log_filenames
|
||||
@ -2,6 +2,7 @@
|
||||
|
||||
"""
|
||||
Copyright (c) 2019-2020 Ad Schellevis <ad@opnsense.org>
|
||||
Copyright (c) 2024 Deciso B.V.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
@ -30,15 +31,9 @@
|
||||
query log files
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os.path
|
||||
import re
|
||||
import ujson
|
||||
import datetime
|
||||
import glob
|
||||
from logformats import FormatContainer, BaseLogFormat
|
||||
sys.path.insert(0, "/usr/local/opnsense/site-python")
|
||||
from log_helper import reverse_log_reader
|
||||
from log_matcher import LogMatcher
|
||||
import argparse
|
||||
|
||||
if __name__ == '__main__':
|
||||
@ -55,81 +50,19 @@ if __name__ == '__main__':
|
||||
|
||||
result = {'filters': inputargs.filter, 'rows': [], 'total_rows': 0, 'origin': os.path.basename(inputargs.filename)}
|
||||
if inputargs.filename != "":
|
||||
log_filenames = list()
|
||||
if inputargs.module == 'core':
|
||||
log_basename = "/var/log/%s" % os.path.basename(inputargs.filename)
|
||||
else:
|
||||
log_basename = "/var/log/%s/%s" % (
|
||||
os.path.basename(inputargs.module), os.path.basename(inputargs.filename)
|
||||
)
|
||||
if os.path.isdir(log_basename):
|
||||
# new syslog-ng local targets use an extra directory level
|
||||
filenames = glob.glob("%s/%s_*.log" % (log_basename, log_basename.split('/')[-1].split('.')[0]))
|
||||
for filename in sorted(filenames, reverse=True):
|
||||
log_filenames.append(filename)
|
||||
# legacy log output is always stashed last
|
||||
log_filenames.append("%s.log" % log_basename)
|
||||
if inputargs.module != 'core':
|
||||
log_filenames.append("/var/log/%s_%s.log" % (inputargs.module, os.path.basename(inputargs.filename)))
|
||||
|
||||
limit = int(inputargs.limit) if inputargs.limit.isdigit() else 0
|
||||
offset = int(inputargs.offset) if inputargs.offset.isdigit() else 0
|
||||
severity = inputargs.severity.split(',') if inputargs.severity.strip() != '' else []
|
||||
try:
|
||||
filter = inputargs.filter.replace('*', '.*').lower()
|
||||
if filter.find('*') == -1:
|
||||
# no wildcard operator, assume partial match
|
||||
filter = ".*%s.*" % filter
|
||||
filter_regexp = re.compile(filter)
|
||||
except re.error:
|
||||
# remove illegal expression
|
||||
filter_regexp = re.compile('.*')
|
||||
|
||||
row_number = 0
|
||||
for filename in log_filenames:
|
||||
if os.path.exists(filename):
|
||||
format_container = FormatContainer(filename)
|
||||
for rec in reverse_log_reader(filename):
|
||||
row_number += 1
|
||||
if rec['line'] != "" and filter_regexp.match(('%s' % rec['line']).lower()):
|
||||
frmt = format_container.get_format(rec['line'])
|
||||
record = {
|
||||
'timestamp': None,
|
||||
'parser': None,
|
||||
'facility': 1,
|
||||
'severity': None,
|
||||
'process_name': '',
|
||||
'pid': None,
|
||||
'rnum': row_number
|
||||
}
|
||||
if frmt:
|
||||
if issubclass(frmt.__class__, BaseLogFormat):
|
||||
# backwards compatibility, old style log handler
|
||||
record['timestamp'] = frmt.timestamp(rec['line'])
|
||||
record['process_name'] = frmt.process_name(rec['line'])
|
||||
record['line'] = frmt.line(rec['line'])
|
||||
record['parser'] = frmt.name
|
||||
else:
|
||||
record['timestamp'] = frmt.timestamp
|
||||
record['process_name'] = frmt.process_name
|
||||
record['pid'] = frmt.pid
|
||||
record['facility'] = frmt.facility
|
||||
record['severity'] = frmt.severity_str
|
||||
record['line'] = frmt.line
|
||||
record['parser'] = frmt.name
|
||||
else:
|
||||
record['line'] = rec['line']
|
||||
if len(severity) == 0 or record['severity'] is None or record['severity'] in severity:
|
||||
result['total_rows'] += 1
|
||||
if (len(result['rows']) < limit or limit == 0) and result['total_rows'] >= offset:
|
||||
if inputargs.output == 'json':
|
||||
result['rows'].append(record)
|
||||
else:
|
||||
print("%(timestamp)s\t%(severity)s\t%(process_name)s\t%(line)s" % record)
|
||||
elif limit > 0 and result['total_rows'] > offset + limit:
|
||||
# do not fetch data until end of file...
|
||||
break
|
||||
log_matcher = LogMatcher(inputargs.filter, inputargs.filename, inputargs.module, inputargs.severity)
|
||||
for record in log_matcher.match_records():
|
||||
result['total_rows'] += 1
|
||||
if (len(result['rows']) < limit or limit == 0) and (result['total_rows'] >= offset):
|
||||
if inputargs.output == 'json':
|
||||
result['rows'].append(record)
|
||||
else:
|
||||
print("%(timestamp)s\t%(severity)s\t%(process_name)s\t%(line)s" % record)
|
||||
if limit > 0 and result['total_rows'] > offset + limit:
|
||||
# do not fetch data until end of file...
|
||||
break
|
||||
|
||||
# output results (when json)
|
||||
|
||||
64
src/opnsense/scripts/syslog/streamLog.py
Executable file
64
src/opnsense/scripts/syslog/streamLog.py
Executable file
@ -0,0 +1,64 @@
|
||||
#!/usr/local/bin/python3
|
||||
|
||||
"""
|
||||
Copyright (c) 2024 Deciso B.V.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
|
||||
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||
AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
--------------------------------------------------------------------------------------
|
||||
|
||||
stream log file
|
||||
"""
|
||||
|
||||
import ujson
|
||||
import argparse
|
||||
from collections import deque
|
||||
from log_matcher import LogMatcher
|
||||
|
||||
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--filter', help='filter results', default='')
    parser.add_argument('--offset', help='include last N matching lines', default='')
    parser.add_argument('--filename', help='log file name (excluding .log extension)', default='')
    parser.add_argument('--module', help='module', default='core')
    parser.add_argument('--severity', help='comma separated list of severities', default='')
    inputargs = parser.parse_args()

    # a log file name is required; bail out before touching the matcher
    if inputargs.filename == "":
        # BUG FIX: the bare name `exit` was a no-op statement and never
        # terminated the script; raise SystemExit to actually stop here
        raise SystemExit(0)

    offset = int(inputargs.offset) if inputargs.offset.isdigit() else 0

    log_matcher = LogMatcher(inputargs.filter, inputargs.filename, inputargs.module, inputargs.severity)

    # replay the last <offset> matching records first: match_records() yields
    # newest-to-oldest, so prepend into a deque to restore chronological order
    result = deque()
    if offset > 0:
        for record in log_matcher.match_records():
            if len(result) >= offset:
                break
            result.appendleft(f"event: message\ndata:{ujson.dumps(record)}\n\n")

    for record in result:
        print(record, flush=True)

    # then follow the live log, emitting server-sent-event formatted messages
    # BUG FIX: flush each live record; without it stdout buffering stalls the
    # stream when piped to the consumer (the replay loop above already flushes)
    for record in log_matcher.live_match_records():
        print(f"event: message\ndata:{ujson.dumps(record)}\n\n", flush=True)
|
||||
@ -16,6 +16,12 @@ parameters:--limit %s --offset %s --filter %s --module %s --filename %s --sever
|
||||
type:stream_output
|
||||
message:Stream log
|
||||
|
||||
[diag.log_live]
|
||||
command:/usr/local/opnsense/scripts/syslog/streamLog.py
|
||||
parameters:--offset %s --filter %s --module %s --filename %s --severity %s
|
||||
type:stream_output
|
||||
message:Livestream log
|
||||
|
||||
[diag.disk]
|
||||
command:/bin/df -ahT --libxo json
|
||||
parameters:
|
||||
|
||||
@ -25,10 +25,6 @@
|
||||
"""
|
||||
|
||||
import os
|
||||
import mmap
|
||||
from io import StringIO
|
||||
import struct
|
||||
|
||||
|
||||
def reverse_log_reader(filename, block_size=81920, start_pos=None):
|
||||
""" read log file in reverse order
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user