mirror of
https://github.com/lucaspalomodevelop/core.git
synced 2026-03-13 16:14:40 +00:00
(ids) work in progress backend scripts
This commit is contained in:
parent
0f7cc6f9fe
commit
bcae82580d
@ -29,34 +29,45 @@
|
||||
"""
|
||||
import os
|
||||
|
||||
def reverse_log_reader(filename, block_size=8192, start_pos=None):
    """ read log file in reverse order
    :param filename: filename to parse
    :param block_size: max block size to examine per loop
    :param start_pos: start at position in file (None is end of file)
    :return: generator yielding {'line': <stripped line>, 'pos': <end position of that line in the file>}
    """
    # NOTE(review): positions are plain integer offsets; text-mode seek()/tell()
    # are only guaranteed for cookies returned by tell(), so this assumes the
    # log is a single-byte-safe encoding (ASCII/UTF-8 without multibyte chars
    # straddling block boundaries) -- confirm against the actual log files.
    # 'rU' was removed in Python 3.11; plain 'r' already gives universal newlines.
    with open(filename, 'r') as f_in:
        if start_pos is None:
            # default: start scanning from the very end of the file
            f_in.seek(0, os.SEEK_END)
            file_byte_start = f_in.tell()
        else:
            file_byte_start = start_pos

        data = ''
        while True:
            # slide the read window one block towards the start of the file,
            # clamping the block size when we would pass the beginning
            if file_byte_start - block_size < 0:
                block_size = file_byte_start
                file_byte_start = 0
            else:
                file_byte_start -= block_size

            f_in.seek(file_byte_start)
            # prepend this block to whatever partial data is left over
            data = f_in.read(block_size) + data
            eol = data.rfind('\n')

            # emit every newline-terminated line found in the buffer, last first
            while eol > -1:
                line_end = file_byte_start + len(data)
                line = data[eol:]
                data = data[:eol]
                eol = data.rfind('\n')
                # field line and position in file
                yield {'line': line.strip(), 'pos': line_end}
            if file_byte_start == 0 and eol == -1:
                # flush last (unterminated) line at the start of the file
                yield {'line': data.strip(), 'pos': len(data)}

            if file_byte_start == 0:
                break
|
||||
|
||||
|
||||
|
||||
@ -70,19 +70,25 @@ for filter in shlex.split(parameters['filter']):
|
||||
#del data_filters[filterField]
|
||||
data_filters_comp[filterField] = re.compile('.*')
|
||||
|
||||
|
||||
# When the caller asks for one exact log line, it passes its file position
# in the 'filepos' filter; otherwise start from the end of the file.
requested_pos = data_filters.get('filepos', '')
log_start_pos = int(requested_pos) if requested_pos.isdigit() else None
|
||||
|
||||
# query suricata eve log
|
||||
result = {'filters':data_filters,'rows':[],'total_rows':0}
|
||||
for line in reverse_log_reader(filename=suricata_log):
|
||||
for line in reverse_log_reader(filename=suricata_log, start_pos=log_start_pos):
|
||||
try:
|
||||
record = ujson.loads(line)
|
||||
record = ujson.loads(line['line'])
|
||||
except ValueError:
|
||||
# can not handle line
|
||||
record = {}
|
||||
|
||||
# only process valid alert items
|
||||
if 'alert' in record:
|
||||
# add position in file
|
||||
record['filepos'] = line['pos']
|
||||
# flatten structure
|
||||
record['alert_sid'] = record['alert']['signature_id']
|
||||
record['alert'] = record['alert']['signature']
|
||||
@ -102,5 +108,9 @@ for line in reverse_log_reader(filename=suricata_log):
|
||||
if (len(result['rows']) < limit or limit == 0) and result['total_rows'] >= offset:
|
||||
result['rows'].append(record)
|
||||
|
||||
# only try to fetch one line when filepos is given
|
||||
if log_start_pos != None:
|
||||
break
|
||||
|
||||
# output results
|
||||
print(ujson.dumps(result))
|
||||
Loading…
x
Reference in New Issue
Block a user