mirror of
https://github.com/lucaspalomodevelop/core.git
synced 2026-03-15 00:54:41 +00:00
(network insight) add (raw) data export
This commit is contained in:
parent
9cc65c1965
commit
2bc8e64bb0
@ -169,6 +169,24 @@ class NetworkinsightController extends ApiControllerBase
|
||||
return array();
|
||||
}
|
||||
|
||||
/**
 * Fetch metadata from the backend aggregation process.
 * @return array aggregation metadata, empty array when unavailable
 */
public function getMetadataAction()
{
    // only GET requests may query the backend
    if ($this->request->isGet()) {
        $backend = new Backend();
        $response = $backend->configdRun("netflow aggregate metadata json");
        $metadata = json_decode($response, true);
        if ($metadata != null) {
            return $metadata;
        }
    }
    // non-GET request or undecodable backend response
    return array();
}
|
||||
|
||||
/**
|
||||
* return interface map (device / name)
|
||||
* @return array interfaces
|
||||
@ -220,4 +238,34 @@ class NetworkinsightController extends ApiControllerBase
|
||||
}
|
||||
return $result;
|
||||
}
|
||||
|
||||
/**
 * request timeserie data to use for reporting
 * @param string $provider provider class name
 * @param string $from_date from timestamp (seconds since epoch)
 * @param string $to_date to timestamp (seconds since epoch)
 * @param string $resolution resolution in seconds
 * @return string csv output (empty string for non-GET requests)
 */
public function exportAction(
    $provider = null,
    $from_date = null,
    $to_date = null,
    $resolution = null
) {
    // text/csv is the registered MIME type for CSV payloads
    $this->response->setContentType('text/csv', 'UTF-8');
    $this->response->setHeader(
        // header NAME only -- the framework appends the ':' separator itself;
        // the previous trailing colon produced a malformed header
        'Content-Disposition',
        "attachment; filename=\"" . $provider . ".csv\""
    );
    if ($this->request->isGet()) {
        $backend = new Backend();
        // NOTE(review): parameters are interpolated into the configd command
        // string; configd restricts action parameters, but confirm upstream
        // validation of $provider/$from_date/$to_date/$resolution.
        $configd_cmd = "netflow aggregate export {$provider} {$from_date} {$to_date} {$resolution}";
        return $backend->configdRun($configd_cmd);
    } else {
        return "";
    }
}
|
||||
}
|
||||
|
||||
@ -75,7 +75,27 @@ POSSIBILITY OF SUCH DAMAGE.
|
||||
// fetch service names
|
||||
ajaxGet('/api/diagnostics/networkinsight/getServices',{}, function(services, status) {
|
||||
service_names = services;
|
||||
// return promise, no need to wait for getMetadata
|
||||
dfObj.resolve();
|
||||
// fetch aggregators
|
||||
ajaxGet('/api/diagnostics/networkinsight/getMetadata',{}, function(metadata, status) {
|
||||
Object.keys(metadata['aggregators']).forEach(function (agg_name) {
|
||||
var res = metadata['aggregators'][agg_name]['resolutions'].join(',');
|
||||
$("#export_collection").append($("<option data-resolutions='"+res+"'/>").val(agg_name).text(agg_name));
|
||||
});
|
||||
$("#export_collection").change(function(){
|
||||
//alert($(this).find('option:selected').data('resolutions'));
|
||||
$("#export_resolution").html("");
|
||||
var resolutions = String($(this).find('option:selected').data('resolutions'));
|
||||
resolutions.split(',').map(function(item) {
|
||||
$("#export_resolution").append($("<option/>").val(item).text(item));
|
||||
console.log(item);
|
||||
});
|
||||
$("#export_resolution").selectpicker('refresh');
|
||||
});
|
||||
$("#export_collection").change();
|
||||
$("#export_collection").selectpicker('refresh');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -227,27 +247,27 @@ POSSIBILITY OF SUCH DAMAGE.
|
||||
.valueFormat(d3.format(',.2s'));
|
||||
;
|
||||
|
||||
chart_data = [];
|
||||
data.map(function(item){
|
||||
var label = "(other)";
|
||||
var proto = "";
|
||||
if (item.protocol != "") {
|
||||
if (item.protocol in protocol_names) {
|
||||
proto = ' (' + protocol_names[item.protocol] + ')';
|
||||
}
|
||||
if (item.dst_port in service_names) {
|
||||
label = service_names[item.dst_port];
|
||||
} else {
|
||||
label = item.dst_port
|
||||
}
|
||||
}
|
||||
chart_data.push({'label': label + proto, 'value': item.total});
|
||||
});
|
||||
chart_data = [];
|
||||
data.map(function(item){
|
||||
var label = "(other)";
|
||||
var proto = "";
|
||||
if (item.protocol != "") {
|
||||
if (item.protocol in protocol_names) {
|
||||
proto = ' (' + protocol_names[item.protocol] + ')';
|
||||
}
|
||||
if (item.dst_port in service_names) {
|
||||
label = service_names[item.dst_port];
|
||||
} else {
|
||||
label = item.dst_port
|
||||
}
|
||||
}
|
||||
chart_data.push({'label': label + proto, 'value': item.total});
|
||||
});
|
||||
|
||||
d3.select("#chart_top_ports svg")
|
||||
.datum(chart_data)
|
||||
.transition().duration(350)
|
||||
.call(chart);
|
||||
var diag = d3.select("#chart_top_ports svg")
|
||||
.datum(chart_data)
|
||||
.transition().duration(350)
|
||||
.call(chart);
|
||||
pageCharts["chart_top_ports"] = chart;
|
||||
|
||||
// copy selection to detail page and query results
|
||||
@ -295,19 +315,19 @@ POSSIBILITY OF SUCH DAMAGE.
|
||||
.legendPosition("right")
|
||||
.valueFormat(d3.format(',.2s'));
|
||||
|
||||
chart_data = [];
|
||||
data.map(function(item){
|
||||
var label = "(other)";
|
||||
if (item.src_addr != "") {
|
||||
label = item.src_addr;
|
||||
}
|
||||
chart_data.push({'label': label, 'value': item.total});
|
||||
});
|
||||
chart_data = [];
|
||||
data.map(function(item){
|
||||
var label = "(other)";
|
||||
if (item.src_addr != "") {
|
||||
label = item.src_addr;
|
||||
}
|
||||
chart_data.push({'label': label, 'value': item.total});
|
||||
});
|
||||
|
||||
d3.select("#chart_top_sources svg")
|
||||
.datum(chart_data)
|
||||
.transition().duration(350)
|
||||
.call(chart);
|
||||
d3.select("#chart_top_sources svg")
|
||||
.datum(chart_data)
|
||||
.transition().duration(350)
|
||||
.call(chart);
|
||||
pageCharts["chart_top_sources"] = chart;
|
||||
|
||||
// copy selection to detail tab and query results
|
||||
@ -408,6 +428,21 @@ POSSIBILITY OF SUCH DAMAGE.
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * export detailed data (generate download link and click)
 */
function export_flow_data()
{
    var range_part = $("#export_date_from").val() + '/' + $("#export_date_to").val();
    var export_url = '/api/diagnostics/networkinsight/export/' +
        $("#export_collection").val() + '/' + range_part + '/' +
        $("#export_resolution").val();
    // synthesize an anchor and click it so the browser treats the
    // navigation as a download of the attachment response
    var anchor = document.createElement("a");
    $(anchor).click(function(evt) {
        evt.preventDefault();
        window.location.href = export_url;
    });
    $(anchor).click();
}
|
||||
|
||||
// hide heading
|
||||
$(".page-content-head").addClass("hidden");
|
||||
|
||||
@ -472,17 +507,26 @@ POSSIBILITY OF SUCH DAMAGE.
|
||||
to_date_ts = parseInt((date_end - (24*60*60*1000 * i)) / 1000);
|
||||
tmp_date = new Date(from_date_ts*1000);
|
||||
tmp = tmp_date.getDate() + '/' + (tmp_date.getMonth()+1) + '/' + tmp_date.getFullYear();
|
||||
$("#date_detail_from").append($("<option/>").val(from_date_ts).text(tmp));
|
||||
$("#date_detail_to").append($("<option/>").val(to_date_ts).text(tmp));
|
||||
if (i < 62) {
|
||||
$("#date_detail_from").append($("<option/>").val(from_date_ts).text(tmp));
|
||||
$("#date_detail_to").append($("<option/>").val(to_date_ts).text(tmp));
|
||||
}
|
||||
$("#export_date_from").append($("<option/>").val(from_date_ts).text(tmp));
|
||||
$("#export_date_to").append($("<option/>").val(to_date_ts).text(tmp));
|
||||
|
||||
}
|
||||
|
||||
$("#date_detail_from").selectpicker('refresh');
|
||||
$("#date_detail_to").selectpicker('refresh');
|
||||
$("#date_detail_from").change(function(){
|
||||
// change to date on change from date.
|
||||
$("#date_detail_to").prop('selectedIndex', $("#date_detail_from").prop('selectedIndex'));
|
||||
$("#date_detail_to").selectpicker('refresh');
|
||||
if ($("#date_detail_to").prop('selectedIndex') > $("#date_detail_from").prop('selectedIndex')) {
|
||||
$("#date_detail_to").prop('selectedIndex', $("#date_detail_from").prop('selectedIndex'));
|
||||
$("#date_detail_to").selectpicker('refresh');
|
||||
}
|
||||
});
|
||||
$("#export_date_from").selectpicker('refresh');
|
||||
$("#export_date_to").selectpicker('refresh');
|
||||
|
||||
chart_interface_totals();
|
||||
chart_top_dst_port_usage();
|
||||
@ -491,6 +535,7 @@ POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
$("#refresh_details").click(grid_details);
|
||||
$("#export_btn").click(export_flow_data);
|
||||
|
||||
});
|
||||
</script>
|
||||
@ -498,6 +543,7 @@ POSSIBILITY OF SUCH DAMAGE.
|
||||
<ul class="nav nav-tabs" data-tabs="tabs" id="maintabs">
|
||||
<li class="active"><a data-toggle="tab" id="totals_tab" href="#totals">{{ lang._('Totals') }}</a></li>
|
||||
<li><a data-toggle="tab" id="details_tab" href="#details">{{ lang._('Details') }}</a></li>
|
||||
<li><a data-toggle="tab" id="export_tab" href="#export">{{ lang._('Export') }}</a></li>
|
||||
</ul>
|
||||
<div class="tab-content content-box tab-content" style="padding: 10px;">
|
||||
<div id="totals" class="tab-pane fade in active">
|
||||
@ -605,4 +651,49 @@ POSSIBILITY OF SUCH DAMAGE.
|
||||
</tfoot>
|
||||
</table>
|
||||
</div>
|
||||
<div id="export" class="tab-pane fade in">
|
||||
<br/>
|
||||
<table class="table table-condensed table-striped">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>{{ lang._('Attribute') }}</th>
|
||||
<th>{{ lang._('Value') }}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>{{ lang._('Collection') }}</td>
|
||||
<td>
|
||||
<select class="selectpicker" id="export_collection">
|
||||
</select>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>{{ lang._('Resolution (seconds)') }}</td>
|
||||
<td>
|
||||
<select class="selectpicker" id="export_resolution">
|
||||
</select>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>{{ lang._('From date') }}</td>
|
||||
<td>
|
||||
<select class="selectpicker" id="export_date_from" data-live-search="true" data-size="10"></select>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>{{ lang._('To date') }}</td>
|
||||
<td>
|
||||
<select class="selectpicker" id="export_date_to" data-live-search="true" data-size="10"></select>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td></td>
|
||||
<td>
|
||||
<button id="export_btn" class="btn btn-default btn-xs"><i class="fa fa-cloud-download"></i> {{ lang._('Export')}}</button>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
97
src/opnsense/scripts/netflow/export_details.py
Executable file
97
src/opnsense/scripts/netflow/export_details.py
Executable file
@ -0,0 +1,97 @@
|
||||
#!/usr/local/bin/python2.7
|
||||
|
||||
"""
|
||||
Copyright (c) 2016 Ad Schellevis
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
|
||||
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||
AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
--------------------------------------------------------------------------------------
|
||||
fetch detailed data from provider for specified timeserie
|
||||
"""
|
||||
import time
|
||||
import datetime
|
||||
import os
|
||||
import sys
|
||||
import ujson
|
||||
sys.path.insert(0, "/usr/local/opnsense/site-python")
|
||||
from lib.parse import parse_flow
|
||||
from lib.aggregate import BaseFlowAggregator
|
||||
import lib.aggregates
|
||||
import params
|
||||
|
||||
|
||||
# default parameters, overwritten by the /key value command line arguments
# parsed by params.update_params()
app_params = {'start_time': '0',
              'end_time': '1461251783',
              'resolution': '300',
              'provider': 'FlowSourceAddrTotals'
              }
params.update_params(app_params)

# handle input parameters: every numeric field must be all digits,
# valid_params only becomes True when all three parse
valid_params = False
if app_params['start_time'].isdigit():
    start_time = int(app_params['start_time'])
    if app_params['end_time'].isdigit():
        end_time = int(app_params['end_time'])
        if app_params['resolution'].isdigit():
            resolution = int(app_params['resolution'])
            valid_params = True

if valid_params:
    # locate the requested aggregator class / resolution and spool its
    # detail records to stdout as csv
    for agg_class in lib.aggregates.get_aggregators():
        if app_params['provider'] == agg_class.__name__:
            if resolution in agg_class.resolutions():
                # found provider and resolution, start spooling data
                obj = agg_class(resolution)
                rownum = 0
                column_names = dict()
                for record in obj.get_data(start_time, end_time):
                    if rownum == 0:
                        column_names = record.keys()
                        # dump heading
                        print (','.join(column_names))
                    line = list()
                    for item in column_names:
                        if not record[item]:
                            # empty / zero / None exports as an empty field.
                            # must be the first branch of ONE chain: the old
                            # separate `if` double-appended falsy values and
                            # let None reach ','.join below
                            line.append("")
                        elif type(record[item]) == datetime.datetime:
                            line.append('%s+00:00' % record[item].strftime('%Y/%m/%d %H:%M:%S'))
                        elif type(record[item]) == float:
                            line.append('%.4f' % record[item])
                        elif type(record[item]) == int:
                            line.append('%d' % record[item])
                        else:
                            line.append(record[item])
                    print (','.join(line))
                    rownum += 1
else:
    # print usage, echoing the expected /key value parameter form
    print ('missing parameters :')
    tmp = list()
    for key in app_params:
        tmp.append('/%s %s' % (key, app_params[key]))
    print (' %s %s' % (sys.argv[0], ' '.join(tmp)))
    print ('')
    print (' resolution : sample rate in seconds')
    print (' start_time : start time (seconds since epoch)')
    print (' end_time : end timestamp (seconds since epoch)')
    print (' provider : data provider classname')
||||
@ -31,6 +31,33 @@ import os
|
||||
import datetime
|
||||
import sqlite3
|
||||
|
||||
def convert_timestamp(val):
    """ convert timestamps from string (internal sqlite type) or seconds since epoch

    :param val: timestamp string, either "YYYY-MM-DD[ HH:MM:SS[.frac]]" or
                a number of seconds since epoch
    :return: datetime.datetime
    """
    if val.find('-') > -1:
        # formatted date/time
        if val.find(" ") > -1:
            datepart, timepart = val.split(" ")
        else:
            # date-only value: default the time part. use '.' before the
            # fraction (not ',') so the split('.') below separates it and
            # the int() conversions cannot fail on "0,0"
            datepart = val
            timepart = "0:0:0.0"
        year, month, day = map(int, datepart.split("-"))
        timepart_full = timepart.split(".")
        hours, minutes, seconds = map(int, timepart_full[0].split(":"))
        if len(timepart_full) == 2:
            # right-pad fractional seconds to microseconds ("5" -> 500000);
            # no .decode() needed -- the fragment is already text here
            microseconds = int('{:0<6.6}'.format(timepart_full[1]))
        else:
            microseconds = 0

        val = datetime.datetime(year, month, day, hours, minutes, seconds, microseconds)
    else:
        # timestamp stored as seconds since epoch, convert to utc
        val = datetime.datetime.utcfromtimestamp(float(val))

    return val
|
||||
|
||||
sqlite3.register_converter('timestamp', convert_timestamp)
|
||||
|
||||
class AggMetadata(object):
|
||||
""" store some metadata needed to keep track of parse progress
|
||||
"""
|
||||
@ -384,3 +411,32 @@ class BaseFlowAggregator(object):
|
||||
cur.close()
|
||||
|
||||
return result
|
||||
|
||||
def get_data(self, start_time, end_time):
    """ get detail data

    :param start_time: start timestamp (seconds since epoch)
    :param end_time: end timestamp (seconds since epoch)
    :return: iterator yielding one dict per timeserie row
    """
    # snap the window start down to a resolution boundary so partial
    # sample slots at the beginning are included
    query_params = {
        'start_time': self._parse_timestamp((int(start_time / self.resolution)) * self.resolution),
        'end_time': self._parse_timestamp(end_time)
    }
    sql_select = 'select mtime start_time, '
    sql_select += '%s, octets, packets, last_seen as "last_seen [timestamp]" \n' % ','.join(self.agg_fields)
    sql_select += 'from timeserie \n'
    sql_select += 'where mtime >= :start_time and mtime < :end_time\n'
    cur = self._db_connection.cursor()
    cur.execute(sql_select, query_params)

    # stream rows one at a time as {column_name: value} dicts; zip stops
    # at the shorter sequence, matching the original per-index bounds check
    field_names = [description[0] for description in cur.description]
    record = cur.fetchone()
    while record is not None:
        yield dict(zip(field_names, record))
        record = cur.fetchone()
|
||||
|
||||
@ -85,3 +85,9 @@ command:/usr/local/opnsense/scripts/netflow/get_top_usage.py
|
||||
parameters:/provider %s /start_time %s /end_time %s /key_fields %s /value_field %s /filter %s /max_hits %s
|
||||
type:script_output
|
||||
message:request netflow data aggregator top usage for %s
|
||||
|
||||
[aggregate.export]
|
||||
command:/usr/local/opnsense/scripts/netflow/export_details.py
|
||||
parameters:/provider %s /start_time %s /end_time %s /resolution %s
|
||||
type:script_output
|
||||
message:export netflow data aggregator details for %s
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user