Network insight, some 2to3 magic, closes https://github.com/opnsense/core/issues/2964

This commit is contained in:
Ad Schellevis 2018-11-29 13:08:12 +01:00
parent c95c82cb87
commit 2481b62ca8
4 changed files with 10 additions and 10 deletions

View File

@@ -69,7 +69,7 @@ if valid_params:
column_names = dict()
for record in obj.get_data(start_time, end_time):
if rownum == 0:
column_names = record.keys()
column_names = list(record.keys())
# dump heading
print (','.join(column_names))
line = list()

View File

@@ -202,14 +202,14 @@ if __name__ == '__main__':
if cmd_args.profile:
# start with profiling
import cProfile
import StringIO
import io
import pstats
pr = cProfile.Profile(builtins=False)
pr.enable()
Main()
pr.disable()
s = StringIO.StringIO()
s = io.StringIO()
sortby = 'cumulative'
ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
ps.print_stats()

View File

@@ -42,9 +42,9 @@ def convert_timestamp(val):
else:
datepart = val
timepart = "0:0:0,0"
year, month, day = map(int, datepart.split("-"))
year, month, day = list(map(int, datepart.split("-")))
timepart_full = timepart.split(".")
hours, minutes, seconds = map(int, timepart_full[0].split(":"))
hours, minutes, seconds = list(map(int, timepart_full[0].split(":")))
if len(timepart_full) == 2:
microseconds = int('{:0<6.6}'.format(timepart_full[1].decode()))
else:

View File

@@ -78,10 +78,10 @@ class BaseFlowAggregator(object):
tmp = 'update timeserie set last_seen = :flow_end, '
tmp += 'octets = octets + :octets_consumed, packets = packets + :packets_consumed '
tmp += 'where mtime = :mtime and %s '
self._update_stmt = tmp % (' and '.join(map(lambda x: '%s = :%s' % (x, x), self.agg_fields)))
self._update_stmt = tmp % (' and '.join(['%s = :%s' % (x, x) for x in self.agg_fields]))
tmp = 'insert into timeserie (mtime, last_seen, octets, packets, %s) '
tmp += 'values (:mtime, :flow_end, :octets_consumed, :packets_consumed, %s)'
self._insert_stmt = tmp % (','.join(self.agg_fields), ','.join(map(lambda x: ':%s' % x, self.agg_fields)))
self._insert_stmt = tmp % (','.join(self.agg_fields), ','.join([':%s' % x for x in self.agg_fields]))
# open database
self._open_db()
self._fetch_known_targets()
@@ -257,7 +257,7 @@ class BaseFlowAggregator(object):
cur.execute(sql_select, {'start_time': self._parse_timestamp(start_time),
'end_time': self._parse_timestamp(end_time)})
#
field_names = (map(lambda x: x[0], cur.description))
field_names = ([x[0] for x in cur.description])
for record in cur.fetchall():
result_record = dict()
for field_indx in range(len(field_names)):
@@ -321,7 +321,7 @@ class BaseFlowAggregator(object):
cur.execute(sql_select, query_params)
# fetch all data, to a max of [max_hits] rows.
field_names = (map(lambda x: x[0], cur.description))
field_names = ([x[0] for x in cur.description])
for record in cur.fetchall():
result_record = dict()
for field_indx in range(len(field_names)):
@@ -360,7 +360,7 @@ class BaseFlowAggregator(object):
cur.execute(sql_select, query_params)
# fetch all data, to a max of [max_hits] rows.
field_names = (map(lambda x: x[0], cur.description))
field_names = ([x[0] for x in cur.description])
while True:
record = cur.fetchone()
if record is None: