Small optimisation of feed_switch

Geoffrey Frogeye 2019-12-15 17:12:44 +01:00
parent 45325782d2
commit 7af2074c7a
Signed by: geoffrey
GPG key ID: D8A7ECA00A8CD3DD

@@ -8,20 +8,19 @@ import typing
 import multiprocessing
 import enum
 
-RecordType = enum.Enum('RecordType', 'A AAAA CNAME PTR')
-Record = typing.Tuple[RecordType, int, str, str]
+Record = typing.Tuple[typing.Callable, typing.Callable, int, str, str]
 
 # select, write
 FUNCTION_MAP: typing.Any = {
-    RecordType.A: (
+    'a': (
         database.Database.get_ip4,
         database.Database.set_hostname,
     ),
-    RecordType.CNAME: (
+    'cname': (
         database.Database.get_domain,
         database.Database.set_hostname,
     ),
-    RecordType.PTR: (
+    'ptr': (
         database.Database.get_domain,
         database.Database.set_ip4address,
     ),
@@ -47,10 +46,9 @@ class Writer(multiprocessing.Process):
             record: Record
             for record in block:
-                rtype, updated, name, value = record
+                select, write, updated, name, value = record
                 self.db.enter_step('feed_switch')
-                select, write = FUNCTION_MAP[rtype]
                 for source in select(self.db, value):
                     # write(self.db, name, updated, source=source)
                     write(self.db, name, updated)
@@ -94,13 +92,6 @@ class Parser():
 
 
 class Rapid7Parser(Parser):
-    TYPES = {
-        'a': RecordType.A,
-        'aaaa': RecordType.AAAA,
-        'cname': RecordType.CNAME,
-        'ptr': RecordType.PTR,
-    }
-
     def consume(self) -> None:
         data = dict()
         for line in self.buf:
@@ -112,8 +103,10 @@ class Rapid7Parser(Parser):
                 val = split[k+2]
                 data[key] = val
 
+            select, writer = FUNCTION_MAP[data['type']]
             record = (
-                Rapid7Parser.TYPES[data['type']],
+                select,
+                writer,
                 int(data['timestamp']),
                 data['name'],
                 data['value']
@@ -125,9 +118,9 @@ class DnsMassParser(Parser):
     # dnsmass --output Snrql
     # --retry REFUSED,SERVFAIL --resolvers nameservers-ipv4
     TYPES = {
-        'A': (RecordType.A, -1, None),
-        'AAAA': (RecordType.AAAA, -1, None),
-        'CNAME': (RecordType.CNAME, -1, -1),
+        'A': (FUNCTION_MAP['a'][0], FUNCTION_MAP['a'][1], -1, None),
+        # 'AAAA': (FUNCTION_MAP['aaaa'][0], FUNCTION_MAP['aaaa'][1], -1, None),
+        'CNAME': (FUNCTION_MAP['cname'][0], FUNCTION_MAP['cname'][1], -1, -1),
     }
 
     def consume(self) -> None:
@@ -146,10 +139,11 @@ class DnsMassParser(Parser):
                     timestamp = int(split[1])
                     header = False
                 else:
-                    dtype, name_offset, value_offset = \
+                    select, write, name_offset, value_offset = \
                         DnsMassParser.TYPES[split[1]]
                     record = (
-                        dtype,
+                        select,
+                        write,
                         timestamp,
                         split[0][:name_offset],
                         split[2][:value_offset],
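
In short, the optimisation moves the FUNCTION_MAP lookup out of the per-record feed_switch step: the parsers now resolve the select/write callables once while building each Record and store them directly in the tuple, so the Writer only unpacks and calls them. A minimal sketch of the idea, using hypothetical stand-in functions in place of the real database.Database methods:

import typing

# Record now carries the callables directly: (select, write, updated, name, value)
Record = typing.Tuple[typing.Callable, typing.Callable, int, str, str]

# Stand-ins for database.Database.get_ip4 / set_hostname (illustration only)
def get_ip4(value: str) -> typing.Iterator[str]:
    yield 'some-source'

def set_hostname(name: str, updated: int) -> None:
    print(f"set_hostname({name!r}, {updated})")

FUNCTION_MAP: typing.Any = {'a': (get_ip4, set_hostname)}

# Parser side: one dict lookup per record, done while parsing
select, write = FUNCTION_MAP['a']
record: Record = (select, write, 1576425164, 'example.com', '192.0.2.1')

# Writer side: feed_switch is now a plain tuple unpack, no lookup
select, write, updated, name, value = record
for source in select(value):
    write(name, updated)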