Save duplicates too

Maybe I won't publish them, but this will help me with tracking trackers.
6 changed files with 33 additions and 164 deletions
database.py (29 lines changed)
export.py (8 lines changed)
export_lists.sh (4 lines changed)
feed_asn.py (5 lines changed)
feed_dns.old.py (147 lines changed)
feed_dns.py (4 lines changed)
feed_dns.old.py
@@ -1,147 +0,0 @@
#!/usr/bin/env python3

import argparse
import database
import logging
import sys
import typing
import enum

RecordType = enum.Enum('RecordType', 'A AAAA CNAME PTR')
Record = typing.Tuple[RecordType, int, str, str]

# select, write
FUNCTION_MAP: typing.Any = {
    RecordType.A: (
        database.Database.get_ip4,
        database.Database.set_hostname,
    ),
    RecordType.CNAME: (
        database.Database.get_domain,
        database.Database.set_hostname,
    ),
    RecordType.PTR: (
        database.Database.get_domain,
        database.Database.set_ip4address,
    ),
}
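# Note: RecordType.AAAA has no FUNCTION_MAP entry, so register() raises
# KeyError for AAAA records; Rapid7Parser lets that propagate, while
# DnsMassParser's except clause silently skips them.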


class Parser():
    def __init__(self, buf: typing.Any) -> None:
        self.buf = buf
        self.log = logging.getLogger('parser')
        self.db = database.Database()

    def end(self) -> None:
        self.db.save()

    def register(self,
                 rtype: RecordType,
                 updated: int,
                 name: str,
                 value: str
                 ) -> None:

        self.db.enter_step('register')
        select, write = FUNCTION_MAP[rtype]
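        # For every existing entry matching this record's value, write the
        # record's name with its timestamp; the commented-out variant would
        # also record which source matched.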
        for source in select(self.db, value):
            # write(self.db, name, updated, source=source)
            write(self.db, name, updated)

    def consume(self) -> None:
        raise NotImplementedError


class Rapid7Parser(Parser):
    TYPES = {
        'a': RecordType.A,
        'aaaa': RecordType.AAAA,
        'cname': RecordType.CNAME,
        'ptr': RecordType.PTR,
    }

    def consume(self) -> None:
        data = dict()
        for line in self.buf:
            self.db.enter_step('parse_rapid7')
            split = line.split('"')

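            # Cheap JSON field extraction: each line holds exactly four
            # quoted "key":"value" pairs, so after splitting on '"' the keys
            # sit at offsets 1, 5, 9, 13 and each value two slots later.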
            for k in range(1, 14, 4):
                key = split[k]
                val = split[k+2]
                data[key] = val

            self.register(
                Rapid7Parser.TYPES[data['type']],
                int(data['timestamp']),
                data['name'],
                data['value']
            )


class DnsMassParser(Parser):
    # dnsmass --output Snrql
    # --retry REFUSED,SERVFAIL --resolvers nameservers-ipv4
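    # Maps a record type to (RecordType, name slice end, value slice end):
    # the -1 offsets strip the trailing dot dnsmass prints after hostnames,
    # while None leaves the field untouched (A/AAAA values are bare IPs).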
    TYPES = {
        'A': (RecordType.A, -1, None),
        'AAAA': (RecordType.AAAA, -1, None),
        'CNAME': (RecordType.CNAME, -1, -1),
    }

    def consume(self) -> None:
        self.db.enter_step('parse_dnsmass')
        timestamp = 0
        header = True
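        # dnsmass output arrives in blank-line-separated blocks; the first
        # line of each block is a header whose second field is the epoch
        # timestamp applying to the records that follow.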
        for line in self.buf:
            line = line[:-1]
            if not line:
                header = True
                continue

            split = line.split(' ')
            try:
                if header:
                    timestamp = int(split[1])
                    header = False
                else:
                    dtype, name_offset, value_offset = \
                        DnsMassParser.TYPES[split[1]]
                    self.register(
                        dtype,
                        timestamp,
                        split[0][:name_offset],
                        split[2][:value_offset],
                    )
                    self.db.enter_step('parse_dnsmass')
            except KeyError:
                continue


PARSERS = {
    'rapid7': Rapid7Parser,
    'dnsmass': DnsMassParser,
}

if __name__ == '__main__':

    # Parsing arguments
    log = logging.getLogger('feed_dns')
    args_parser = argparse.ArgumentParser(
        description="TODO")
    args_parser.add_argument(
        'parser',
        choices=PARSERS.keys(),
        help="TODO")
    args_parser.add_argument(
        '-i', '--input', type=argparse.FileType('r'), default=sys.stdin,
        help="TODO")
    args = args_parser.parse_args()

    parser = PARSERS[args.parser](args.input)
    try:
        parser.consume()
    except KeyboardInterrupt:
        pass
    parser.end()