|
1 |
import os, httplib, time
|
|
2 |
from PreludeCorrelator import require
|
|
3 |
from PreludeCorrelator.idmef import IDMEF
|
|
4 |
from PreludeCorrelator.pluginmanager import Plugin
|
|
5 |
from PreludeCorrelator.context import Context, Timer
|
|
6 |
from netcidr import CIDR, Networks
|
|
7 |
|
|
8 |
class SpamhausDropPlugin(Plugin):
    """Correlate alert source addresses against the Spamhaus DROP list.

    The DROP ("Don't Route Or Peer") list is fetched over HTTP from
    www.spamhaus.org, cached in a local data file, and refreshed every
    RELOAD seconds via a Timer.  For each incoming alert, every source
    address is tested against the loaded CIDR networks; a match raises
    a medium-severity correlation alert.
    """

    RELOAD = 7 * 24 * 60 * 60   # refresh interval: one week, in seconds
    SERVER = "www.spamhaus.org"
    URI = "/drop/drop.lasso"
    TIMEOUT = 10                # HTTP connect timeout, in seconds
    FILENAME = require.get_data_filename(__name__, "spamhaus_drop.dat")

    def __ipNormalize(self, ip):
        # Strip leading zeros from each dotted-quad octet
        # ("010.001.000.001" -> "10.1.0.1").  The "or '0'" keeps an
        # all-zero octet from collapsing to an empty string.
        return ".".join([ i.lstrip("0") or "0" for i in ip.split(".") ])

    def __loadData(self, age=0):
        """Parse the cached DROP file and (re)build the network set.

        age: seconds since the cache file was written; used to schedule
        the next refresh so reloads stay aligned with the file's mtime.
        """
        # Each data line is "CIDR ; SBL-reference"; a leading ';'
        # introduces a full-line comment.
        nets = []
        fd = open(self.__filename, "r")
        try:
            for line in fd:
                line = line.strip()
                if not line or line.startswith(';'):
                    continue

                # split(';', 1): tolerate extra ';' inside the trailer.
                ip, sbl = line.split(';', 1)
                nets.append(CIDR(ip.strip()))
        finally:
            fd.close()

        # Replace (never extend) the previous set, so a periodic reload
        # does not accumulate stale entries or append into the Networks
        # wrapper created by an earlier load.
        self.__mynets = Networks(nets)

        if self.__reload > 0:
            Timer(self.__reload - age, self.__retrieveData).start()

    def __downloadData(self):
        """Fetch the DROP list over HTTP and write it to the cache file."""
        self.info("Downloading host list, this might take some time...")

        try:
            con = httplib.HTTPConnection(self.__server, timeout=self.__timeout)
        except TypeError:
            # Python < 2.6: HTTPConnection has no timeout= argument.
            con = httplib.HTTPConnection(self.__server)

        try:
            con.request("GET", self.__uri)
            r = con.getresponse()
            if r.status != 200:
                raise Exception("Could not download spamhaus DROP list, error %d" % r.status)

            data = r.read()
        finally:
            con.close()

        fd = open(self.__filename, "w")
        try:
            fd.write(data)
        finally:
            fd.close()

        self.info("Downloading done, processing data.")

    def __retrieveData(self, timer=None):
        """Load the cached list when fresh enough, else re-download it.

        timer: the expired Timer instance when invoked as a timer
        callback; unused, present for callback-signature compatibility.
        """
        try:
            st = os.stat(self.__filename)
            if self.__reload <= 0 or time.time() - st.st_mtime < self.__reload:
                return self.__loadData(time.time() - st.st_mtime)
        except OSError:
            # Cache file missing or unreadable: fall through to download.
            pass

        self.__downloadData()
        self.__loadData()

    def __init__(self, env):
        Plugin.__init__(self, env)

        self.__mynets = []
        self.__reload = self.getConfigValue("reload", self.RELOAD, type=int)
        self.__filename = self.getConfigValue("filename", self.FILENAME)
        self.__server = self.getConfigValue("server", self.SERVER)
        self.__uri = self.getConfigValue("uri", self.URI)
        self.__timeout = self.getConfigValue("timeout", self.TIMEOUT, type=float)
        self.__retrieveData()

    def run(self, idmef):
        """Raise a correlation alert for each source address found in DROP."""
        for source in idmef.Get("alert.source(*).node.address(*).address"):
            src = CIDR(source)
            if src in self.__mynets:
                ca = IDMEF()
                ca.addAlertReference(idmef)
                ca.Set("alert.classification.text", "IP source matching Spamhaus DROP dataset")
                ca.Set("alert.correlation_alert.name", "IP source matching Spamhaus DROP dataset")
                ca.Set("alert.assessment.impact.description", "Spamhaus gathered this IP address in their DROP list - %s" % (source))
                ca.Set("alert.assessment.impact.severity", "medium")
                ca.alert()