[tor-commits] [ooni-probe/master] Merge branch 'master' into tcp-flags
isis at torproject.org
Tue Dec 18 05:53:46 UTC 2012
commit 20cd1df9de76b54a77aac22e44f457272354c0cb
Merge: 4142c7f 9688f83
Author: Isis Lovecruft <isis at torproject.org>
Date: Wed Dec 12 01:39:33 2012 +0000
Merge branch 'master' into tcp-flags
Conflicts:
ooni/inputunit.py
ooni/nettest.py
ooni/oonicli.py
ooni/reporter.py
ooni/runner.py
ooni/utils/hacks.py
ooni/utils/net.py
ooni/utils/txscapy.py
ooniprobe.conf.sample
nettests/bridge_reachability/tcpsyn.py | 191 +++++++++++++++++++++
nettests/experimental/bridge_reachability/echo.py | 150 ++++++++++++-----
ooni/inputunit.py | 20 ++-
ooni/nettest.py | 60 ++++++-
ooni/oonicli.py | 40 +++--
ooni/reporter.py | 45 +++---
ooni/runner.py | 157 +++++++++++++----
ooni/utils/__init__.py | 19 +-
ooni/utils/geodata.py | 16 ++-
ooni/utils/hacks.py | 7 +-
ooni/utils/log.py | 27 +++
ooni/utils/net.py | 113 +++++++++++--
ooni/utils/txscapy.py | 15 +-
ooniprobe.conf.sample | 3 +
14 files changed, 712 insertions(+), 151 deletions(-)
diff --cc nettests/experimental/bridge_reachability/echo.py
index d4033dd,0000000..0f422ca
mode 100644,000000..100644
--- a/nettests/experimental/bridge_reachability/echo.py
+++ b/nettests/experimental/bridge_reachability/echo.py
@@@ -1,132 -1,0 +1,200 @@@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# +---------+
+# | echo.py |
+# +---------+
+# A simple ICMP-8 ping test.
+#
+# @authors: Isis Lovecruft, <isis at torproject.org>
+# @version: 0.0.2-pre-alpha
+# @license: copyright (c) 2012 Isis Lovecruft
+# see attached LICENCE file
+#
+
+import os
+import sys
+
+from twisted.python import usage
- from twisted.internet import reactor, defer
++from twisted.internet import reactor, defer, address
+from ooni import nettest
+from ooni.utils import log, net, Storage, txscapy
+
+try:
- from scapy.all import IP, ICMP
- from scapy.all import sr1
- from ooni.lib import txscapy
- from ooni.lib.txscapy import txsr, txsend
- from ooni.templates.scapyt import BaseScapyTest
- except:
++ from scapy.all import IP, ICMP
++ from scapy.all import sr1
++ from ooni.utils import txscapy
++except Exception, e:
+ log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
++ log.exception(e)
+
+class UsageOptions(usage.Options):
++ """
++ Options for EchoTest.
++
++ Note: 'count', 'size', and 'ttl' have yet to be implemented.
++ """
+ optParameters = [
+ ['dst', 'd', None, 'Host IP to ping'],
+ ['file', 'f', None, 'File of list of IPs to ping'],
++ ['pcap', 'p', None, 'Save pcap to this file'],
+ ['interface', 'i', None, 'Network interface to use'],
++ ['receive', 'r', True, 'Receive response packets'],
++ ['timeout', 't', 2, 'Seconds to wait if no response', int],
+ ['count', 'c', 1, 'Number of packets to send', int],
- ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
- ['ttl', 'l', 25, 'Set the IP Time to Live', int],
- ['timeout', 't', 2, 'Seconds until timeout if no response', int],
- ['pcap', 'p', None, 'Save pcap to this file'],
- ['receive', 'r', True, 'Receive response packets']]
++ ['size', 's', 56, 'Bytes to send in ICMP data field', int],
++ ['ttl', 'l', 25, 'Set the IP Time to Live', int]]
+
+class EchoTest(nettest.NetTestCase):
+ """
- xxx fill me in
++ Basic ping test. This takes an input file containing one IP or hostname
++ per line.
+ """
+ name = 'echo'
+ author = 'Isis Lovecruft <isis at torproject.org>'
+ description = 'A simple ping test to see if a host is reachable.'
+ version = '0.0.2'
+ requiresRoot = True
+
+ usageOptions = UsageOptions
+ #requiredOptions = ['dst']
+
+ def setUp(self, *a, **kw):
++ """
++ Send an ICMP-8 packet to a host IP, and process the response.
++
++ @param timeout:
++ Seconds after sending the last packet to timeout.
++ @param interface:
++ The interface to restrict listening to.
++ @param dst:
++ A single host to ping.
++ @param file:
++ A file of hosts to ping, one per line.
++ @param receive:
++ Whether or not to receive replies. Defaults to True.
++ @param pcap:
++ The file to save packet captures to.
++ """
+ self.destinations = {}
+
+ if self.localOptions:
+ for key, value in self.localOptions.items():
+ log.debug("setting self.%s = %s" % (key, value))
+ setattr(self, key, value)
+
+ self.timeout *= 1000 ## convert to milliseconds
+
+ if not self.interface:
+ try:
+ iface = txscapy.getDefaultIface()
+ except Exception, e:
+ log.msg("No network interface specified!")
- log.err(e)
++ log.exception(e)
+ else:
+ log.msg("Using system default interface: %s" % iface)
+ self.interface = iface
+
+ if self.pcap:
+ try:
+ self.pcapfile = open(self.pcap, 'a+')
+ except:
+ log.msg("Unable to write to pcap file %s" % self.pcap)
+ else:
+ self.pcap = net.capturePacket(self.pcapfile)
+
+ if not self.dst:
+ if self.file:
+ self.dstProcessor(self.file)
- for key, value in self.destinations.items():
- for label, data in value.items():
- if not 'ans' in data:
- self.dst = label
++ for address, details in self.destinations.items():
++ for labels, data in details.items():
++ if not 'response' in labels:
++ self.dst = details['dst_ip']
+ else:
+ self.addDest(self.dst)
- log.debug("self.dst is now: %s" % self.dst)
+
+ log.debug("Initialization of %s test completed." % self.name)
+
+ def addDest(self, dest):
+ d = dest.strip()
+ self.destinations[d] = {'dst_ip': d}
+
+ def dstProcessor(self, inputfile):
- from ipaddr import IPAddress
-
+ if os.path.isfile(inputfile):
+ with open(inputfile) as f:
+ for line in f.readlines():
+ if line.startswith('#'):
+ continue
+ self.addDest(line)
+
++ def build_packets(self):
++ """
++ Construct a list of packets to send out.
++ """
++ packets = []
++ for dest, data in self.destinations.items():
++ pkt = IP(dst=dest)/ICMP()
++ packets.append(pkt)
++ ## XXX if a domain was specified, we need a way to check that
++ ## its IP matches the one we're seeing in pkt.src
++ #try:
++ # address.IPAddress(dest)
++ #except:
++ # data['dst_ip'] = pkt.dst
++ return packets
++
+ def test_icmp(self):
- def process_response(echo_reply, dest):
- ans, unans = echo_reply
- if ans:
- log.msg("Recieved echo reply from %s: %s" % (dest, ans))
- else:
- log.msg("No reply was received from %s. Possible censorship event." % dest)
- log.debug("Unanswered packets: %s" % unans)
- self.report[dest] = echo_reply
-
- for label, data in self.destinations.items():
- reply = sr1(IP(dst=lebal)/ICMP())
- process = process_reponse(reply, label)
-
- #(ans, unans) = ping
- #self.destinations[self.dst].update({'ans': ans,
- # 'unans': unans,
- # 'response_packet': ping})
- #return ping
-
- #return reply
++ """
++ Send the list of ICMP packets.
++
++ TODO: add end summary progress report for % answered, etc.
++ """
++ try:
++ def nicely(packets):
++ """Print scapy summary nicely."""
++ return list([x.summary() for x in packets])
++
++ def process_answered((answered, sent)):
++ """Callback function for txscapy.sr()."""
++ self.report['sent'] = nicely(sent)
++ self.report['answered'] = [nicely(ans) for ans in answered]
++
++ for req, resp in answered:
++ log.msg("Received echo-reply:\n%s" % resp.summary())
++ for dest, data in self.destinations.items():
++ if data['dst_ip'] == resp.src:
++ data['response'] = resp.summary()
++ data['censored'] = False
++ for snd in sent:
++ if snd.dst == resp.src:
++ answered.remove((req, resp))
++ return (answered, sent)
++
++ def process_unanswered((unanswered, sent)):
++ """
++ Callback function for remaining packets and destinations which
++ do not have an associated response.
++ """
++ if len(unanswered) > 0:
++ nicer = [nicely(unans) for unans in unanswered]
++ log.msg("Unanswered/remaining packets:\n%s"
++ % nicer)
++ self.report['unanswered'] = nicer
++ for dest, data in self.destinations.items():
++ if not 'response' in data:
++ log.msg("No reply from %s. Possible censorship event."
++ % dest)
++ data['response'] = None
++ data['censored'] = True
++ return (unanswered, sent)
++
++ packets = self.build_packets()
++ d = txscapy.sr(packets, iface=self.interface, multi=True)
++ d.addCallback(process_answered)
++ d.addErrback(log.exception)
++ d.addCallback(process_unanswered)
++ d.addErrback(log.exception)
++ self.report['destinations'] = self.destinations
++ return d
++ except Exception, e:
++ log.exception(e)
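For illustration, a rough standalone sketch of the reply-matching logic in process_answered()/process_unanswered() above; the Reply namedtuple is only a stand-in for a scapy packet and is not part of this patch:

    from collections import namedtuple

    Reply = namedtuple('Reply', ['src', 'summary'])

    def mark_destinations(destinations, answered):
        # Mark every destination whose IP shows up as the source of a reply.
        for req, resp in answered:
            for dest, data in destinations.items():
                if data['dst_ip'] == resp.src:
                    data['response'] = resp.summary
                    data['censored'] = False
        # Anything left without a response is flagged as a possible censorship event.
        for dest, data in destinations.items():
            if 'response' not in data:
                data['response'] = None
                data['censored'] = True
        return destinations

    destinations = {'10.0.0.1': {'dst_ip': '10.0.0.1'},
                    '10.0.0.2': {'dst_ip': '10.0.0.2'}}
    answered = [(None, Reply(src='10.0.0.1', summary='ICMP echo-reply'))]
    print(mark_destinations(destinations, answered))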
diff --cc ooni/inputunit.py
index 2ef89d8,4a5c792..0b4377f
--- a/ooni/inputunit.py
+++ b/ooni/inputunit.py
@@@ -6,9 -6,10 +6,10 @@@
# units. Input units describe how the inputs to be fed to tests are
# split up.
#
--# :authors: Arturo Filastò
++# :authors: Arturo Filastò, Isis Lovecruft
# :license: see included LICENSE file
+
class InputUnitFactory(object):
"""
This is a factory that takes the size of input units to be generated a set
@@@ -61,19 -50,31 +62,30 @@@
class InputUnit(object):
"""
This is a python iterable object that contains the input elements to be
- passed onto a TestCase.
+ passed onto a :class:`ooni.nettest.NetTestCase`.
"""
def __init__(self, inputs=[]):
+ """
+ Create an iterable from a list of inputs, which can be given to a NetTestCase.
+
+ @param inputs: A list of inputs for a NetTestCase.
+ """
self._inputs = iter(inputs)
+ # _inputs_copy is to avoid stealing things from
+ # the iterator when __repr__ is called:
- _inputs_copy = inputs
- self.__contains__ = _inputs_copy
++ self._inputs_copy = inputs
- def __repr__(self):
+ def __str__(self):
- return "<%s inputs=%s>" % (self.__class__, self._inputs)
+ """Prints the original input list."""
- return "<%s inputs=%s>" % (self.__class__, self.__contains__)
++ return "<%s inputs=%s>" % (self.__class__, self._inputs_copy)
def __add__(self, inputs):
+ """Add a list of inputs to the iterator."""
- for input in inputs:
- self._inputs.append(input)
+ for i in inputs:
+ self._inputs.append(i)
def __iter__(self):
+ """Self explanatory."""
return self
def next(self):
@@@ -80,5 -82,5 +93,6 @@@
return self._inputs.next()
def append(self, input):
+ """Add an item to the end of the InputUnit iterator."""
self._inputs.append(input)
+
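As a side note, a minimal sketch of the iterator-plus-copy pattern InputUnit uses above, so that printing the unit does not drain the iterator that next() consumes; MiniInputUnit is a hypothetical stand-in, using the Python 2 iterator protocol as in the diff:

    class MiniInputUnit(object):
        def __init__(self, inputs=None):
            inputs = inputs or []
            self._inputs = iter(inputs)
            self._inputs_copy = inputs      # untouched copy, used only for display

        def __str__(self):
            return "<%s inputs=%s>" % (self.__class__.__name__, self._inputs_copy)

        def __iter__(self):
            return self

        def next(self):
            return self._inputs.next()

    unit = MiniInputUnit([1, 2, 3])
    print(str(unit))    # shows all inputs without consuming any
    print(unit.next())  # 1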
diff --cc ooni/nettest.py
index 8374db1,29ced70..8fc29c3
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@@ -134,21 -119,9 +137,19 @@@ class NetTestCase(object)
pass
def setUp(self):
- """
- Place here your logic to be executed when the test is being setup.
- """
+ """Place your logic to be executed when the test is being setup here."""
pass
+ def postProcessor(self, report):
+ """
+ Subclass this to do post-processing tasks that occur once per input, after
+ all the test methods have been called.
+ postProcessing works exactly like test methods, in the sense that
+ anything that gets written to the object self.report[] will be added to
+ the final test report.
+ """
+ raise NoPostProcessor
+
def inputProcessor(self, filename=None):
"""
You may replace this with your own custom input processor. It takes as
@@@ -185,22 -158,17 +186,24 @@@
if not self.localOptions[required_option]:
raise usage.UsageError("%s not specified!" % required_option)
- def _processOptions(self, options=None):
- if self.inputFile:
- self.inputs = self.inputProcessor(self.inputFile)
+ def _processOptions(self):
+ if self.inputFilename:
+ inputProcessor = self.inputProcessor
+ inputFilename = self.inputFilename
+ class inputProcessorIterator(object):
+ """
+ Here we convert the input processor generator into an iterator
+ so that we can run it twice.
+ """
+ def __iter__(self):
+ return inputProcessor(inputFilename)
+ self.inputs = inputProcessorIterator()
+ self._checkRequiredOptions()
+
- # XXX perhaps we may want to name and version to be inside of a
- # different method that is not called options.
return {'inputs': self.inputs,
- 'name': self.name,
- 'version': self.version}
+ 'name': self.name, 'version': self.version
+ }
def __repr__(self):
return "<%s inputs=%s>" % (self.__class__, self.inputs)
diff --cc ooni/oonicli.py
index f706ee3,c64e445..9d4f783
--- a/ooni/oonicli.py
+++ b/ooni/oonicli.py
@@@ -20,15 -19,19 +20,18 @@@ from twisted.application import ap
from twisted.python import usage, failure
from twisted.python.util import spewer
- from ooni import nettest, runner, reporter, config
-## Get rid of the annoying "No route found for
-## IPv6 destination warnings":
++# Suppress scapy's "No route found for IPv6 destination" warnings:
+ import logging as pylogging
+ pylogging.getLogger("scapy.runtime").setLevel(pylogging.ERROR)
+ from ooni import nettest, runner, reporter, config
+from ooni.inputunit import InputUnitFactory
-
from ooni.utils import net
- from ooni.utils import checkForRoot, NotRootError
+ from ooni.utils import checkForRoot, PermissionsError
from ooni.utils import log
-from ooni.inputunit import InputUnitFactory
+
-class Options(usage.Options, app.ReactorSelectionMixin):
+class Options(usage.Options):
synopsis = """%s [options] [path to test].py
""" % (os.path.basename(sys.argv[0]),)
@@@ -78,68 -79,13 +81,66 @@@
except:
raise usage.UsageError("No test filename specified!")
+def updateStatusBar():
+ for test_filename in config.state.keys():
+ # The ETA is not updated, so we will not print it out for the
+ # moment.
+ eta = config.state[test_filename].eta()
+ progress = config.state[test_filename].progress()
+ progress_bar_frmt = "[%s] %s%%" % (test_filename, progress)
+ print progress_bar_frmt
+
def testsEnded(*arg, **kw):
- """
- You can place here all the post shutdown tasks.
- """
- log.debug("testsEnded: Finished running all tests")
+ """You can place here all the post shutdown tasks."""
+ log.debug("Finished running all tests")
+ config.start_reactor = False
- try: reactor.stop()
- except: pass
++ if not reactor.running:
++ try: reactor.stop()
++ except: reactor.runUntilCurrent()
+
+def startSniffing():
+ from ooni.utils.txscapy import ScapyFactory, ScapySniffer
+ try:
+ checkForRoot()
+ except NotRootError:
+ print "[!] Includepcap options requires root priviledges to run"
+ print " you should run ooniprobe as root or disable the options in ooniprobe.conf"
+ sys.exit(1)
+
+ print "Starting sniffer"
+ config.scapyFactory = ScapyFactory(config.advanced.interface)
+
+ sniffer = ScapySniffer(config.reports.pcap)
+ config.scapyFactory.registerProtocol(sniffer)
+
+def runTestList(none, test_list):
+ """
+ none: always None; this argument is ignored.
+
+ test_list (list): a list of tuples containing (test_cases, options,
+ cmd_line_options)
+ """
+ deck_dl = []
+
+ for test in test_list:
+ test_cases, options, cmd_line_options = test
+ d1 = runner.runTestCases(test_cases, options, cmd_line_options)
+ deck_dl.append(d1)
+
+ d2 = defer.DeferredList(deck_dl)
+ d2.addBoth(testsEnded)
+
+ # Print the list of currently running tests every 5 seconds
+ l = task.LoopingCall(updateStatusBar)
+ l.start(5.0)
+ return d2
+
+def errorRunningTests(failure):
+ failure.printTraceback()
def run():
- """
- Parses command line arguments of test.
- """
+ """Call me to begin testing from a file."""
+
cmd_line_options = Options()
if len(sys.argv) == 1:
cmd_line_options.getUsage()
@@@ -148,41 -94,42 +149,52 @@@
except usage.UsageError, ue:
raise SystemExit, "%s: %s" % (sys.argv[0], ue)
- if cmd_line_options['debug-stacktraces']:
- defer.setDebugging(True)
-
log.start(cmd_line_options['logfile'])
- test_file_name = os.path.basename(cmd_line_options['test'])
- log.debug("Running script %s" % test_file_name)
-
- yamloo_filename, pcap_filename = config.oreport_filenames(test_file_name)
-
- if cmd_line_options['reportfile']:
- yamloo_filename = cmd_line_options['reportfile']
- pcap_filename = yamloo_filename+".pcap"
-
- if os.path.exists(yamloo_filename):
- log.msg("Report already exists with filename %s" % yamloo_filename)
- log.msg("Renaming it to %s" % yamloo_filename+'.old')
- os.rename(yamloo_filename, yamloo_filename+'.old')
- if os.path.exists(pcap_filename):
- log.msg("Report already exists with filename %s" % pcap_filename)
- log.msg("Renaming it to %s" % pcap_filename+'.old')
- os.rename(pcap_filename, pcap_filename+'.old')
-
- classes = runner.findTestClassesFromConfig(cmd_line_options)
- test_cases, options = runner.loadTestsAndOptions(classes, cmd_line_options)
if config.privacy.includepcap:
- log.msg("Starting")
- runner.startSniffing()
+ try:
+ checkForRoot()
+ except PermissionsError, pe:
- log.err(str("'includepcap' option requires administrator or root ",
- "privileges to run. Run ooniprobe as root or disable ",
- "the includepcap option in ooniprobe.conf "))
++ m = ("Capturing packets requires administrator/root privileges. ",
++ "Run ooniprobe as root or set 'includepcap = false' in ",
++ "ooniprobe.conf .")
++ log.warn("%s" % m)
+ sys.exit(1)
- log.debug("Starting sniffer")
- sniffer_d = net.capturePackets(pcap_filename)
++ else:
++ log.msg("Starting packet capture")
++ runner.startSniffing()
+
+ resume = cmd_line_options['resume']
+
+ # contains (test_cases, options, cmd_line_options)
+ test_list = []
+
+ if cmd_line_options['testdeck']:
+ test_deck = yaml.safe_load(open(cmd_line_options['testdeck']))
+ for test in test_deck:
+ del cmd_line_options
+ cmd_line_options = test['options']
+ if resume:
+ cmd_line_options['resume'] = True
+ else:
+ cmd_line_options['resume'] = False
+ test_list.append(runner.loadTest(cmd_line_options))
+ else:
+ log.msg("No test deck detected")
+ del cmd_line_options['testdeck']
+ test_list.append(runner.loadTest(cmd_line_options))
+
+ if config.advanced.start_tor:
+ log.msg("Starting Tor...")
+ d = runner.startTor()
+ d.addCallback(runTestList, test_list)
+ d.addErrback(errorRunningTests)
+ else:
+ # We need to pass None as the first argument because when the callback is
+ # fired it will pass its result to runTestList.
+ d = runTestList(None, test_list)
+ d.addErrback(errorRunningTests)
+
++ # XXX I believe we don't actually need this:
+ reactor.run()
+
- tests_d = runner.runTestCases(test_cases, options,
- cmd_line_options, yamloo_filename)
- tests_d.addBoth(testsEnded)
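As a rough sketch of the control flow in runTestList() above (not part of the patch): one deferred per test is collected into a DeferredList, while a LoopingCall periodically reports progress; fake_test and the intervals are made up for the example.

    from twisted.internet import defer, task, reactor

    def fake_test(name, seconds):
        # Stand-in for runner.runTestCases(): fires after `seconds`.
        d = defer.Deferred()
        reactor.callLater(seconds, d.callback, name)
        return d

    def show_progress():
        print("still running...")

    def all_done(results):
        print("finished: %s" % results)
        status.stop()
        reactor.stop()

    status = task.LoopingCall(show_progress)
    status.start(1.0)

    dl = defer.DeferredList([fake_test("a", 2), fake_test("b", 3)])
    dl.addBoth(all_done)
    reactor.run()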
diff --cc ooni/reporter.py
index 29e6049,133b98d..fc41960
--- a/ooni/reporter.py
+++ b/ooni/reporter.py
@@@ -15,97 -13,23 +14,94 @@@ import o
import time
import yaml
import json
+ import traceback
+from yaml.representer import *
+from yaml.emitter import *
+from yaml.serializer import *
+from yaml.resolver import *
from twisted.python.util import untilConcludes
from twisted.trial import reporter
from twisted.internet import defer, reactor
+from twisted.internet.error import ConnectionRefusedError
- from ooni.utils import log
-from ooni import config
-from ooni.templates.httpt import BodyReceiver, StringProducer
-from ooni.utils import otime, log, geodata
++from ooni import config, otime
++from ooni.utils import log, geodata
+ from ooni.utils.hacks import OSafeRepresenter, OSafeDumper
++from ooni.utils.net import BodyReceiver, StringProducer, userAgents
try:
- from scapy.all import packet
-except:
- class FooClass:
- Packet = object
- packet = FooClass
+ from scapy.packet import Packet
+except ImportError:
+ log.err("Scapy is not installed.")
+
-
- from ooni import otime
- from ooni.utils import geodata
- from ooni.utils.net import BodyReceiver, StringProducer, userAgents
-
- from ooni import config
-
+def createPacketReport(packet_list):
+ """
+ Takes as input a list of packets.
+
+ Returns a list of dicts, each containing the packet
+ summary and the raw packet.
+ """
+ report = []
+ for packet in packet_list:
+ report.append({'raw_packet': str(packet),
+ 'summary': str(packet.summary())})
+ return report
+
+class OSafeRepresenter(SafeRepresenter):
+ """
+ This is a custom YAML representer that allows us to represent reports
+ safely.
+ It extends the SafeRepresenter to be able to also represent complex
+ numbers and scapy packets.
+ """
+ def represent_data(self, data):
+ """
+ This is very hackish. There is surely a better way, either by using
+ add_multi_representer or add_representer; the issue, though, lies in
+ the fact that Scapy packets are metaclasses, which causes
+ yaml.representer.get_classobj_bases to be unable to properly get the
+ base class of a Scapy packet.
+ XXX fully debug this problem
+ """
+ if isinstance(data, Packet):
+ data = createPacketReport(data)
+ return SafeRepresenter.represent_data(self, data)
+
+ def represent_complex(self, data):
+ if data.imag == 0.0:
+ data = u'%r' % data.real
+ elif data.real == 0.0:
+ data = u'%rj' % data.imag
+ elif data.imag > 0:
+ data = u'%r+%rj' % (data.real, data.imag)
+ else:
+ data = u'%r%rj' % (data.real, data.imag)
+ return self.represent_scalar(u'tag:yaml.org,2002:python/complex', data)
+
+OSafeRepresenter.add_representer(complex,
+ OSafeRepresenter.represent_complex)
+
+class OSafeDumper(Emitter, Serializer, OSafeRepresenter, Resolver):
+ """
+ This is a modification of the YAML Safe Dumper to use our own Safe
+ Representer that supports complex numbers.
+ """
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ Emitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ Serializer.__init__(self, encoding=encoding,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ OSafeRepresenter.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
class NoTestIDSpecified(Exception):
pass
@@@ -180,10 -111,10 +185,10 @@@ class OReporter(object)
pass
def testDone(self, test, test_name):
- log.msg("Finished running %s" % test_name)
+ log.debug("Calling reporter to record results")
test_report = dict(test.report)
- if isinstance(test.input, packet.Packet):
+ if isinstance(test.input, Packet):
test_input = createPacketReport(test.input)
else:
test_input = test.input
@@@ -259,19 -189,11 +264,19 @@@ class OONIBTestDetailsLookupError(OONIB
pass
class OONIBReporter(OReporter):
- def __init__(self, backend_url):
- from twisted.web.client import Agent
+ def __init__(self, cmd_line_options):
+ self.backend_url = cmd_line_options['collector']
+ self.report_id = None
+
+ from ooni.utils.txagentwithsocks import Agent
from twisted.internet import reactor
- self.agent = Agent(reactor)
- self.backend_url = backend_url
+ try:
+ self.agent = Agent(reactor, sockshost="127.0.0.1",
+ socksport=int(config.tor.socks_port))
- except Exception, e:
- log.exception(e)
++ except Exception, ex:
++ log.exception(ex)
+
+ OReporter.__init__(self, cmd_line_options)
@defer.inlineCallbacks
def writeReportEntry(self, entry):
@@@ -308,47 -236,35 +313,47 @@@
test_name = options['name']
test_version = options['version']
- log.debug("Creating report with OONIB Reporter")
url = self.backend_url + '/report/new'
- software_version = '0.0.1'
- test_details = yield getTestDetails(options)
+ try:
+ test_details = yield getTestDetails(options)
- except Exception, e:
- log.exception(e)
++ except Exception, ex:
++ log.exception(ex)
+
+ test_details['options'] = self.cmd_line_options
+
+ log.debug("Obtained test_details: %s" % test_details)
content = '---\n'
content += safe_dump(test_details)
content += '...\n'
- request = {'software_name': 'ooniprobe',
- 'software_version': software_version,
+ request = {'software_name': test_details['software_name'],
+ 'software_version': test_details['software_version'],
'test_name': test_name,
'test_version': test_version,
- 'progress': 0,
'content': content
-- }
- log.debug("Creating report via url %s" % url)
++ }
+
+ log.msg("Reporting %s" % url)
request_json = json.dumps(request)
log.debug("Sending %s" % request_json)
bodyProducer = StringProducer(json.dumps(request))
- log.debug("Creating report via url %s" % url)
+
+ log.msg("Creating report with OONIB Reporter. Please be patient.")
+ log.msg("This may take up to 1-2 minutes...")
try:
- response = yield self.agent.request("POST", url,
+ response = yield self.agent.request("POST", url,
bodyProducer=bodyProducer)
- except:
- raise OONIBReportCreationFailed
+ except ConnectionRefusedError:
+ log.err("Connection to reporting backend failed (ConnectionRefusedError)")
+ raise OONIBReportCreationError
+
- except Exception, e:
- log.exception(e)
++ except Exception, ex:
++ log.exception(ex)
+ raise OONIBReportCreationError
# This is a little trick to allow us to unspool the response. We create
# a deferred and call yield on it.
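To make the representer mechanism above concrete, a minimal sketch of teaching a PyYAML SafeDumper to serialize a type it does not handle by default (complex numbers, mirroring OSafeRepresenter); ComplexSafeDumper is a hypothetical name:

    import yaml

    def represent_complex(dumper, data):
        # Render a complex number as a scalar with a python/complex tag.
        if data.imag == 0.0:
            text = u'%r' % data.real
        elif data.real == 0.0:
            text = u'%rj' % data.imag
        elif data.imag > 0:
            text = u'%r+%rj' % (data.real, data.imag)
        else:
            text = u'%r%rj' % (data.real, data.imag)
        return dumper.represent_scalar(u'tag:yaml.org,2002:python/complex', text)

    class ComplexSafeDumper(yaml.SafeDumper):
        pass

    ComplexSafeDumper.add_representer(complex, represent_complex)

    print(yaml.dump({'value': 3 + 4j}, Dumper=ComplexSafeDumper))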
diff --cc ooni/runner.py
index bc9b874,5969dd5..58ec188
--- a/ooni/runner.py
+++ b/ooni/runner.py
@@@ -13,28 -14,30 +14,36 @@@ import tim
import inspect
import traceback
import itertools
+import yaml
- from twisted.python import reflect, usage
- from twisted.internet import defer
+ from twisted.python import reflect, usage, failure
-from twisted.internet import defer, reactor
++from twisted.internet import defer, reactor, threads
from twisted.trial.runner import filenameToModule
- from twisted.internet import reactor, threads
+ from twisted.trial import reporter as txreporter
+ from twisted.trial import util as txtrutil
+ from twisted.trial.unittest import utils as txtrutils
+ from twisted.trial.unittest import SkipTest
-from twisted.internet import reactor, threads
+from txtorcon import TorProtocolFactory, TorConfig
+from txtorcon import TorState, launch_tor
+
- from ooni import config
-
++from ooni import config, nettest, reporter
+from ooni.reporter import OONIBReporter, YAMLReporter, OONIBReportError
-
from ooni.inputunit import InputUnitFactory
- from ooni.nettest import NetTestCase, NoPostProcessor
-
-from ooni import reporter, nettest
-from ooni.utils import log, checkForRoot, PermissionsError
+from ooni.utils import log, checkForRoot
- from ooni.utils import NotRootError, Storage
++from ooni.utils import PermissionsError, Storage
+from ooni.utils.net import randomFreePort
- def processTest(obj):
+ def isTestCase(obj):
+ """
+ Return True if obj is a subclass of NetTestCase, False otherwise.
+ """
+ try:
+ return issubclass(obj, nettest.NetTestCase)
+ except TypeError:
+ return False
+
+ def processTest(obj, cmd_line_options):
"""
Process the parameters and :class:`twisted.python.usage.Options` of a
:class:`ooni.nettest.Nettest`.
@@@ -46,60 -48,61 +55,54 @@@
:param cmd_line_options:
A configured and instantiated :class:`twisted.python.usage.Options`
class.
+
"""
+ if not hasattr(obj.usageOptions, 'optParameters'):
+ obj.usageOptions.optParameters = []
+
+ if obj.inputFile:
+ obj.usageOptions.optParameters.append(obj.inputFile)
- input_file = obj.inputFile
+ if obj.requiresRoot:
+ try:
+ checkForRoot()
+ except PermissionsError:
+ log.err("%s requires root to run" % obj.name)
+ sys.exit(1)
+
- if obj.optParameters or input_file \
- or obj.usageOptions or obj.optFlags:
+ if obj.baseParameters:
+ for parameter in obj.baseParameters:
+ obj.usageOptions.optParameters.append(parameter)
- if not obj.optParameters:
- obj.optParameters = []
+ if obj.baseFlags:
+ if not hasattr(obj.usageOptions, 'optFlags'):
+ obj.usageOptions.optFlags = []
+ for flag in obj.baseFlags:
+ obj.usageOptions.optFlags.append(flag)
- if input_file:
- obj.optParameters.append(input_file)
+ options = obj.usageOptions()
- if obj.usageOptions:
- if input_file:
- obj.usageOptions.optParameters.append(input_file)
- options = obj.usageOptions()
- elif obj.optParameters:
- log.debug("Got optParameters")
- class Options(usage.Options):
- optParameters = obj.optParameters
- if obj.optFlags:
- log.debug("Got optFlags")
- optFlags = obj.optFlags
- options = Options()
+ options.parseOptions(config.cmd_line_options['subargs'])
+ obj.localOptions = options
- if options:
- options.parseOptions(cmd_line_options['subArgs'])
- obj.localOptions = options
+ if obj.inputFile:
+ obj.inputFilename = options[obj.inputFile[0]]
- if input_file and options:
- log.debug("Added input file to options list")
- obj.inputFile = options[input_file[0]]
-
- try:
- log.debug("processing options")
- tmp_test_case_object = obj()
- tmp_test_case_object._processOptions(options)
+ try:
+ log.debug("processing options")
+ tmp_test_case_object = obj()
+ tmp_test_case_object._checkRequiredOptions()
- except usage.UsageError, e:
- test_name = tmp_test_case_object.name
- print "There was an error in running %s!" % test_name
- print "%s" % e
- options.opt_help()
- raise usage.UsageError("Error in parsing command line args for %s" % test_name)
+ except usage.UsageError, e:
+ test_name = tmp_test_case_object.name
+ log.err("There was an error in running %s!" % test_name)
+ log.err("%s" % e)
+ options.opt_help()
+ raise usage.UsageError("Error in parsing command line args for %s" % test_name)
- if obj.requiresRoot:
- try:
- checkForRoot()
- except NotRootError:
- log.err("%s requires root to run" % obj.name)
- sys.exit(1)
-
return obj
- def isTestCase(obj):
- try:
- return issubclass(obj, NetTestCase)
- except TypeError:
- return False
-
-def findTestClassesFromConfig(cmd_line_options):
+def findTestClassesFromFile(filename):
"""
Takes as input the command line config parameters and returns the test
case classes.
@@@ -152,408 -153,187 +155,494 @@@ def loadTestsAndOptions(classes, cmd_li
return test_cases, options
+ def getTimeout(test_instance, test_method):
+ """
+ Returns the timeout value set on this test. Check on the instance first,
+ the the class, then the module, then package. As soon as it finds
+ something with a timeout attribute, returns that. Returns
+ twisted.trial.util.DEFAULT_TIMEOUT_DURATION if it cannot find anything.
+
+ See twisted.trial.unittest.TestCase docstring for more details.
+ """
+ try:
+ testMethod = getattr(test_instance, test_method)
+ except:
+ log.debug("_getTimeout couldn't find self.methodName!")
+ return txtrutil.DEFAULT_TIMEOUT_DURATION
+ else:
+ test_instance._parents = [testMethod, test_instance]
+ test_instance._parents.extend(txtrutil.getPythonContainers(testMethod))
+ timeout = txtrutil.acquireAttribute(test_instance._parents, 'timeout',
+ txtrutil.DEFAULT_TIMEOUT_DURATION)
+ try:
+ return float(timeout)
+ except (ValueError, TypeError):
+ warnings.warn("'timeout' attribute needs to be a number.",
+ category=DeprecationWarning)
+ return txtrutil.DEFAULT_TIMEOUT_DURATION
+
-def runTestWithInput(test_class, test_method, test_input, oreporter):
+def runTestCasesWithInput(test_cases, test_input, yaml_reporter,
+ oonib_reporter=None):
"""
- Runs a single testcase from a NetTestCase with one input.
+ Runs in parallel all the test methods inside the specified test case.
+ Reporting happens every time a test method has finished running.
+ Once all the test methods have been called, we check whether the
+ postProcessor class method returns something. If it does, we write
+ that as another entry in the report called post_processing.
+
+ Args:
+
+ test_cases (list): A list of tuples containing the test_class (a
+ class) and the test_method (a string)
+
+ test_input (instance): Any instance that will be passed as input to
+ the test.
+
+ yaml_reporter: An instance of :class:ooni.reporter.YAMLReporter
+
+ oonib_reporter: An instance of :class:ooni.reporter.OONIBReporter. If
+ this is set to none then we will only report to the YAML reporter.
+
"""
- log.debug("Running %s with %s" % (test_method, test_input))
+
+ # This is used to store a copy of all the test reports
+ tests_report = {}
+ def test_timeout(d):
+ timeout_error = defer.TimeoutError(
+ "%s test for %s timed out after %s seconds"
+ % (test_name, test_instance.input, test_instance.timeout))
+ timeout_fail = failure.Failure(err)
+ try:
+ d.errback(timeout_fail)
+ except defer.AlreadyCalledError:
+ # if the deferred has already been called but the *back chain is
+ # still unfinished, safely crash the reactor and report the timeout
+ reactor.crash()
+ test_instance._timedOut = True # see test_instance._wait
+ test_instance._test_result.addExpectedFailure(test_instance, fail)
+ test_timeout = txtrutils.suppressWarnings(
+ test_timeout, txtrutil.suppress(category=DeprecationWarning))
+
+ def test_skip_class(reason):
+ try:
+ d.errback(failure.Failure(SkipTest("%s" % reason)))
+ except defer.AlreadyCalledError:
+ pass # XXX not sure what to do here...
+
def test_done(result, test_instance, test_name):
- log.debug("Concluded %s with inputs %s"
- % (test_name, test_instance.input))
- return oreporter.testDone(test_instance, test_name)
+ log.msg("Successfully finished running %s" % test_name)
+ log.debug("Deferred callback result: %s" % result)
+ tests_report[test_name] = dict(test_instance.report)
+ if not oonib_reporter:
+ return yaml_reporter.testDone(test_instance, test_name)
+ d1 = oonib_reporter.testDone(test_instance, test_name)
+ d2 = yaml_reporter.testDone(test_instance, test_name)
+ return defer.DeferredList([d1, d2])
- def test_error(failure, test_instance, test_name):
- log.err("Error in running %s" % test_name)
- log.exception(failure)
+ def test_error(error, test_instance, test_name):
+ if isinstance(error, SkipTest):
+ log.warn("%s" % error.message)
+ else:
++ log.err("Error in running %s" % test_name)
+ log.exception(error)
+ return
+
+ def tests_done(result, test_class):
+ test_instance = test_class()
+ test_instance.report = {}
+ test_instance.input = None
+ test_instance._start_time = time.time()
+ post = getattr(test_instance, 'postProcessor')
+ try:
+ post_processing = post(tests_report)
+ if not oonib_reporter:
+ return yaml_reporter.testDone(test_instance, 'summary')
+ d1 = oonib_reporter.testDone(test_instance, 'summary')
+ d2 = yaml_reporter.testDone(test_instance, 'summary')
+ return defer.DeferredList([d1, d2])
+ except NoPostProcessor:
+ log.debug("No post processor configured")
+ return
- test_instance = test_class()
- test_instance.input = test_input
- test_instance.report = {}
- # XXX TODO the twisted.trial.reporter.TestResult is expected by
- # test_timeout(), but we should eventually replace it with a stub class
- test_instance._test_result = txreporter.TestResult()
- # use this to keep track of the test runtime
- test_instance._start_time = time.time()
- test_instance.timeout = getTimeout(test_instance, test_method)
- # call setups on the test
- test_instance._setUp()
- test_instance.setUp()
-
- test_skip = txtrutil.acquireAttribute(test_instance._parents, 'skip', None)
- if test_skip:
- log.warn("%s marked these tests to be skipped: %s"
- % (test_instance.name, test_skip))
- skip_list = [test_skip]
-
- test = getattr(test_instance, test_method)
- test_instance._testMethod = test
-
- d = defer.maybeDeferred(test)
-
- # register the timer with the reactor
- call_timeout = reactor.callLater(test_instance.timeout, test_timeout, d)
- d.addBoth(lambda x: call_timeout.active() and call_timeout.cancel() or x)
-
- # check if the class has been aborted
- if hasattr(test_instance.__class__, 'skip'):
- reason = getattr(test_instance.__class__, 'skip')
- call_skip = reactor.callLater(0, test_skip_class, reason)
- d.addBoth(lambda x: call_skip.active() and call_skip.cancel() or x)
-
- d.addCallback(test_done, test_instance, test_method)
- d.addErrback(test_error, test_instance, test_method)
+ dl = []
+ for test_case in test_cases:
+ log.debug("Processing %s" % test_case[1])
+ test_class = test_case[0]
+ test_method = test_case[1]
-
- log.msg("Running %s with %s..." % (test_method, test_input))
-
+ test_instance = test_class()
+ test_instance.input = test_input
+ test_instance.report = {}
++
++ # XXX TODO the twisted.trial.reporter.TestResult is expected by
++ # test_timeout(), but we should eventually replace it with a stub class
++ test_instance._test_result = txreporter.TestResult()
++
+ # use this to keep track of the test runtime
+ test_instance._start_time = time.time()
++ test_instance.timeout = getTimeout(test_instance, test_method)
++
+ # call setups on the test
+ test_instance._setUp()
+ test_instance.setUp()
++
++ # check if we're inherited from anything marked to be skipped
++ test_skip = txtrutil.acquireAttribute(test_instance._parents, 'skip', None)
++ if test_skip:
++ log.warn("%s marked these tests to be skipped: %s"
++ % (test_instance.name, test_skip))
++ skip_list = [test_skip]
++
+ test = getattr(test_instance, test_method)
++ test_instance._testMethod = test
+
+ d = defer.maybeDeferred(test)
++
++ # register the timer with the reactor
++ call_timeout = reactor.callLater(test_instance.timeout, test_timeout, d)
++ d.addBoth(lambda x: call_timeout.active() and call_timeout.cancel() or x)
++
++ # check if the class has been aborted
++ if hasattr(test_instance.__class__, 'skip'):
++ reason = getattr(test_instance.__class__, 'skip')
++ call_skip = reactor.callLater(0, test_skip_class, reason)
++ d.addBoth(lambda x: call_skip.active() and call_skip.cancel() or x)
++
+ d.addCallback(test_done, test_instance, test_method)
+ d.addErrback(test_error, test_instance, test_method)
+ dl.append(d)
- return d
+ test_methods_d = defer.DeferredList(dl)
+ test_methods_d.addCallback(tests_done, test_cases[0][0])
+ return test_methods_d
-def runTestWithInputUnit(test_class, test_method, input_unit, oreporter):
+def runTestCasesWithInputUnit(test_cases, input_unit, yaml_reporter,
+ oonib_reporter):
"""
- @param test_class:
- The uninstantiated :class:`ooni.nettest.NetTestCase` to be run.
- @param test_method:
- A string representing the method name to be called.
+ Runs the Test Cases that are given as input in parallel.
+ A Test Case is a subclass of ooni.nettest.NetTestCase and a list of
+ methods.
+
+ The deferred list will fire once all the test methods have been
+ run once per item in the input unit.
+
- test_cases: A list of tuples containing the test class and the test method as a string.
-
- input_unit: A generator that yields an input per iteration
-
++ @param test_cases:
++ A list of tuples, each containing the test_class and the test_method as a string.
+ @param input_unit:
+ A generator that contains the inputs to be run on the test.
- @param oreporter:
- A :class:`ooni.reporter.OReporter` instance.
-
- @return: A DeferredList containing all the tests to be run at this time.
++ @return:
++ A DeferredList containing all the tests to be run at this time.
"""
- log.debug("Running test cases with input unit")
dl = []
for test_input in input_unit:
- d = runTestWithInput(test_class, test_method, test_input, oreporter)
+ log.debug("Running test with this input %s" % test_input)
+ d = runTestCasesWithInput(test_cases,
+ test_input, yaml_reporter, oonib_reporter)
dl.append(d)
return defer.DeferredList(dl)
- at defer.inlineCallbacks
-def runTestCases(test_cases, options,
- cmd_line_options, yamloo_filename):
+class InvalidResumeFile(Exception):
+ pass
+
+class noResumeSession(Exception):
+ pass
+
+def loadResumeFile():
"""
- XXX we should get rid of the InputUnit class, because we go though the
- effort of creating an iterator, only to turn it back into a list, and then
- iterate through it. it's also buggy as hell, and it's excess code.
+ Sets the singleton stateDict object to the content of the resume file.
+ If the file is empty then it will create an empty one.
+
+ Raises:
+
+ :class:ooni.runner.InvalidResumeFile if the resume file is not valid
+
"""
- try:
- assert len(options) != 0, "Length of options is zero!"
- except AssertionError, ae:
- test_inputs = []
- log.err(ae)
- else:
+ if not config.stateDict:
try:
- first = options.pop(0)
+ config.stateDict = yaml.safe_load(open(config.resume_filename))
except:
- first = options
+ log.err("Error loading YAML file")
+ raise InvalidResumeFile
+
+ if not config.stateDict:
+ yaml.safe_dump(dict(), open(config.resume_filename, 'w+'))
+ config.stateDict = dict()
- if 'inputs' in first:
- test_inputs = options['inputs']
+ elif isinstance(config.stateDict, dict):
+ return
else:
- log.msg("Could not find inputs!")
- log.msg("options[0] = %s" % first)
- test_inputs = [None]
+ log.err("The resume file is of the wrong format")
+ raise InvalidResumeFile
+
+def resumeTest(test_filename, input_unit_factory):
+ """
+ Returns an input_unit_factory that is at the index of the previous run of the test
+ for the specified test_filename.
+
+ Args:
+
+ test_filename (str): the filename of the test that is being run
+ including the .py extension.
+
+ input_unit_factory (:class:ooni.inputunit.InputUnitFactory): with the
+ same input of the past run.
+
+ Returns:
+
+ :class:ooni.inputunit.InputUnitFactory that is at the index of the
+ previous test run.
+
+ """
+ try:
+ idx = config.stateDict[test_filename]
+ for x in range(idx):
+ try:
+ input_unit_factory.next()
+ except StopIteration:
+ log.msg("Previous run was complete")
+ return input_unit_factory
+
+ return input_unit_factory
+
+ except KeyError:
+ log.debug("No resume key found for selected test name. It is therefore 0")
+ config.stateDict[test_filename] = 0
+ return input_unit_factory
+
+ at defer.inlineCallbacks
+def updateResumeFile(test_filename):
+ """
+ update the resume file with the current stateDict state.
+ """
+ log.debug("Acquiring lock for %s" % test_filename)
+ yield config.resume_lock.acquire()
+
+ current_resume_state = yaml.safe_load(open(config.resume_filename))
+ current_resume_state = config.stateDict
+ yaml.safe_dump(current_resume_state, open(config.resume_filename, 'w+'))
+
+ log.debug("Releasing lock for %s" % test_filename)
+ config.resume_lock.release()
+ defer.returnValue(config.stateDict[test_filename])
+
+ at defer.inlineCallbacks
+def increaseInputUnitIdx(test_filename):
+ """
+ Args:
- reportFile = open(yamloo_filename, 'w+')
+ test_filename (str): the filename of the test that is being run
+ including the .py extension.
+
+ input_unit_idx (int): the current input unit index for the test.
+
+ """
+ config.stateDict[test_filename] += 1
+ yield updateResumeFile(test_filename)
+
+def updateProgressMeters(test_filename, input_unit_factory,
+ test_case_number):
+ """
+ Update the progress meters for keeping track of test state.
+ """
+ log.msg("Setting up progress meters")
+ if not config.state.test_filename:
+ config.state[test_filename] = Storage()
+
+ config.state[test_filename].per_item_average = 2.0
+
+ input_unit_idx = float(config.stateDict[test_filename])
+ input_unit_items = float(len(input_unit_factory) + 1)
+ test_case_number = float(test_case_number)
+ total_iterations = input_unit_items * test_case_number
+ current_iteration = input_unit_idx * test_case_number
+
+ log.debug("input_unit_items: %s" % input_unit_items)
+ log.debug("test_case_number: %s" % test_case_number)
+
+ log.debug("Test case number: %s" % test_case_number)
+ log.debug("Total iterations: %s" % total_iterations)
+ log.debug("Current iteration: %s" % current_iteration)
+
+ def progress():
+ return (current_iteration / total_iterations) * 100.0
+
+ config.state[test_filename].progress = progress
+
+ def eta():
+ return (total_iterations - current_iteration) \
+ * config.state[test_filename].per_item_average
+ config.state[test_filename].eta = eta
+
+ config.state[test_filename].input_unit_idx = input_unit_idx
+ config.state[test_filename].input_unit_items = input_unit_items
+
+
+ at defer.inlineCallbacks
+def runTestCases(test_cases, options, cmd_line_options):
++ """
++ Run all test cases found in specified files and modules.
++
++ @param test_cases:
++ A list of tuples, each tuple containing the test_class
++ and test_method to run.
++ @param cmd_line_options:
++ The parsed :attr:`twisted.python.usage.Options.optParameters`
++ obtained from the main ooni commandline.
++ """
+ log.debug("Running %s" % test_cases)
+ log.debug("Options %s" % options)
+ log.debug("cmd_line_options %s" % dict(cmd_line_options))
+
+ test_inputs = options['inputs']
+
+ oonib_reporter = OONIBReporter(cmd_line_options)
+ yaml_reporter = YAMLReporter(cmd_line_options)
if cmd_line_options['collector']:
- oreporter = reporter.OONIBReporter(cmd_line_options['collector'])
+ log.msg("Using remote collector, please be patient while we create the report.")
+ try:
+ yield oonib_reporter.createReport(options)
+ except OONIBReportError:
+ log.err("Error in creating new report")
+ log.msg("We will only create reports to a file")
+ oonib_reporter = None
else:
- oreporter = reporter.YAMLReporter(reportFile)
+ oonib_reporter = None
- input_unit_factory = InputUnitFactory(test_inputs)
+ yield yaml_reporter.createReport(options)
+ log.msg("Reporting to file %s" % config.reports.yamloo)
+
+ try:
+ input_unit_factory = InputUnitFactory(test_inputs)
+ except Exception, e:
+ log.exception(e)
+
+ try:
+ loadResumeFile()
+ except InvalidResumeFile:
+ log.err("Error in loading resume file %s" % config.resume_filename)
+ log.err("Try deleting the resume file")
+ raise InvalidResumeFile
+
+ test_filename = os.path.basename(cmd_line_options['test'])
+
+ if cmd_line_options['resume']:
+ log.debug("Resuming %s" % test_filename)
+ resumeTest(test_filename, input_unit_factory)
+ else:
+ log.debug("Not going to resume %s" % test_filename)
+ config.stateDict[test_filename] = 0
- log.debug("Creating report")
- yield oreporter.createReport(options)
+ updateProgressMeters(test_filename, input_unit_factory, len(test_cases))
- # This deferred list is a deferred list of deferred lists
- # it is used to store all the deferreds of the tests that
- # are run
try:
for input_unit in input_unit_factory:
- log.debug("Running this input unit %s" % input_unit)
- # We do this because generators can't be rewound.
- input_list = list(input_unit)
- for test_case in test_cases:
- log.debug("Processing %s" % test_case[1])
- test_class = test_case[0]
- test_method = test_case[1]
- yield runTestWithInputUnit(test_class, test_method,
- input_list, oreporter)
- except Exception, ex:
- # XXX we probably want to add a log.warn() at some point
- log.warn("Problem in running test")
- log.exception(ex)
-
- oreporter.allDone()
- if reactor.running:
- reactor.stop()
+ log.debug("Running %s with input unit %s" % (test_filename, input_unit))
+
+ yield runTestCasesWithInputUnit(test_cases, input_unit,
+ yaml_reporter, oonib_reporter)
+
+ yield increaseInputUnitIdx(test_filename)
+
+ updateProgressMeters(test_filename, input_unit_factory, len(test_cases))
+
+ except Exception:
+ log.exception("Problem in running test")
+
+class UnableToStartTor(Exception):
+ pass
+
+def startTor():
+ @defer.inlineCallbacks
+ def state_complete(state):
+ config.tor_state = state
+ log.msg("Successfully bootstrapped Tor")
+ log.debug("We now have the following circuits: ")
+ for circuit in state.circuits.values():
+ log.debug(" * %s" % circuit)
+
+ socks_port = yield state.protocol.get_conf("SocksPort")
+ control_port = yield state.protocol.get_conf("ControlPort")
+
+ config.tor.socks_port = int(socks_port.values()[0])
+ config.tor.control_port = int(control_port.values()[0])
+
+ def setup_failed(failure):
+ log.exception(failure)
+ raise UnableToStartTor
+
+ def setup_complete(proto):
+ """
+ Called when we read from stdout that Tor has reached 100%.
+ """
+ log.debug("Building a TorState")
+ state = TorState(proto.tor_protocol)
+ state.post_bootstrap.addCallback(state_complete)
+ state.post_bootstrap.addErrback(setup_failed)
+ return state.post_bootstrap
+
+ def updates(prog, tag, summary):
+ log.msg("%d%%: %s" % (prog, summary))
+
+ tor_config = TorConfig()
+ if config.tor.control_port:
+ tor_config.ControlPort = config.tor.control_port
+ else:
+ control_port = int(randomFreePort())
+ tor_config.ControlPort = control_port
+ config.tor.control_port = control_port
+
+ if config.tor.socks_port:
+ tor_config.SocksPort = config.tor.socks_port
+ else:
+ socks_port = int(randomFreePort())
+ tor_config.SocksPort = socks_port
+ config.tor.socks_port = socks_port
+
+ tor_config.save()
+
+ log.debug("Setting control port as %s" % tor_config.ControlPort)
+ log.debug("Setting SOCKS port as %s" % tor_config.SocksPort)
+
+ d = launch_tor(tor_config, reactor,
+ progress_updates=updates)
+ d.addCallback(setup_complete)
+ d.addErrback(setup_failed)
+ return d
+
+def startSniffing():
+ from ooni.utils.txscapy import ScapyFactory, ScapySniffer
+ try:
+ checkForRoot()
+ except NotRootError:
+ print "[!] Includepcap options requires root priviledges to run"
+ print " you should run ooniprobe as root or disable the options in ooniprobe.conf"
+ sys.exit(1)
+
+ print "Starting sniffer"
+ config.scapyFactory = ScapyFactory(config.advanced.interface)
+
+ sniffer = ScapySniffer(config.reports.pcap)
+ config.scapyFactory.registerProtocol(sniffer)
+
+def loadTest(cmd_line_options):
+ """
+ Takes care of parsing test command line arguments and loading their
+ options.
+ """
+ config.cmd_line_options = cmd_line_options
+ config.generateReportFilenames()
+
+ if cmd_line_options['reportfile']:
+ config.reports.yamloo = cmd_line_options['reportfile']+'.yamloo'
+ config.reports.pcap = config.reports.yamloo+'.pcap'
+
+ if os.path.exists(config.reports.pcap):
+ print "Report PCAP already exists with filename %s" % config.reports.pcap
+ print "Renaming it to %s" % config.reports.pcap+".old"
+ os.rename(config.reports.pcap, config.reports.pcap+".old")
+
+ classes = findTestClassesFromFile(cmd_line_options['test'])
+ test_cases, options = loadTestsAndOptions(classes, cmd_line_options)
+ return test_cases, options, cmd_line_options
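For reference, a standalone sketch of the progress/ETA arithmetic performed by updateProgressMeters() above: every input unit is run through every test case, so total work is input_unit_items * test_case_number and the per-item average drives the ETA (the numbers below are made up):

    def make_meters(input_unit_idx, input_unit_items, test_case_number,
                    per_item_average=2.0):
        total_iterations = float(input_unit_items) * float(test_case_number)
        current_iteration = float(input_unit_idx) * float(test_case_number)

        def progress():
            # Percentage of iterations completed so far.
            return (current_iteration / total_iterations) * 100.0

        def eta():
            # Seconds remaining, assuming per_item_average seconds per iteration.
            return (total_iterations - current_iteration) * per_item_average

        return progress, eta

    progress, eta = make_meters(input_unit_idx=3, input_unit_items=10,
                                test_case_number=2)
    print("%.0f%% done, ~%.0f seconds left" % (progress(), eta()))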
diff --cc ooni/utils/hacks.py
index 18e9102,9121982..4cf94d0
--- a/ooni/utils/hacks.py
+++ b/ooni/utils/hacks.py
@@@ -4,9 -4,14 +4,14 @@@
# ********
# When some software has issues and we need to fix it in a
# hackish way, we put it in here. This one day will be empty.
- #
+ #
-# :authors: Arturo Filastò
++# :authors: Arturo Filastò, Isis Lovecruft
# :licence: see LICENSE
+ from yaml.representer import *
+ from yaml.emitter import *
+ from yaml.serializer import *
+ from yaml.resolver import *
import copy_reg
diff --cc ooni/utils/log.py
index 3e24804,a13feb9..2721807
--- a/ooni/utils/log.py
+++ b/ooni/utils/log.py
@@@ -56,17 -57,35 +62,38 @@@ def warn(msg, *arg, **kw)
def err(msg, *arg, **kw):
txlog.err("Error: " + str(msg), logLevel=logging.ERROR, *arg, **kw)
-def exception(msg):
- txlog.err(msg)
- exc_type, exc_value, exc_traceback = sys.exc_info()
- traceback.print_exception(exc_type, exc_value, exc_traceback)
-
-def exception(*msg):
- logging.exception(msg)
+def exception(error):
+ """
+ Error can either be an error message to print to stdout and to the logfile
+ or it can be a twisted.python.failure.Failure instance.
+ """
+ if isinstance(error, Failure):
+ error.printTraceback()
+ else:
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+ traceback.print_exception(exc_type, exc_value, exc_traceback)
+ def fail(*failure):
+ logging.critical(failure)
+
+ def catch(func):
+ """
+ Quick wrapper to add around test methods for debugging purposes,
+ catches the given Exception. Use like so:
+
+ @log.catch
+ def foo(bar):
+ if bar == 'baz':
+ raise Exception("catch me no matter what I am")
+ foo("baz")
+ """
+ def _catch(*args, **kwargs):
+ try:
+ func(*args, **kwargs)
+ except Exception, exc:
+ exception(exc)
+ return _catch
+
class LoggerFactory(object):
"""
This is a logger factory to be used by oonib
diff --cc ooni/utils/net.py
index 12d1939,1814df2..2495197
--- a/ooni/utils/net.py
+++ b/ooni/utils/net.py
@@@ -10,9 -10,8 +10,10 @@@
# see attached LICENCE file
import sys
+import socket
+from random import randint
+ from ipaddr import IPAddress
from zope.interface import implements
from twisted.internet import protocol, defer
from twisted.internet import threads, reactor
@@@ -125,34 -141,18 +128,42 @@@ def getIfaces(platform_name=None)
else:
raise UnsupportedPlatform
+def randomFreePort(addr="127.0.0.1"):
+ """
+ Args:
+
+ addr (str): the IP address to attempt to bind to.
+
+ Returns an int representing the free port number at the moment of calling
+
+ Note: there is no guarantee that some other application will not attempt to
+ bind to this port after this function has been called.
+ """
+ free = False
+ while not free:
+ port = randint(1024, 65535)
+ s = socket.socket()
+ try:
+ s.bind((addr, port))
+ free = True
+ except:
+ pass
+ s.close()
+ return port
+
+
def checkInterfaces(ifaces=None, timeout=1):
"""
+ Check given network interfaces to see that they can send and receive
+ packets. This is similar to :func:`getDefaultIface`, except that function
+ only retrieves the name of the interface which is associated with the LAN,
+ whereas this function validates tx/rx capabilities.
+
@param ifaces:
- A dictionary in the form of ifaces['if_name'] = 'if_addr'.
+ (optional) A dictionary in the form of ifaces['if_name'] = 'if_addr'.
+ @param timeout:
+ An integer specifying the number of seconds to timeout if
+ no reply is received for our pings.
"""
try:
from scapy.all import IP, ICMP
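As a usage sketch for randomFreePort() above (not part of the patch): since the port is only known to be free at the instant of the call, bind it again as soon as possible to narrow the race window; claim_port is a hypothetical helper.

    import socket

    def claim_port(pick_port, addr="127.0.0.1"):
        port = pick_port(addr)
        s = socket.socket()
        s.bind((addr, port))   # re-bind immediately; may still fail if another
        s.listen(1)            # process grabbed the port in the meantime
        return s, port

    # listener, port = claim_port(randomFreePort)
    # print("listening on port %d" % port)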
diff --cc ooni/utils/txscapy.py
index 7902133,702d7bb..e91da09
--- a/ooni/utils/txscapy.py
+++ b/ooni/utils/txscapy.py
@@@ -17,146 -18,53 +18,146 @@@ from twisted.internet import reactor, t
from twisted.internet import defer, abstract
from zope.interface import implements
-
+from scapy.config import conf
+ from scapy.all import PcapWriter, MTU
+ from scapy.all import BasePacketList, conf, PcapReader
-
-from scapy.all import conf, Gen, SetGen
++from scapy.all import Gen, SetGen
from ooni.utils import log
+from ooni import config
+
+try:
+ conf.use_pcap = True
+ conf.use_dnet = True
+
+ from scapy.all import PcapWriter
+ from scapy.arch import pcapdnet
+
+ config.pcap_dnet = True
+
+except ImportError, e:
+ log.err("pypcap or dnet not installed. "
+ "Certain tests may not work.")
+
+ config.pcap_dnet = False
+ conf.use_pcap = False
+ conf.use_dnet = False
+
+ from scapy.all import PcapWriter
- from scapy.all import BasePacketList, conf, PcapReader
- from scapy.all import conf, Gen, SetGen, MTU
-class TXPcapWriter(PcapWriter):
- def __init__(self, *arg, **kw):
- PcapWriter.__init__(self, *arg, **kw)
- fdesc.setNonBlocking(self.f)
++from scapy.all import BasePacketList, PcapReader
++from scapy.all import Gen, SetGen, MTU
-class ScapyProtocol(abstract.FileDescriptor):
- def __init__(self, super_socket=None,
- reactor=None, timeout=None, receive=True, *a, **kw):
+def getNetworksFromRoutes():
+ from scapy.all import conf, ltoa, read_routes
+ from ipaddr import IPNetwork, IPAddress
+
- ## Hide the 'no routes' warnings
- conf.verb = 0
-
+ networks = []
+ for nw, nm, gw, iface, addr in read_routes():
+ n = IPNetwork( ltoa(nw) )
+ (n.netmask, n.gateway, n.ipaddr) = [IPAddress(x) for x in [nm, gw, addr]]
+ n.iface = iface
+ if not n.compressed in networks:
+ networks.append(n)
+
+ return networks
+
+class IfaceError(Exception):
+ pass
+
+def getDefaultIface():
+ networks = getNetworksFromRoutes()
+ for net in networks:
+ if net.is_private:
+ return net.iface
+ raise IfaceError
+
+class ProtocolNotRegistered(Exception):
+ pass
+
+class ProtocolAlreadyRegistered(Exception):
+ pass
+
+class ScapyFactory(abstract.FileDescriptor):
+ """
+ Inspired by muxTCP scapyLink:
+ https://github.com/enki/muXTCP/blob/master/scapyLink.py
+ """
+ def __init__(self, interface, super_socket=None, timeout=5):
abstract.FileDescriptor.__init__(self, reactor)
- # By default we use the conf.L3socket
+ if interface == 'auto':
+ interface = getDefaultIface()
if not super_socket:
- super_socket = conf.L3socket()
+ super_socket = conf.L3socket(iface=interface,
+ promisc=True, filter='')
+ #super_socket = conf.L2socket(iface=interface)
+
+ self.protocols = []
+ fdesc._setCloseOnExec(super_socket.ins.fileno())
self.super_socket = super_socket
- self.timeout = timeout
+ def writeSomeData(self, data):
+ """
+ XXX we actually want to use this, but this requires overriding doWrite
+ or writeSequence.
+ """
+ pass
- # This dict is used to store the unique hashes that allow scapy to
- # match up request with answer
- self.hr_sent_packets = {}
+ def send(self, packet):
+ """
+ Write a scapy packet to the wire.
+ """
+ return self.super_socket.send(packet)
- # These are the packets we have received as answer to the ones we sent
- self.answered_packets = []
+ def fileno(self):
+ return self.super_socket.ins.fileno()
- # These are the packets we send
- self.sent_packets = []
+ def doRead(self):
+ packet = self.super_socket.recv(MTU)
+ if packet:
+ for protocol in self.protocols:
+ protocol.packetReceived(packet)
- # This deferred will fire when we have finished sending a receiving packets.
- self.d = defer.Deferred()
- self.debug = False
+ def registerProtocol(self, protocol):
+ if not self.connected:
+ self.startReading()
- self.multi = False
- # XXX this needs to be implemented. It would involve keeping track of
- # the state of the sending via the super socket file descriptor and
- # firing the callback when we have concluded sending. Check out
- # twisted.internet.udp to see how this is done.
- self.receive = receive
+ if protocol not in self.protocols:
+ protocol.factory = self
+ self.protocols.append(protocol)
+ else:
+ raise ProtocolAlreadyRegistered
- def fileno(self):
- return self.super_socket.ins.fileno()
+ def unRegisterProtocol(self, protocol):
+ if protocol in self.protocols:
+ self.protocols.remove(protocol)
+ if len(self.protocols) == 0:
+ self.loseConnection()
+ else:
+ raise ProtocolNotRegistered
+
+class ScapyProtocol(object):
+ factory = None
+
+ def packetReceived(self, packet):
+ """
+ When you register a protocol, this method will be called with the
+ received packet as its argument.
+
+ Every protocol that is registered will have this method called.
+ """
+ raise NotImplementedError
+
+class ScapySender(ScapyProtocol):
+ timeout = 5
+
+ # This deferred will fire when we have finished sending and receiving packets.
+ # Should we look for multiple answers for the same sent packet?
+ multi = False
+
+ # When 0 we stop when all the packets we have sent have received an
+ # answer
+ expected_answers = 0
def processPacket(self, packet):
"""
@@@ -172,18 -79,11 +173,18 @@@
if not self.multi:
del(answer_hr[i])
break
+
if len(self.answered_packets) == len(self.sent_packets):
- log.debug("All of our questions have been answered.")
+ # All of our questions have been answered.
self.stopSending()
+ return
- def doRead(self):
+ if self.expected_answers and \
+ self.expected_answers == len(self.answered_packets):
+ log.debug("Got the number of expected answers")
+ self.stopSending()
+
+ def packetReceived(self, packet):
timeout = time.time() - self._start_time
if self.timeout and time.time() - self._start_time > self.timeout:
self.stopSending()
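For illustration, a standalone sketch of the factory/protocol dispatch pattern used by ScapyFactory and ScapyProtocol above: the factory owns the socket and fans every received packet out to each registered protocol (MiniFactory and PrintProtocol are stand-ins; no scapy needed):

    class MiniFactory(object):
        def __init__(self):
            self.protocols = []

        def registerProtocol(self, protocol):
            if protocol in self.protocols:
                raise ValueError("protocol already registered")
            protocol.factory = self
            self.protocols.append(protocol)

        def packetArrived(self, packet):
            # Called once per packet read off the wire.
            for protocol in self.protocols:
                protocol.packetReceived(packet)

    class PrintProtocol(object):
        factory = None

        def packetReceived(self, packet):
            print("got packet: %r" % (packet,))

    factory = MiniFactory()
    factory.registerProtocol(PrintProtocol())
    factory.packetArrived("fake-icmp-echo-reply")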
diff --cc ooniprobe.conf.sample
index ed77dfe,0000000..7031ba1
mode 100644,000000..100644
--- a/ooniprobe.conf.sample
+++ b/ooniprobe.conf.sample
@@@ -1,38 -1,0 +1,41 @@@
+# This is the configuration file for OONIProbe
+# This file follows the YAML markup format: http://yaml.org/spec/1.2/spec.html
+# Keep in mind that indentation matters.
+
+basic:
+ # Where OONIProbe should write its log file
+ logfile: ooniprobe.log
+privacy:
+ # Should we include the IP address of the probe in the report?
+ includeip: false
+ # Should we include the ASN of the probe in the report?
+ includeasn: false
+ # Should we include the country of the probe in the report?
+ includecountry: false
+ # Should we include the city of the probe in the report?
+ includecity: false
+ # Should we collect a full packet capture on the client?
+ includepcap: false
+advanced:
+ # XXX change this to point to the directory where you have stored the GeoIP
+ # database file. This should be the directory in which OONI is installed
+ # /path/to/ooni-probe/data/
+ geoip_data_dir: /usr/share/GeoIP/
+ debug: true
++ # If we're including the client's IP address, how long should we wait for
++ # the connection to complete before timing out?
++ checktimeout: 15
+ tor_binary: '/usr/sbin/tor'
+ # For auto detection
+ interface: auto
+ # Or specify a specific interface
+ #interface: wlan0
+ # If you do not specify start_tor, you will have to have Tor running and
+ # explicitly set the control port and orport.
+ start_tor: true
+tor:
+ #socks_port: 9050
+ #control_port: 9051
+ # Specify the absolute path to the Tor bridges to use for testing
+ bridges: bridges.list
+
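The sample above is plain YAML; a minimal sketch of reading a fragment of it with PyYAML (PyYAML assumed installed, keys copied from the sample):

    import yaml

    sample = ("privacy:\n"
              "  includepcap: false\n"
              "advanced:\n"
              "  interface: auto\n"
              "  start_tor: true\n")

    cfg = yaml.safe_load(sample)
    print(cfg['advanced']['interface'])    # 'auto'
    print(cfg['privacy']['includepcap'])   # False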