def read_file(self, filename):
if not scapy_installed:
exit("Could not read pcap due to missing scapy")
self.params['ppid']=0
print "Opening pcap file %s" % filename
packets=rdpcap(filename)
for p in packets:
if scapy_sctp and SCTPChunkData in p:
msg=p.data
elif (TCP in p and Raw in p) or UDP in p or (Ethernet in p and Raw in p):
msg = p.load
if p.time >= self.start and p.time <= self.stop:
self.packets.append(msg)
self.uniquesamples.add(msg)
ppid=getattr(p,'proto_id',0)
if self.params['ppid'] != ppid:
self.params['ppid'] = ppid
# This is not used so don't print
# if self.params['ppid'] != 0:
# print "Using PPID %d" % self.params['ppid']
# Python rdpcap() usage examples (translated scrape-index heading)
def flooder(self, n, filename):
    """Replay the packets captured in *filename* onto the wire *n* times."""
    print('Reading pcap file.')
    captured = rdpcap(filename)
    burst_size = len(captured)
    for round_no in range(1, n + 1):
        print('Sending %s packets.' % (burst_size))
        sendpfast(captured)
        print('Done, part %s of %s' % (round_no, n))
def get_key_in(pcap_file):
    """ Extract keyboard input reports from a pcap file.
    The extraction depends only on the size of packets. """
    # A 35-byte packet is a keyboard input report; its last 8 payload
    # bytes are the HID report.
    return [pkt.load[-8:] for pkt in rdpcap(pcap_file) if len(pkt) == 35]
def cap_session(pcap_path):
    """Build a Session describing the TCP conversation in a pcap file.

    The first usable TCP/IP packet determines which endpoint is the
    client (via is_client); session_info is [client_ip, server_ip, "TCP"].
    Every later packet is fed into Session.update_session.

    @param pcap_path: filesystem path to the pcap file.
    Returns the Session, or None if no usable packet was found.
    """
    capture = s.rdpcap(pcap_path)  # TODO when go live change to session capture
    curr_session = None
    for pkt in capture:
        # Skip anything that is not a non-empty TCP/IP packet.
        # BUG FIX: the original joined the checks with `and`, which only
        # skipped packets failing *all three* tests and then crashed on
        # pkt[s.IP] for non-IP traffic.
        if not pkt.haslayer(s.IP) or not pkt.haslayer(s.TCP) or pkt.len <= 0:
            continue
        if curr_session is None:
            # First packet decides the client/server orientation.
            if is_client(pkt):
                session_info = [pkt[s.IP].src, pkt[s.IP].dst, "TCP"]
            else:
                session_info = [pkt[s.IP].dst, pkt[s.IP].src, "TCP"]
            curr_session = Session(pkt, session_info, session_info[0])
        else:
            curr_session.update_session(pkt)
    return curr_session
def get_wep_data_count(self):
wep_packets = None
try:
wep_packets = rdpcap(self.log_path)
except Exception as e:
print "[-] Error reading pcap file:", str(e)
return
n_data_packets = 0
for p in wep_packets:
if Dot11WEP in p:
if p.iv is not None and p.iv != '':
n_data_packets += 1
return n_data_packets
def read_pcap(filename):
    """
    @param filename: Filesystem path to the pcap.

    Returns:
      [{"client": "\x17\x52\x15"}, {"server": "\x17\x15\x13"}]

    Raises:
      ValueError: if a third IP address appears in the capture.
    """
    from scapy.all import IP, Raw, rdpcap
    packets = rdpcap(filename)

    checking_first_packet = True
    client_ip_addr = None
    server_ip_addr = None
    ssl_packets = []
    messages = []

    """
    pcap assumptions:

    pcap only contains packets exchanged between a Tor client and a Tor
    server.  (This assumption makes sure that there are only two IP addresses
    in the pcap file)

    The first packet of the pcap is sent from the client to the server.  (This
    assumption is used to get the IP address of the client.)

    All captured packets are TLS packets: that is TCP session
    establishment/teardown packets should be filtered out (no SYN/SYN+ACK)
    """

    """
    Minimally validate the pcap and also find out what's the client
    and server IP addresses.
    """
    for packet in packets:
        if checking_first_packet:
            # By assumption the very first packet comes from the client.
            client_ip_addr = packet[IP].src
            checking_first_packet = False
        else:
            if packet[IP].src != client_ip_addr:
                server_ip_addr = packet[IP].src

        # scapy raises IndexError when the Raw layer is absent (EAFP).
        try:
            if (packet[Raw]):
                ssl_packets.append(packet)
        except IndexError:
            pass

    """Form our list."""
    for packet in ssl_packets:
        if packet[IP].src == client_ip_addr:
            messages.append({"client": str(packet[Raw])})
        elif packet[IP].src == server_ip_addr:
            messages.append({"server": str(packet[Raw])})
        else:
            # BUG FIX: the original did `raise("...")`, which calls raise on
            # a plain str and produces a TypeError instead of a real error.
            raise ValueError("Detected third IP address! pcap is corrupted.")

    return messages
def parse(f, proto=None):
    """Turn a pcap file into normalized data-flow lines with edge weights.

    Reads all packets, extracts (IP, Raw) layer pairs, runs them through a
    protocol-specific parser, then prints each unique flow with a weight
    relative to the most frequent one (most frequent = 1.0000).

    @param f: filesystem path to the pcap file.
    @param proto: 'redis', 'scribe', or None for raw parsing.

    Raises:
      Exception: for an unsupported proto value.
    """
    logger = logging.getLogger('pcap-to-data-flow')
    logger.info('Reading %s as %s proto ...', repr(f), proto)
    # @see http://kisom.github.io/pypcapfile/
    # @see https://stackoverflow.com/questions/42963343/reading-pcap-file-with-scapy
    packets = rdpcap(f)
    packets_count = len(packets)
    packets_time_diff = packets[-1].time - packets[0].time
    logger.info('Packets read: %d / sniffed in %.2f sec / %s', packets_count, packets_time_diff, repr(packets))
    # logger.info('First one: %s', repr(packets[0]))
    # logger.info('Last one: %s', repr(packets[-1]))
    packets = [
        (packet['IP'], packet['Raw'])
        for packet in packets
    ]
    # print(packets)
    # protocol specific handling
    if proto == 'redis':
        packets = map(parse_redis_packet, packets)
    elif proto == 'scribe':
        packets = map(parse_scribe_packet, packets)
    elif proto is None:
        packets = map(parse_raw_packet, packets)
    else:
        # BUG FIX: the original passed proto as a second positional arg to
        # Exception, so the message was never %-formatted.
        raise Exception('Unsupported proto: %s' % proto)
    # remove empty entries
    packets = filter(lambda x: x is not None, packets)
    # and sort starting with the most frequent ones
    stats = Counter(packets)
    (_, top_freq) = stats.most_common(1)[0]  # the most frequent entry will have edge weight = 1
    packets = [
        '{}\t{:.4f}'.format(val, 1. * freq / top_freq)
        for (val, freq) in stats.most_common()
    ]
    print('# processed {} packets sniffed in {:.2f} sec as {}'.format(packets_count, packets_time_diff, proto))
    print('\n'.join(packets))