How to use the pyshark.FileCapture function in pyshark

To help you get started, we’ve selected a few pyshark.FileCapture examples, drawn from popular ways the function is used in public projects.

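Before the project excerpts, here is a minimal sketch of the basic pattern (the file name example.pcap is a placeholder): FileCapture opens a capture file, iterating over it yields dissected packets, and close() shuts down the underlying tshark process.

import pyshark

cap = pyshark.FileCapture("example.pcap")    # placeholder path to an existing capture file
for packet in cap:
    print(packet.highest_layer)              # e.g. TCP, DNS, HTTP
cap.close()                                  # terminate the tshark process behind the capture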

github marten-seemann / quic-network-simulator / interop / trace.py View on Github
def get_all_packets(self, direction: Direction = Direction.ALL) -> pyshark.FileCapture:
    """ Get all packets.
    """
    f = self._get_direction_filter(direction) + "quic"
    return pyshark.FileCapture(self._filename, display_filter=f)
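
The helper above appends to a direction-specific Wireshark display filter and passes the result to FileCapture, so only matching packets are dissected. A minimal sketch of the same display_filter pattern outside the class (the file name and filter expression are assumptions):

import pyshark

# Only packets matching the Wireshark display filter are yielded.
cap = pyshark.FileCapture("trace.pcap", display_filter="udp.port == 443")
matched = sum(1 for _ in cap)    # count the packets that pass the filter
print(matched, "packets matched the filter")
cap.close()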
github z4yx / Router-Lab / Homework / checksum / grade.py View on Github
        start_time = time.time()

        while p.poll() is None:
            if time.time() - start_time > 1:
                p.kill()

        try:
            out = [line.strip() for line in open(out_file, 'r').readlines() if line.strip()]
            ans = [line.strip() for line in open(ans_file, 'r').readlines() if line.strip()]
                
            if out == ans:
                grade += 1
            elif os.isatty(1):
                limit = 1
                count = 0
                reader = pyshark.FileCapture(in_file)
                packets = list(reader)
                print('Wrong Answer (showing only first {} packets):'.format(limit))
                for i in range(len(ans)):
                    if i >= len(out) or out[i] != ans[i]:
                        count += 1
                        print('Answer is wrong for packet #{}: {}'.format(i, packets[i]['ip']))
                        if count == limit:
                            break
                print('Diff: ')
                os.system('diff -u {} {} | head -n 10'.format(out_file, ans_file))
                reader.close()
        except Exception:
            if os.isatty(1):
                print('Unexpected exception caught:')
                traceback.print_exc()
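
The grader above materializes the capture with list(reader) so packets can be indexed, then reads the IP layer through packets[i]['ip']. A minimal sketch of that layer-access pattern (the file name in.pcap is hypothetical):

import pyshark

reader = pyshark.FileCapture("in.pcap")
packets = list(reader)            # load every packet so the list can be indexed
reader.close()

first = packets[0]
if 'ip' in first:                 # membership test checks whether the layer exists
    ip_layer = first['ip']        # same lookup used by grade.py above
    print(ip_layer.src, '->', ip_layer.dst)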
github CERTCC / tapioca / checknet.py View on Github
                    net.dnsmap, pklhandle, protocol=pickle.HIGHEST_PROTOCOL)
                pickle.dump(
                    net.dnsreqs, pklhandle, protocol=pickle.HIGHEST_PROTOCOL)

#        if os.path.exists(targetspkl) and not force:
#            eprint('Loading cached targets...')
#            with open(targetspkl, 'rb') as pklhandle:
#                try:
#                    net.targets = pickle.load(pklhandle)
#                    targetscacheloaded = True
#                except:
#                    pass

        if not targetscacheloaded:
            if fullpacket:
                packets = pyshark.FileCapture(
                    pcapfile, keep_packets=False)
                # Get hosts contacted
                eprint('Getting hosts contacted...')
                packets.apply_on_packets(
                    net.get_hosts_contacted_fullpacket, timeout=1000)
            else:
                packets = pyshark.FileCapture(
                    pcapfile, keep_packets=False, only_summaries=True)
                # Get hosts contacted
                eprint('Getting hosts contacted...')
                packets.apply_on_packets(net.get_hosts_contacted, timeout=1000)
#                with open(targetspkl, 'wb') as pklhandle:
#                    pickle.dump(
#                        net.targets, pklhandle, protocol=pickle.HIGHEST_PROTOCOL)

        # Print report
github CERTCC / tapioca / checkcrypto.py View on Github
                    net.dnsmap, pklhandle, protocol=pickle.HIGHEST_PROTOCOL)

        if os.path.getsize(pcapfile) > 100000000 and not largewarned:
            # Over 100MB
            eprint(
                color.bright(color.yellow('Warning: capture size is large. Please be patient.')))
            largewarned = True

        sslpackets = pyshark.FileCapture(
            pcapfile, keep_packets=False, display_filter='ssl')

        eprint('Getting SSL info from capture...')
        # get_indexed_ssl_info(cap)
        sslpackets.apply_on_packets(net.get_ssl_info, timeout=1000)

        dtlspackets = pyshark.FileCapture(
            pcapfile, keep_packets=False, display_filter='dtls')

        eprint('Getting DTLS info from capture...')
        dtlspackets.apply_on_packets(net.get_dtls_info, timeout=1000)

        # Print report
        generate_report(app, pcapfile=pcapfile)

        # Reset globals
        net.clear()
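
Both tapioca scripts stream large captures with keep_packets=False and hand a callback to apply_on_packets, which invokes it once per packet that passes the display filter. A minimal sketch of that pattern, assuming a local capture.pcap and the same 'ssl' filter:

import pyshark

servers = set()

def collect_servers(packet):
    # apply_on_packets calls this once for every filtered packet.
    if 'ip' in packet:
        servers.add(packet.ip.dst)

cap = pyshark.FileCapture("capture.pcap", keep_packets=False, display_filter="ssl")
cap.apply_on_packets(collect_servers, timeout=1000)
cap.close()
print(servers)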
github vnetman / pcap2csv / pcap2csv.py View on Github
"""Main entry function called from main to process the pcap and
    generate the csv file.

    in_pcap = name of the input pcap file (guaranteed to exist)
    out_csv = name of the output csv file (will be created)

    This function walks over each packet in the pcap file, and for
    each packet invokes the render_csv_row() function to write one row
    of the csv.
    """

    # Open the pcap file with PyShark in "summary-only" mode, since this
    # is the mode where the brief textual description of the packet (e.g.
    # "Standard query 0xf3de A www.cisco.com", "Client Hello" etc.) are
    # made available.
    pcap_pyshark = pyshark.FileCapture(in_pcap, only_summaries=True)
    pcap_pyshark.load_packets()
    pcap_pyshark.reset()

    frame_num = 0
    ignored_packets = 0
    with open(out_csv, 'w') as fh_csv:
        # Open the pcap file with scapy's RawPcapReader, and iterate over
        # each packet. In each iteration get the PyShark packet as well,
        # and then call render_csv_row() with both representations to generate
        # the CSV row.
        for (pkt_scapy, _) in RawPcapReader(in_pcap):
            try:
                pkt_pyshark = pcap_pyshark.next_packet()
                frame_num += 1
                if not render_csv_row(pkt_pyshark, pkt_scapy, fh_csv):
                    ignored_packets += 1
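
As the comments above explain, only_summaries=True makes FileCapture return lightweight per-packet summaries (the brief one-line description) instead of fully dissected packets, which is much faster on large files. A minimal sketch, assuming an example.pcap:

import pyshark

cap = pyshark.FileCapture("example.pcap", only_summaries=True)
cap.load_packets()         # read all summaries up front, as pcap2csv does
for summary in cap:
    print(summary)         # brief one-line description of each packet
cap.close()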
github lixmk / Concierge / amag / symmetry-pcap2cards.py View on Github
def amag_parse(infile):
    print "[*] Loading pcap: "+infile+" ..."
    pcap = pyshark.FileCapture(infile, display_filter='tcp.port == 3001 && (frame contains "8Mt")')
    pcap.load_packets()
    num = len(pcap)
    print "[*] Parsing pcap for AMAG Symmetry badge numbers..."
    for packet in range(0 , num):
        pdata = str(pcap[packet].data.get_field_value('data'))
        full = pdata[-28:-12]
        raw_cn = re.findall('..',full[:10])
        raw_fc = re.findall('..',full[-6:])
        cn = int(str(int(str(int(str("0x"+raw_cn[0]), 16)-0x10).zfill(2))).zfill(2)+str(int(str(int(str("0x"+raw_cn[1]), 16)-0x10).zfill(2))).zfill(2)+str(int(str(int(str("0x"+raw_cn[2]), 16)-0x10).zfill(2))).zfill(2)+str(int(str(int(str("0x"+raw_cn[3]), 16)-0x10).zfill(2))).zfill(2)+str(int(str(int(str("0x"+raw_cn[4]), 16)-0x10).zfill(2))).zfill(2))
        fc = int(str(int(str(int(str("0x"+raw_fc[0]), 16)-0x10).zfill(2))).zfill(2)+str(int(str(int(str("0x"+raw_fc[1]), 16)-0x10).zfill(2))).zfill(2)+str(int(str(int(str("0x"+raw_fc[2]), 16)-0x10).zfill(2))).zfill(2))
        if cn > 0:
            with open("amag-badges.csv","a+")as f:
                f.write(str(cn)+","+str(fc)+","+infile+"\n")
            print "[+] CN: "+str(cn)+" FC:"+str(fc)
github slgobinath / pcap-processor / pcap_processor / reader.py View on Github
def _read_pcap(self, path):
        logger.debug("Reading pcap file: %s", path)
        packets = pyshark.FileCapture(path)
        for pcap in packets:
            has_transport = pcap.transport_layer is not None
            packet_time = float(pcap.sniff_timestamp)
            packet_dict = dict()
            highest_layer = pcap.highest_layer.upper()
            packet_dict["highest_layer"] = highest_layer
            if has_transport:
                packet_dict["transport_layer"] = pcap.transport_layer.upper()
            else:
                packet_dict["transport_layer"] = "NONE"
                packet_dict["src_port"] = -1
                packet_dict["dst_port"] = -1
                packet_dict["transport_flag"] = -1

            packet_dict["timestamp"] = int(packet_time * 1000)
            packet_dict["time"] = str(pcap.sniff_time)
github demisto / content / Scripts / script-PcapHTTPExtractor.py View on Github
def parse_pcap_http(pcap_file_path):
    """
        Prints the headers and body of the http response.
        By default returns a list of the results.
        Will return and exit if no http flows found

        :param pcap_file_path: local path of file (after getting the entryID)
        :return: return all http packets in a list
    """

    packets = pyshark.FileCapture(pcap_file_path)
    http_packets = []

    for packet in packets:

        # Check if this is an HTTP Packet
        if 'http' in packet:
            packet = parse_capture(packet)
            http_packets.append(packet)

    if len(http_packets) == 0:
        return_error('No HTTP flows found in specified file.')

    return http_packets
github CyberReboot / NetworkML / networkml / parsers / pcap / reader.py View on Github
def packetizer(path):
    '''
    Reads a pcap specified by the path and parses out the packets.
    Packets will be stored with a tuple key formatted as follows:
    (datetime, sIP:sPort, dIP:dPort, protocol, length)

    Args:
        path: Path to pcap to read

    Returns:
        packet_dict: Dictionary of packets with keys formatted as above
    '''
    packet_dict = OrderedDict()
    highest_layers_dict = {}
    with pyshark.FileCapture(path, use_json=True, include_raw=True, keep_packets=False,
            custom_parameters=['-o', 'tcp.desegment_tcp_streams:false', '-n']) as cap:
        for packet in cap:
            data = packet.get_raw_packet()
            head = parse_packet_head(packet)
            if head is not None:
                keys, highest_layers = head
                packet_dict[keys] = binascii.hexlify(data).decode('utf-8')
                for key, highest_layer in highest_layers.items():
                    if key not in highest_layers_dict:
                        highest_layers_dict[key] = set()
                    highest_layers_dict[key].update({highest_layer})
    return packet_dict, highest_layers_dict
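
The packetizer above asks tshark for JSON output with the raw frame attached (include_raw is used together with use_json) and then hex-encodes packet.get_raw_packet(). A minimal sketch of the same idea, assuming an example.pcap:

import binascii
import pyshark

# FileCapture also works as a context manager, as in the snippet above.
with pyshark.FileCapture("example.pcap", use_json=True, include_raw=True,
                         keep_packets=False) as cap:
    for packet in cap:
        raw = packet.get_raw_packet()                # raw bytes of the frame
        print(binascii.hexlify(raw).decode()[:32])   # first 16 bytes as hex
        break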