How to use the cbapi.response.models.Binary class in cbapi

To help you get started, we’ve selected a few cbapi examples, based on popular ways it is used in public projects.

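Before the project examples below, here is a minimal sketch of the pattern they all share: connect with CbResponseAPI, then select(Binary) either by MD5 hash or with a Solr-style where() query. The credential profile name and the MD5 hash used here are placeholders, not values taken from the examples.

from cbapi.response import CbResponseAPI
from cbapi.response.models import Binary

# Connect using a profile from your cbapi credential file
# (the profile name "default" is an assumption; use your own).
cb = CbResponseAPI(profile="default")

# Fetch a single binary by MD5 (the hash below is a placeholder).
binary = cb.select(Binary, "5D41402ABC4B2A76B9719D911017C592")
print(binary.original_filename, binary.size)

# Or run a Solr-style query and iterate over the matching binaries.
for b in cb.select(Binary).where("observed_filename:svchost.exe"):
    print(b.md5, b.host_count)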

github carbonblack / cbapi-python / test / regression / create_requests_cache_ng.py View on Github external
def large_binary_search():
    binary = cb.select(Binary).where('observed_filename=svchost.exe').first()
    print(binary)
github carbonblack / cbapi-python / tests / test_cber.py View on Github external
    def test_all_binary(self):
        binary_query = self.c.select(Binary).where('')
        if use_golden:
            test_result = getTestResult('test_all_binary')[0]
            assert_equal(len(binary_query),
                         test_result,
                         "Number of Binaries returned should be {0}, but received {1}".format(
                             test_result, len(binary_query)))
        else:
            insertResult('test_all_binary', str(len(binary_query)))
github carbonblack / cbapi-python / examples / response / new_binaries_with_netconns.py View on Github external
    opts = parser.parse_args()
    if not opts.date:
        parser.print_usage()
        sys.exit(-1)
    #
    # Initialize the cbapi-ng
    # TODO get_cb_object
    #
    cb = get_cb_response_object(opts)

    #
    # Main Query
    #
    start_date = "[" + opts.date + "T00:00:00 TO *]"
    binary_query = cb.select(Binary).where(("host_count:[1 TO 3]"
                                            " server_added_timestamp:" + start_date +
                                            " -observed_filename:*.dll"
                                            " -digsig_publisher:Microsoft*"
                                            " -alliance_score_srstrust:*"))
    #
    # Setup the csv writer
    #
    if not opts.output_file:
        output_file = open("new_binaries_with_netconns.csv", 'w', newline='')
    else:
        output_file = open(opts.output_file, 'w', newline='')
    csv_writer = csv.writer(output_file)
    #
    # Write out CSV header
    #
    csv_writer.writerow(("FileName", "Hostname", "Username", "Network Connections",
github carbonblack / cb-integration / cbint / detonation.py View on Github external
    def insert_binaries_from_db(self):
        """
        :return:
        """
        try:
            while True:
                try:
                    query = Binary.select(Binary.md5).where((Binary.stop_future_scans == False) |
                                                            (Binary.force_rescan == True)).order_by(
                        Binary.server_added_timestamp.desc(),
                        Binary.from_rabbitmq.desc()).limit(500)

                    cursor = db.execute(query)
                    for item in cursor:
                        md5 = item[0]
                        self.binary_insert_queue(md5)

                except Exception as e:
                    logger.error(traceback.format_exc())
                    report_error_statistics(str(e))

                time.sleep(.1)
        except:
            logger.error(traceback.format_exc())
github carbonblack / cbapi-python / examples / response / binary_download.py View on Github external
def main():
    parser = build_cli_parser()
    parser.add_argument("--md5", help="binary query", required=True)
    parser.add_argument("--filename", help="local filename to save the binary as", required=True)
    args = parser.parse_args()

    cb = get_cb_response_object(args)
    binary = cb.select(Binary, args.md5)
    shutil.copyfileobj(binary.file, open(args.filename, "wb"))

    print("-> Downloaded binary %s [%u bytes]" % (args.md5, binary.size))

    return 0
github airbnb / binaryalert / lambda_functions / downloader / main.py View on Github external
def _process_md5(md5: str) -> bool:
    """Download the given file from CarbonBlack and upload to S3, returning True if successful."""
    try:
        binary = CARBON_BLACK.select(Binary, md5)
        _upload_to_s3(binary)
        return True
    except zipfile.BadZipFile:
        LOGGER.exception('[BadZipFile] Error downloading %s', md5)
        LOGGER.info('This md5 is invalid and will not be retried')
        return False
    except (BotoCoreError, ServerError):
        LOGGER.exception('Error downloading %s', md5)
        LOGGER.error(
            'A temporary error was encountered during downloading. This md5 will be '
            'retried at a later time.'
        )
        raise
    except ObjectNotFoundError:
        LOGGER.exception('Error downloading %s', md5)
        LOGGER.info(
github airbnb / binaryalert / lambda_functions / downloader / copy_all.py View on Github external
    # Create process communication queues.
    tasks = JoinableQueue(MAX_TASK_QUEUE_SIZE)  # CopyTasks to execute.
    failures: Queue = Queue()  # A list of MD5s which failed to copy.

    # Start the consumer processes.
    LOGGER.info('Start %d consumers', NUM_CONSUMERS)
    consumers = [Consumer(tasks, failures) for _ in range(NUM_CONSUMERS)]
    for worker in consumers:
        worker.start()

    # Enumerate all CarbonBlack binaries and enqueue a CopyTask for each one.
    # This main thread is the producer, enqueuing CopyTasks as fast as it can enumerate the
    # binaries (which actually takes a relatively long time).
    # As soon as a CopyTask is enqueued, any worker process (Consumer) can immediately execute it.
    for index, binary in enumerate(main.CARBON_BLACK.select(cbapi.response.models.Binary).all()):
        copy_task = CopyTask(index, binary.md5)
        LOGGER.debug('Enqueuing %s', copy_task)
        tasks.put(copy_task)  # Block if necessary until the task queue has space.

    # Add a "poison pill" for each Consumer, marking the end of the task queue.
    for _ in range(NUM_CONSUMERS):
        tasks.put(None)

    # Wait for all of the tasks to finish.
    tasks.join()
    LOGGER.info('All CopyTasks Finished!')

    # Grab the MD5s which failed to copy, if any.
    failed_md5s = []
    while True:
        try:
github carbonblack / cbapi-python / examples / response / cmd_exe_filemods.py View on Github external
    parser = build_cli_parser("Search for cmd.exe writing to exe and dll filepaths")

    args = parser.parse_args()
    cb = get_cb_response_object(args)

    for proc in cb.select(Process).where("process_name:cmd.exe (filemod:*.exe or filemod:*.dll)"):
        for fm in proc.filemods:
            if not fm.path.lower().endswith((".exe", ".dll")):
                continue

            signed = ""
            product_name = ""

            if fm.type == "LastWrote" and fm.md5:
                try:
                    b = cb.select(Binary, fm.md5)
                    signed = b.signed
                    product_name = b.product_name
                except ObjectNotFoundError:
                    pass

            print("%s,%s,%s,%s,%s,%s,%s,%s,%s" % (str(fm.timestamp), proc.hostname, proc.username, proc.path,
                                                  fm.path, fm.type, fm.md5, signed, product_name))
github carbonblack / cb-integration / cbint / detonation.py View on Github external
        def submit_binary_to_db_and_queue(message):

            with db.transaction() as txn:
                try:
                    bin = Binary()
                    msg = json.loads(message)
                    bin.md5 = msg.get("md5")
                    logger.debug("Submitting binary to db and queue: {}".format(bin.md5))
                    bin.from_rabbitmq = True
                    # bin.server_added_timestamp = datetime.fromtimestamp(msg.get("event_timestamp")).isoformat()#datetime.fromtimestamp(msg.get("event_timestamp"))
                    #
                    # Save into database
                    #
                    bin.save()
                    #
                    # Testing this out for performance
                    # self.binary_insert_queue(bin.md5, 1)
                    #
                except Exception as e:
                    logger.debug("Exception in async consumer....")
                    logger.debug(e)
github carbonblack / cbapi-python / examples / response / dump_all_binaries.py View on Github external
def dump_all_binaries(cb, destdir, query=None):
    threads = []
    num_worker_threads = 10
    for i in range(num_worker_threads):
        t = BinaryWorker(destdir)
        t.daemon = True
        t.start()
        threads.append(t)
    if not query:
        for binary in cb.select(Binary).all():
            worker_queue.put(binary)
    else:
        for binary in cb.select(Binary).where(query).all():
            worker_queue.put(binary)
    worker_queue.join()