How to use the pymongo.UpdateOne function in pymongo

To help you get started, we’ve selected a few pymongo.UpdateOne examples, based on popular ways it is used in public projects.

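Every example below follows the same pattern: build UpdateOne request objects, then submit them together with Collection.bulk_write. As a baseline, here is a minimal, self-contained sketch of that pattern; the connection string, database, and collection names are placeholders:

from pymongo import MongoClient, UpdateOne

# Placeholder connection details; adjust for your deployment.
client = MongoClient("mongodb://localhost:27017")
collection = client["test_db"]["users"]

requests = [
    # upsert=True inserts the document when no match exists.
    UpdateOne({"_id": 1}, {"$set": {"name": "alice"}}, upsert=True),
    UpdateOne({"_id": 2}, {"$inc": {"logins": 1}}),
]
result = collection.bulk_write(requests, ordered=False)
print(result.upserted_count, result.modified_count)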

github brabiega / quakestats / tests / test_datastore.py
def test_store_players(self, ds, report):
        ds.store_players(report)

        ds.db.player.bulk_write.assert_called_with([
            pymongo.UpdateOne(
                {'id': '1', 'server_domain': 'domain'},
                {'$set': {
                    'server_domain': 'domain', 'name': 'n1', 'model': 'm1', 'id': '1',
                }},
                upsert=True
            )])
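The test above pins down the exact requests passed to bulk_write. A hypothetical store_players that would satisfy it could look like this (report.players and report.server_domain are assumed attributes; this is not the real quakestats code):

import pymongo

def store_players(self, report):
    # Build one upsert per player so re-imported reports update in place.
    requests = [
        pymongo.UpdateOne(
            {'id': player['id'], 'server_domain': report.server_domain},
            {'$set': dict(player, server_domain=report.server_domain)},
            upsert=True,
        )
        for player in report.players  # assumed attribute
    ]
    self.db.player.bulk_write(requests)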
github nguuuquaaa / Belphegor / belphegor / experimental.py
def get_update_request(self, member_stats, status):
        member_id = member_stats.id
        items = member_stats.process_status(status, update=True)
        if items:
            return pymongo.UpdateOne(
                {"user_id": member_id},
                {"$push": {"status": {"$each": items}}}
            )
        else:
            return None
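Because the helper returns None when there is nothing to push, callers can filter before batching. A hypothetical caller-side sketch (the pending iterable and collection handle are assumptions):

ops = []
for member_stats, status in pending:  # "pending" is an assumed iterable
    op = self.get_update_request(member_stats, status)
    if op is not None:
        ops.append(op)
if ops:
    member_collection.bulk_write(ops)  # collection handle assumed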
github lorien / ioweb / ioweb / mongodb.py
def update_one(self, *args, **kwargs):
        self.ops.append(UpdateOne(*args, **kwargs))
        if len(self.ops) >= self.bulk_size:
            return self._write_ops()
        else:
            return None
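The same buffering idea as a self-contained class; this is a sketch, not ioweb's actual implementation, with _write_ops assumed to flush and reset the buffer:

from pymongo import UpdateOne

class BulkBuffer:
    """Collect UpdateOne requests and flush them in fixed-size batches."""

    def __init__(self, collection, bulk_size=100):
        self.collection = collection
        self.bulk_size = bulk_size
        self.ops = []

    def update_one(self, *args, **kwargs):
        self.ops.append(UpdateOne(*args, **kwargs))
        if len(self.ops) >= self.bulk_size:
            return self._write_ops()
        return None

    def _write_ops(self):
        # Swap the buffer out before writing so it is reset even on error.
        ops, self.ops = self.ops, []
        return self.collection.bulk_write(ops, ordered=False)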
github glotzerlab / signac / signac / core / pymongodict.py
        if issues:
            raise BufferedDocumentError(issues)

        # now go through the partial updates without local info
        while self.update_buf:
            jobid, blob = self.update_buf.popitem()
            update_blobs[jobid] = blob

        # construct the query
        ops = []
        for jobid, blob in update_blobs.items():
            update_query = {}
            for key, val in blob.items():
                update_query['doc.' + key] = val
            update_query['doc_last_modified'] = time_current
            ops.append(UpdateOne({'_id': jobid}, {'$set': update_query}))

        if ops:
            try:
                # submit the query as a parallel bulk write operation
                self.collection.bulk_write(ops, ordered=False)
            except BulkWriteError as error:
                logger.error(str(error))
                raise
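The 'doc.' prefix builds dotted-path keys, so each $set updates individual nested fields rather than replacing the whole embedded document. A sketch of one resulting request (id and values are placeholders):

op = UpdateOne(
    {'_id': 'jobid-1'},
    {'$set': {'doc.status': 'done', 'doc.step': 5, 'doc_last_modified': 1700000000.0}},
)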
github aleph-im / pyaleph / src / aleph / chains / common.py
            'content': content,
            'item_content': message.get('item_content'),
            'item_type': message.get('item_type'),
            'channel': message.get('channel'),
            'signature': message.get('signature')
        }
        should_commit = True
        #await Message.collection.insert_one(message)

        # since it's on-chain, we need to keep that content.
        # if message['item_type'] == 'ipfs' and app['config'].ipfs.enabled.value:
        #     LOGGER.debug("Pinning hash %s" % hash)
            # await pin_hash(hash)

    if should_commit:
        action = UpdateOne(filters, updates, upsert=True)
        if not bulk_operation:
            await Message.collection.bulk_write([action])
        else:
            return action
    return True  # message handled.
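Returning the UpdateOne lets one code path serve both modes: write immediately, or hand the action back for batching. A hypothetical caller for the bulk path (handle_message is an assumed name for the function above, not pyaleph's actual API):

async def process_pending(messages):
    actions = []
    for message in messages:
        action = await handle_message(message, bulk_operation=True)  # assumed name
        if isinstance(action, UpdateOne):  # the function may also return True/False
            actions.append(action)
    if actions:
        await Message.collection.bulk_write(actions)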
github MolSSI / QCFractal / qcfractal / storage_sockets / base_mongo_socket.py
        Returns
        -------

        """
        # Very dangerous, we need to modify this substantially
        # Does not currently handle multiple identical commands
        # Only handles service updates

        bulk_commands = []
        for hook_list in hooks:
            for hook in hook_list:
                commands = {}
                for com in hook["updates"]:
                    commands["$" + com[0]] = {com[1]: com[2]}

                upd = pymongo.UpdateOne({"_id": ObjectId(hook["document"][1])}, commands)
                bulk_commands.append(upd)

        if len(bulk_commands) == 0:
            return

        ret = self._tables["service_queue"].bulk_write(bulk_commands, ordered=False)
        return ret
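The inner loop turns each hook tuple into a MongoDB operator document: com[0] becomes the operator name and (com[1], com[2]) the field/value pair. The implied hook shape, reconstructed for illustration (an assumption based on the loop above, not taken from QCFractal's docs):

hook = {
    "document": ("service_queue", "5f0c1a2b3c4d5e6f70818283"),  # (table, ObjectId string)
    "updates": [
        ("set", "status", "COMPLETE"),  # -> {"$set": {"status": "COMPLETE"}}
        ("inc", "retries", 1),          # -> {"$inc": {"retries": 1}}
    ],
}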
github Clinical-Genomics / scout / scout / adapter / mongo / variant_loader.py
        # Get all variants sorted by rank score
        variants = self.variant_collection.find(
            {
                "case_id": case_obj["_id"],
                "category": category,
                "variant_type": variant_type,
            }
        ).sort("rank_score", pymongo.DESCENDING)

        LOG.info("Updating variant_rank for all variants")

        requests = []

        for index, var_obj in enumerate(variants):

            operation = pymongo.UpdateOne(
                {"_id": var_obj["_id"]}, {"$set": {"variant_rank": index + 1}}
            )
            requests.append(operation)

            if len(requests) <= 5000:
                continue
            try:
                self.variant_collection.bulk_write(requests, ordered=False)
                requests = []
            except BulkWriteError as err:
                LOG.warning("Updating variant rank failed")
                raise err

        # Update the final bulk
        if len(requests) > 0:
            try:
                self.variant_collection.bulk_write(requests, ordered=False)
            except BulkWriteError as err:
                LOG.warning("Updating variant rank failed")
                raise err
github man-group / arctic / arctic / store / _ndarray_store.py
                'data': Binary(chunk),
                'compressed': True,
                'segment': min((i + 1) * rows_per_chunk - 1, length - 1) + segment_offset,
            }
            segment_index.append(segment['segment'])
            sha = checksum(symbol, segment)
            segment_spec = {'symbol': symbol, 'sha': sha, 'segment': segment['segment']}

            if ARCTIC_FORWARD_POINTERS_CFG is FwPointersCfg.DISABLED:
                if sha not in symbol_all_previous_shas:
                    segment['sha'] = sha
                    bulk.append(pymongo.UpdateOne(segment_spec,
                                                  {'$set': segment, '$addToSet': {'parent': version['_id']}},
                                                  upsert=True))
                else:
                    bulk.append(pymongo.UpdateOne(segment_spec,
                                                  {'$addToSet': {'parent': version['_id']}}))
            else:
                version_shas.add(sha)

                # We only keep for the records the ID of the version which created the segment.
                # We also need the uniqueness of the parent field for the (symbol, parent, segment) index,
                # because upon mongo_retry "dirty_append == True", we compress and only the SHA changes
                # which raises DuplicateKeyError if we don't have a unique (symbol, parent, segment).
                set_spec = {'$addToSet': {'parent': version['_id']}}

                if sha not in symbol_all_previous_shas:
                    segment['sha'] = sha
                    set_spec['$set'] = segment
                    bulk.append(pymongo.UpdateOne(segment_spec, set_spec, upsert=True))
                elif ARCTIC_FORWARD_POINTERS_CFG is FwPointersCfg.HYBRID:
                    bulk.append(pymongo.UpdateOne(segment_spec, set_spec))
github Clinical-Genomics / scout / scout / adapter / mongo / variant_loader.py
def update_mongo_compound_variants(self, bulk):
        """Update the compound information for a bulk of variants in the database

            Args:
                bulk(dict): {'_id': scout.models.Variant}

        """
        requests = []
        for var_id in bulk:
            var_obj = bulk[var_id]
            if not var_obj.get("compounds"):
                continue
            # Add a request to update compounds
            operation = pymongo.UpdateOne(
                {"_id": var_obj["_id"]}, {"$set": {"compounds": var_obj["compounds"]}}
            )
            requests.append(operation)

        if not requests:
            return

        try:
            self.variant_collection.bulk_write(requests, ordered=False)
        except BulkWriteError as err:
            LOG.warning("Updating compounds failed")
            raise err
github VOLTTRON / volttron / services / core / MongodbHistorian / mongodb / historian.py
    @staticmethod
    def insert_to_daily(db, data_id, topic_id, ts, value):
        rollup_day = ts.replace(hour=0, minute=0, second=0, microsecond=0)
        position = ts.hour * 60 + ts.minute
        sum_value = MongodbHistorian.value_to_sumable(value)

        one = UpdateOne({'ts': rollup_day, 'topic_id': topic_id},
                        {'$push': {"data." + str(position): [ts, value]},
                         '$inc': {'count': 1, 'sum': sum_value},
                         '$set': {'last_updated_data': data_id}})
        return one
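Each call returns a single UpdateOne aimed at that day's rollup document; note that upsert is not set, so the daily document is presumably created elsewhere. Batching the returned requests might look like this (the db handle, the 'daily_data' collection name, and the samples iterable are assumptions):

ops = [
    MongodbHistorian.insert_to_daily(db, data_id, topic_id, ts, value)
    for data_id, topic_id, ts, value in samples
]
if ops:
    db['daily_data'].bulk_write(ops, ordered=False)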