How to use the pymongo.ASCENDING constant in pymongo

To help you get started, we’ve selected a few pymongo examples, based on popular ways pymongo is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github trendrr / whirlwind / whirlwind / core / request.py View on Github external
	@staticmethod
	def paged_list(handler,table_class,select=None):
		"""Return one page of documents from table_class as a cursor.

		Reads 'page', 'count', 'order_by' and 'order' from the request
		handler's arguments and builds a skip/limit (optionally sorted)
		query.  NOTE(review): handler.get_argument commonly returns
		strings, so the numeric comparisons and arithmetic below assume
		the framework coerces them to ints — confirm against the
		handler implementation.
		"""
	
		# Requested page number, clamped to a minimum of 1.
		page = handler.get_argument('page',1)
		page = page if page >= 1 else 1
		
		# Page size, clamped to a minimum of 1 (defaults to 10).
		count = handler.get_argument('count',10)
		count = count if count >= 1 else 10
		
		sort = None
		order_by = handler.get_argument('order_by',None)
		
		# The matching 'except' clause lies outside this snippet.
		try:
			if order_by:
				# If 'order' is absent, order is None and order.lower()
				# raises AttributeError (swallowed by the enclosing try).
				order = handler.get_argument('order',None)
				order = pymongo.DESCENDING if order.lower() == 'desc' else pymongo.ASCENDING
				# NOTE(review): pymongo sort specs map field -> direction;
				# this dict maps direction -> field, which looks inverted
				# (and a plain dict is not an accepted sort spec) — verify.
				sort = {
					order:order_by
				}

			if select:
				if sort:
					results = table_class.find(select).skip((page-1)*count).limit(count).sort(sort)
				else:
					results = table_class.find(select).skip((page-1)*count).limit(count)
				
				# Total number of matching documents, ignoring paging.
				total = table_class.find(select).count()
			else:
				if sort:
					results = table_class.find().skip((page-1)*count).limit(count).sort(sort)
				else:
					results = table_class.find().skip((page-1)*count).limit(count)
github DataBrewery / cubes / cubes / backends / mongo / browser.py View on Github external
# NOTE: this fragment starts mid-method; the first line below lost its
# original indentation during extraction.
self.logger.debug("facts: getting all fields")
        else:
            attributes = self.cube.get_attributes(fields)
            self.logger.debug("facts: getting fields: %s" % fields)

        # Prepare the query
        query_obj, fields_obj = self._build_query_and_fields(cell, [], for_project=False)

        # TODO include fields_obj, fully populated
        cursor = self.data_store.find(query_obj)

        order = self.prepare_order(order)
        if order:
            order_obj = self._order_to_sort_object(order)
            # Only the first (field, direction) pair is used for sorting.
            # NOTE(review): iteritems()/.next() is Python 2 only; under
            # Python 3 this would be next(iter(order_obj.items())).
            k, v = order_obj.iteritems().next()
            cursor = cursor.sort(k, pymongo.DESCENDING if v == -1 else pymongo.ASCENDING)

        # Paging: skip whole pages, then cap the page size.
        if page_size and page > 0:
            cursor = cursor.skip(page * page_size)

        if page_size and page_size > 0:
            cursor = cursor.limit(page_size)

        facts = MongoFactsIterator(cursor, attributes, self.mapper,
                                   self.datesupport)

        return facts
github CounterpartyXCP / counterblock / counterblock / lib / modules / dex / assets_trading.py View on Github external
# NOTE: this fragment starts mid-function; the first line below lost its
# original indentation during extraction, and the trailing pipeline is cut off.
for a in [config.XCP, config.BTC]:
            # Hourly price/volume aggregation over the last 7 days for the
            # (a, asset) trading pair.
            _7d_history = config.mongo_db.trades.aggregate([
                {"$match": {
                    "base_asset": a,
                    "quote_asset": asset,
                    "block_time": {"$gte": start_dt_7d}
                }},
                {"$project": {
                    "year":  {"$year": "$block_time"},
                    "month": {"$month": "$block_time"},
                    "day":   {"$dayOfMonth": "$block_time"},
                    "hour":  {"$hour": "$block_time"},
                    "unit_price": 1,
                    "base_quantity_normalized": 1  # to derive volume
                }},
                # pymongo.ASCENDING == 1, a valid Mongo sort direction.
                # NOTE(review): the preceding $project does not pass
                # block_time through, so this $sort references a missing
                # field — verify the intended ordering.
                {"$sort": {"block_time": pymongo.ASCENDING}},
                {"$group": {
                    "_id":   {"year": "$year", "month": "$month", "day": "$day", "hour": "$hour"},
                    "price": {"$avg": "$unit_price"},
                    "vol":   {"$sum": "$base_quantity_normalized"},
                }},
            ])
            _7d_history = list(_7d_history)
            if a == config.XCP:
                _7d_history_in_xcp = _7d_history
            else:
                _7d_history_in_btc = _7d_history
    else:  # get the XCP/BTC market and invert for BTC/XCP (_7d_history_in_btc)
        _7d_history = config.mongo_db.trades.aggregate([
            {"$match": {
                "base_asset": config.XCP,
                "quote_asset": config.BTC,
github ospaf / ospaf-primary / GithubUser / GitDataPrepare / event / event.py View on Github external
# NOTE: Python 2 code (print statement, dict.has_key); the snippet is cut
# off before the function's end.
def generateToFile(self, output_file):
        """Write the login of every user with start_id <= id < end_id to
        output_file, one per line, resuming from a saved checkpoint."""
        info = self.task.getInfo()
        start_id = info["start"]
        end_id = info["end"]
        # Resume from the last checkpoint if the task was interrupted.
        if info.has_key("current"):
            start_id = info["current"]
            print "Find unfinished task, continue to work at " + str(start_id)

        query = {"id": {"$gte": start_id, "$lt": end_id}}

        # Iterate users in ascending id order.
        # NOTE(review): Cursor.count() was removed in PyMongo 4;
        # collection.count_documents(query) is the replacement.
        res = self.db["user"].find(query).sort("id", pymongo.ASCENDING)
        res_len = res.count()
        i = 0
        # NOTE(review): fw is never closed in the visible lines — confirm
        # the (truncated) remainder closes it, or use a with-block.
        fw = open(output_file, "w")
        for item in res:
            fw.write(item["login"])
            fw.write("\n")
github alessandrodd / playstore_crawler / db_interface.py View on Github external
# Name of the collection holding the crawl scheduling queue.
CRAWLQUEUE_COLLECTION_NAME = "crawler_queue"

# configure remote dump location
client = pymongo.MongoClient(config.mongodb["address"], config.mongodb["port"], username=config.mongodb["user"], password=config.mongodb["password"])
db = client[config.mongodb["name"]]
playstore_col = db[PLAYSTORE_COLLECTION_NAME]
# for a given device and a given version code, an app should be unique
playstore_col.create_index(
    [("docid", pymongo.DESCENDING), ("details.appDetails.versionCode", pymongo.DESCENDING),
     ("device", pymongo.DESCENDING)], unique=True)
# ensure high performance for undownloaded apk retrieval
playstore_col.create_index([("download_start_time", pymongo.ASCENDING), ("offer.micros", pymongo.ASCENDING)])
crawlqueue_col = db[CRAWLQUEUE_COLLECTION_NAME]
# these indexes ensure high performance for the crawl scheduling operations
crawlqueue_col.create_index([("data", pymongo.DESCENDING), ("task", pymongo.DESCENDING)], unique=True)
crawlqueue_col.create_index([("start_time", pymongo.ASCENDING)])
crawlqueue_col.create_index([("start_time", pymongo.ASCENDING), ("end_time", pymongo.ASCENDING)])


@retry(pymongo.errors.AutoReconnect, tries=5, timeout_secs=1)
def dump_to_mongodb(entries, target_collection):
    """
    Dumps the entries to a given mongodb collection

    :param entries: collection of objects to dump
    :param target_collection: mongodb collection object to save the entries into
    """
    # Nothing to write — avoid calling insert_many with an empty list,
    # which raises in pymongo.
    if not entries:
        return
    try:
        # Second positional argument is insert_many's `ordered` flag:
        # False lets remaining documents be inserted even if some fail.
        target_collection.insert_many(entries, False)
    except BulkWriteError as bwe:
github ospaf / ospaf-primary / GithubUser / git_data_prepare / event / event_list_loop.py View on Github external
# NOTE: Python 2 fragment starting mid-method; the first line and the two
# column-0 comments below lost their original indentation during extraction.
DMTask().updateTask("github", self.task, {"status": "running"})

        start_id = self.task["start"]
        end_id = self.task["end"]
        # Resume from the last checkpoint if present.
        if self.task.has_key("current"):
            start_id = self.task["current"]
            print "Find unfinished task, continue to work at " + str(start_id)

        if end_id <= start_id:
# This should be checked in DMTask
            print "Error in the task"
            return

        query = {"id": {"$gte": start_id, "$lt": end_id}}

        # Iterate users in ascending id order.
        # NOTE(review): Cursor.count() was removed in PyMongo 4;
        # collection.count_documents(query) is the replacement.
        res = self.db["user"].find(query).sort("id", pymongo.ASCENDING)
        res_len = res.count()
        i = 0
        percent_gap = res_len/100
        for item in res:
            updated_date = item["updated_at"]
            i += 1
            # Upload events only for recently active users.
            if active_date(updated_date):
                upload_user_event(self.db, item["login"])
            # Checkpoint progress on every user when there are fewer than
            # 100 in total...
            if percent_gap == 0:
                percent = 1.0 * i / res_len
                DMTask().updateTask("github", self.task, {"current": item["id"], "percent": percent})
# ...otherwise checkpoint roughly every 1% of processed users.
            elif i%percent_gap == 0:
                percent = 1.0 * i / res_len
                DMTask().updateTask("github", self.task, {"current": item["id"], "percent": percent})
github terbo / sigmon / app / sigmon.py View on Github external
def first_setup():
  """Create sigmon's Mongo collections and (re)build their indexes.

  Safe to re-run: collections are only created when missing, and
  indexes are dropped and recreated on every call.
  NOTE(review): db.collection_names() was removed in PyMongo 4;
  list_collection_names() is the replacement.
  """
  collections = list(db.collection_names())

  # Raw probe-request records.
  if 'probes' not in collections: db.create_collection('probes')
  db.probes.drop_indexes()
  db.probes.create_index('_created',sparse=True, background=True)
  db.probes.create_index('time',sparse=True, background=True)
  db.probes.create_index('mac',sparse=True, background=True)
  # Compound index: ascending time plus a text index on mac.
  db.probes.create_index([('time',pymongo.ASCENDING) ,('mac',pymongo.TEXT)],sparse=True, background=True)

  # Hourly rollups of probe data.
  if 'probes.hourly' not in collections: db.create_collection('probes.hourly')
  db.probes.hourly.drop_indexes()
  db.probes.hourly.create_index('hour',sparse=True, background=True)
  db.probes.hourly.create_index('mac',sparse=True, background=True)
  db.probes.hourly.create_index([('hour',pymongo.ASCENDING) ,('mac',pymongo.TEXT)],sparse=True, background=True)
  
  # Daily rollups of probe data.
  if 'probes.daily' not in collections: db.create_collection('probes.daily')
  db.probes.daily.drop_indexes()
  db.probes.daily.create_index('day',sparse=True, background=True)
  db.probes.daily.create_index('mac',sparse=True, background=True)
  db.probes.daily.create_index([('day',pymongo.ASCENDING) ,('mac',pymongo.TEXT)],sparse=True, background=True)

  # Supporting collections; only aps gets an index (on mac).
  if 'sensors' not in collections: db.create_collection('sensors')
  
  if 'ssids' not in collections: db.create_collection('ssids')
  
  if 'vendors' not in collections: db.create_collection('vendors')
  
  if 'aps' not in collections: db.create_collection('aps')
  db.aps.drop_indexes()
  db.aps.create_index('mac',sparse=True, background=True)
github kurtiss / monque / monque / base.py View on Github external
# NOTE: fragment starts mid-method; the first line below lost its original
# indentation during extraction, and the trailing 'else' branch is cut off.
result = None

        # When grabbed for a period, push scheduled_time forward so other
        # workers skip this job; otherwise remove it atomically.
        if grabfor:
            extra = [('update', {'$set': {"scheduled_time": now + datetime.timedelta(seconds=grabfor)}})]
        else:
            extra = [('remove', True)]
            
        # Only due jobs with retries remaining are eligible.
        query = dict(
            scheduled_time = {'$lte' : now},
            retries = {'$gt' : 0}
        )

        if not ordered:
            # Randomized pick: probe on both sides of a random token so a
            # match is found no matter where the token falls in the range.
            capture_token = self._random_token()
            directions = (('$gte', pymongo.ASCENDING), ('$lt', pymongo.DESCENDING))

            for operator, order in directions:
                query['random_token'] = { operator : capture_token }
                try:
                    # SON preserves key order, as the findandmodify command
                    # requires.  NOTE(review): SON lives in bson.son in
                    # current PyMongo; pymongo.son is a 2.x-era path.
                    result = self.mongodb.command(pymongo.son.SON([
                        ('findandmodify', c.name),
                        ('query', query),
                        ('sort', dict(random_token = order)),
                    ] + extra))
                except pymongo.errors.OperationFailure:
                    pass # No matching object found
                else:
                    break
        else:
            try:
                result = self.mongodb.command(pymongo.son.SON([
github openstates / billy / billy / utils / popularity.py View on Github external
import datetime
from billy.core import db
import pymongo


# Compound index backing popularity-count lookups by (type, date, obj_id).
# ensure_index was deprecated in PyMongo 3.0 and removed in 4.0;
# create_index is the supported equivalent and is idempotent server-side.
db.popularity_counts.create_index([
    ('type', pymongo.ASCENDING),
    ('date', pymongo.ASCENDING),
    ('obj_id', pymongo.ASCENDING),
])


class Counter(object):
    """Tracks per-day popularity counts for typed objects in MongoDB."""

    def __init__(self, db, collection_name='popularity_counts'):
        # Bind the collection that stores the counts
        # (default: db.popularity_counts).
        self.counts = getattr(db, collection_name)

    def inc(self, type_name, obj_id, **kwargs):
        """Bump today's count for (type_name, obj_id) by one.

        Extra keyword arguments are $set onto the count document.
        Upserts, so the first hit of the day creates the record.
        """
        today_ordinal = datetime.datetime.utcnow().date().toordinal()
        selector = {
            'type': type_name,
            'obj_id': obj_id,
            'date': today_ordinal,
        }
        changes = {'$inc': {'count': 1}, '$set': kwargs}
        self.counts.update(selector, changes, upsert=True, safe=False)

    def top(self, type_name, n=1, days=None, with_counts=False, **kwargs):