How to use the datetime.datetime.min attribute in Python

To help you get started, we’ve selected a few datetime.datetime.min examples, based on popular ways it is used in public projects.
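datetime.datetime.min is a class attribute rather than a function: it holds the earliest datetime the type can represent, datetime(1, 1, 1, 0, 0). It is naive (its tzinfo is None), which matters whenever it meets timezone-aware values. In practice it shows up in two roles: as a sentinel meaning "no timestamp yet", and as an identity value when accumulating a running maximum. A minimal sketch:

from datetime import datetime

print(datetime.min)         # 0001-01-01 00:00:00
print(datetime.min.tzinfo)  # None -- datetime.min is naive

# Sentinel pattern: "no timestamp recorded yet"
last_seen = datetime.min
if last_seen == datetime.min:
    print("nothing recorded so far")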


github nasa-gibs / worldview / tasks / processTemporalLayer.py View on Github
else:
                startTime = times[0].replace('T', ' ').replace('Z', '')
                endTime = times[1].replace('T', ' ').replace('Z', '')
                start_date = min(start_date,
                    datetime.strptime(startTime, "%Y-%m-%d %H:%M:%S"))
                end_date = max(end_date,
                    datetime.strptime(endTime, "%Y-%m-%d %H:%M:%S"))
                if start_date:
                    startTimeParse = datetime.strptime(startTime, "%Y-%m-%d %H:%M:%S")
                    date_range_start.append(startTimeParse.strftime("%Y-%m-%d") + "T" + startTimeParse.strftime("%H:%M:%S") + "Z")
                if end_date:
                    endTimeParse = datetime.strptime(endTime, "%Y-%m-%d %H:%M:%S")
                    date_range_end.append(endTimeParse.strftime("%Y-%m-%d") + "T" + endTimeParse.strftime("%H:%M:%S") + "Z")

            wv_layer["startDate"] = start_date.strftime("%Y-%m-%d") + "T" + start_date.strftime("%H:%M:%S") + "Z"
            if end_date != datetime.min:
                wv_layer["endDate"] = end_date.strftime("%Y-%m-%d") + "T" + end_date.strftime("%H:%M:%S") + "Z"
            if date_range_start and date_range_end:
                wv_layer["allDateRanges"] = [{"startDate": s, "endDate": e} for s, e in zip(date_range_start, date_range_end)]
    except ValueError:
        raise Exception("Invalid time: {0}".format(range))
    return wv_layer
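In the Worldview excerpt above, start_date and end_date are presumably seeded with datetime.max and datetime.min before the loop (the seeding falls outside the excerpt), so min() and max() converge on the true temporal range, and the end_date != datetime.min check afterwards reveals whether any end time was actually seen. A minimal sketch of that seeding pattern, with illustrative data:

from datetime import datetime

timestamps = [datetime(2020, 1, 5), datetime(2019, 6, 1), datetime(2021, 3, 9)]

start_date = datetime.max  # identity value for min()
end_date = datetime.min    # identity value for max()
for ts in timestamps:
    start_date = min(start_date, ts)
    end_date = max(end_date, ts)

if end_date != datetime.min:  # at least one timestamp was processed
    print(start_date, end_date)  # 2019-06-01 00:00:00 2021-03-09 00:00:00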
github antonkorbalev / stockdataprocessing / DbDump.py View on Github
def parse_date(ts):
    # parse date from a UNIX time stamp
    return datetime.fromtimestamp(float(ts))


date = datetime.utcnow() - timedelta(days=daysTotal)
dateStop = datetime.utcnow()

candleDiff = conf.candleDiff
if conf.candlePeriod == 'M':
    candleDiff = candleDiff * 60
if conf.candlePeriod == 'H':
    candleDiff = candleDiff * 3600

last_id = datetime.min
while date < dateStop - timedelta(seconds=step):
    dateFrom = date
    dateTo = date + timedelta(seconds=step)
    data = downloader.get_data_from_oanda_fx(oanda, conf.insName, '{0}{1}'.format(conf.candlePeriod, conf.candleDiff),
                                             dateFrom, dateTo)
    if len(data.get('candles')) > 0:
        cmd = 'INSERT INTO {0} VALUES'.format(tName)
        cmd_bulk = ''
        for candle in data.get('candles'):
            id = parse_date(candle.get('time'))
            # add missing dates (when the price does not change)
            """
            if last_id != datetime.min:
                md = last_id + timedelta(seconds=candleDiff)
                while md != id:
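Here datetime.min is the "no previous candle" sentinel: last_id starts at datetime.min, and the commented-out gap-filling block is guarded by last_id != datetime.min, so it can only run once a real candle timestamp has replaced the sentinel.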
github seung-lab / PyChunkedGraph / src / pychunkedgraph / chunkedgraph.py View on Github
def read_agglomeration_id_history(self, agglomeration_id, time_stamp=None):
        """ Returns all agglomeration ids agglomeration_id was part of

        :param agglomeration_id: int
        :param time_stamp: None or datetime
            restrict search to ids created after this time_stamp
            None=search whole history
        :return: array of int
        """
        if time_stamp is None:
            time_stamp = datetime.datetime.min

        if time_stamp.tzinfo is None:
            time_stamp = UTC.localize(time_stamp)

        id_working_set = np.array([agglomeration_id], dtype=np.uint64)
        visited_ids = []
        id_history = [agglomeration_id]

        former_parent_key = serialize_key("former_parents")
        new_parent_key = serialize_key("new_parents")

        i = 0
        while len(id_working_set) > 0:
            i += 1

            next_id = id_working_set[0]
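Note the two-step guard at the top of this method: datetime.datetime.min is naive, and Python raises TypeError when naive and timezone-aware datetimes are compared, so the sentinel is localized to UTC before it is used against aware timestamps. A minimal sketch of the same guard, assuming pytz is installed:

import datetime
import pytz

UTC = pytz.UTC

time_stamp = None  # caller supplied no lower bound
if time_stamp is None:
    time_stamp = datetime.datetime.min  # "beginning of time"
if time_stamp.tzinfo is None:
    time_stamp = UTC.localize(time_stamp)  # now comparable with aware datetimes

print(time_stamp)  # 0001-01-01 00:00:00+00:00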
github hyperledger / indy-plenum / plenum / common / metrics_stats.py View on Github
def trunc_ts(ts: datetime, step: timedelta):
    base = datetime.min.replace(year=2000)
    step_s = step.total_seconds()
    seconds = (ts - base).total_seconds()
    seconds = int(seconds / step_s) * step_s
    return (base + timedelta(seconds=seconds, milliseconds=500)).replace(microsecond=0)
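datetime.min.replace(year=2000) manufactures a fixed base epoch, 2000-01-01 00:00:00. Every timestamp is measured from that base in seconds, floored to a whole number of steps, and projected back, so all timestamps inside one window collapse to the same bucket; the extra 500 milliseconds followed by replace(microsecond=0) rounds fractional steps to the nearest whole second. A worked example, restating the function so it runs standalone:

from datetime import datetime, timedelta

def trunc_ts(ts, step):
    base = datetime.min.replace(year=2000)  # fixed epoch: 2000-01-01 00:00:00
    step_s = step.total_seconds()
    seconds = (ts - base).total_seconds()
    seconds = int(seconds / step_s) * step_s  # floor to a whole number of steps
    return (base + timedelta(seconds=seconds, milliseconds=500)).replace(microsecond=0)

# Both timestamps fall inside the same 5-minute window:
print(trunc_ts(datetime(2021, 4, 7, 12, 3, 14), timedelta(minutes=5)))  # 2021-04-07 12:00:00
print(trunc_ts(datetime(2021, 4, 7, 12, 4, 59), timedelta(minutes=5)))  # 2021-04-07 12:00:00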
github mangroveorg / datawinners / datawinners / sms / admin.py View on Github
def export_sms_details_to_excel(modeladmin, request, query_set):
        rows = []

        textSearchFilter = get_text_search_filter(request.GET, SMSAdmin.search_fields)
        adminPanelFilter = get_admin_panel_filter(request.GET)

        filteredSms = SMS.objects.all().filter(Q(**adminPanelFilter) & (textSearchFilter))
        for sms in filteredSms:
            delivered_date_time = ExcelDate(datetime.combine(sms.delivered_at, datetime.min.time()),
                                            'dd.mm.yyyy') if sms.delivered_at else None
            rows.append([sms.organization_id, sms.status, delivered_date_time, sms.msg_from, sms.msg_to, sms.msg_type, sms.message])

        headers = ['Organisation Id', 'Status', 'Delivery Date', 'Message from Number', 'Message to Number', 'Message Type', 'Content']
        response = export_to_new_excel(headers, rows, 'sms_list')
        return response
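datetime.min.time() is a compact way to spell midnight, time(0, 0): combined with a bare date it yields a datetime at 00:00:00, which is what the export needs since delivered_at carries no time component of its own here. A minimal sketch:

from datetime import date, datetime

delivered_at = date(2024, 3, 15)  # a date without a time component
as_datetime = datetime.combine(delivered_at, datetime.min.time())
print(as_datetime)  # 2024-03-15 00:00:00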
github the-blue-alliance / the-blue-alliance / helpers / season_helper.py View on Github
def stop_build_datetime_est(year=datetime.now().year):
        """ Computes day teams are done working on robots. The stop build day is kickoff + 6 weeks + 3 days. Set to 23:59:59 """
        stop_build_date = datetime.combine(SeasonHelper.kickoff_datetime_est(year).date() + timedelta(days=4, weeks=6), datetime.min.time()) - timedelta(seconds=1)
        return EST.localize(stop_build_date)  # Make our timezone unaware datetime timezone aware
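This is the same combine-with-midnight idiom: the kickoff date plus timedelta(days=4, weeks=6) at 00:00:00, minus one second, lands on 23:59:59 exactly 6 weeks and 3 days after kickoff, as the docstring promises.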
github superdesk / superdesk-planning / server / planning / planning / planning.py View on Github
None)
                    target_desk = coverage.get('assigned_to', original_coverage.get('assigned_to', {})).get('desk',
                                                                                                            None)

                    PlanningNotifications().notify_assignment(
                        coverage_status=coverage.get('workflow_status'),
                        target_desk=target_desk if target_user is None else None,
                        target_user=target_user,
                        contact_id=contact_id,
                        message='assignment_internal_note_msg',
                        coverage_type=get_coverage_type_name(
                            coverage.get('planning', {}).get('g2_content_type', '')),
                        slugline=coverage.get('planning', {}).get('slugline', ''),
                        internal_note=coverage.get('planning', {}).get('internal_note', ''))
                # If the scheduled time for the coverage changes
                if coverage.get('planning', {}).get('scheduled', datetime.min).strftime('%c') != \
                        original_coverage.get('planning', {}).get('scheduled', datetime.min).strftime('%c'):
                    target_user = coverage.get('assigned_to', original_coverage.get('assigned_to', {})).get('user',
                                                                                                            None)
                    target_desk = coverage.get('assigned_to', original_coverage.get('assigned_to', {})).get('desk',
                                                                                                            None)
                    PlanningNotifications().notify_assignment(
                        coverage_status=coverage.get('workflow_status'),
                        target_desk=target_desk if target_user is None else None,
                        target_user=target_user,
                        contact_id=contact_id,
                        message='assignment_due_time_msg',
                        due=utc_to_local(app.config['DEFAULT_TIMEZONE'],
                                         coverage.get('planning', {}).get('scheduled')).strftime('%c'),
                        coverage_type=get_coverage_type_name(
                            coverage.get('planning', {}).get('g2_content_type', '')),
                        slugline=coverage.get('planning', {}).get('slugline', ''))
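Supplying datetime.min as the .get() default lets .strftime('%c') be called unconditionally: a coverage with no scheduled time formats datetime.min on both sides of the comparison, so the due-time notification only fires when a real scheduled time has actually changed.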
github cyborgbackup / cyborgbackup / src / cyborgbackup / main / utils / task_manager.py View on Github
def cleanup_inconsistent_celery_tasks(self):
        '''
        Rectify cyborgbackup db <-> celery inconsistent view of jobs state
        '''
        last_cleanup = cache.get('last_celery_task_cleanup') or datetime.min.replace(tzinfo=utc)
        if (tz_now() - last_cleanup).total_seconds() < 60*3:  # .seconds would wrap at one day
            return

        logger.debug("Failing inconsistent running jobs.")
        celery_task_start_time = tz_now()
        active_task_queues, active_queues = self.get_active_tasks()
        cache.set('last_celery_task_cleanup', tz_now())

        if active_queues is None:
            logger.error('Failed to retrieve active tasks from celery')
            return None

        '''
        Only consider failing tasks on instances for which we obtained a task
        list from celery.
        '''
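Because tz_now() returns a timezone-aware datetime, the "never cleaned up" fallback must be aware as well, hence datetime.min.replace(tzinfo=utc); and the elapsed time is taken with total_seconds(), since the .seconds attribute wraps at one day and would misbehave against a sentinel centuries in the past. A minimal sketch using the standard library's timezone.utc in place of the project's utc object:

from datetime import datetime, timezone

last_cleanup = None  # e.g. nothing in the cache yet
last_cleanup = last_cleanup or datetime.min.replace(tzinfo=timezone.utc)

elapsed = datetime.now(timezone.utc) - last_cleanup  # both operands are aware
if elapsed.total_seconds() < 60 * 3:
    print("cleaned up recently, skipping")
else:
    print("running cleanup")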
github g0vhk-io / g0vhk_legco_web / gov_track_hk_web / api / views.py View on Github
def list(self, request):
        bills = Bill.objects.filter(third_reading__third_reading_date=datetime.min)
        output = []
        for bill in bills:
            output.append({'title_en': bill.bill_title_en, 'title_ch': bill.bill_title_ch, 'id': bill.id})
        return Response(output)
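datetime.min here doubles as a database-level "not set" marker: bills whose third_reading_date still equals datetime.min presumably have had no third reading yet, and the endpoint lists exactly those.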
github mechaphish / worker / worker / workers / were_rabbit.py View on Github
"""Run Were Rabbit crash explorer."""

        assert not self._cs.is_multi_cbn, "WereRabbit should only be scheduled for single CBs"

        self._timeout = job.limit_time

        # first, get the crashes we have discovered so far; these will be
        # used to seed the crash explorer
        LOG.info("Gathering all found crashes")
        all_crashes = list(self._cs.crashes)
        if len(all_crashes) > 0:
            self._seen.update(str(c.blob) for c in all_crashes)
        else:
            raise Exception("No crashes found to explore (why was I scheduled?)")

        self._last_sync_time = datetime.min

        LOG.info("Starting up crash fuzzer")
        self._fuzzer = fuzzer.Fuzzer(self._cbn.path, self._workdir,
                                     self._job.limit_cpu, seeds=self._seen,
                                     create_dictionary=True, crash_mode=True)

        LOG.info("Created crash fuzzer")
        self._fuzzer.start()
        for _ in range(15):
            if self._fuzzer.alive:
                break
            time.sleep(1)
        else:
            raise Exception("Crash fuzzer failed to start")

        LOG.info("Started crash fuzzer")
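Once more the sentinel pattern: initializing self._last_sync_time to datetime.min makes the first sync-interval check see an arbitrarily old timestamp, so the first synchronization runs immediately.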