How to use the dbutils.SQLExec function in DBUtils

To help you get started, we’ve selected a few dbutils.SQLExec examples based on popular ways it is used in public projects.

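All of the snippets below follow the same pattern: obtain a database cursor, pass it to SQLExec together with a pyformat-parameterized SQL string (placeholders such as %(name)s) and a dict of bind values, then read the result rows from the cursor and close it. Here is a minimal sketch of that pattern; the import line, the get_cursor helper, and the example query against gps_segments are assumptions drawn from the snippets below rather than a documented API.

# Minimal sketch of the shared SQLExec call pattern (import path, get_cursor
# helper, and table name are assumptions taken from the snippets below).
from dbutils import SQLExec, get_cursor

def count_vehicle_segments(vehicle_id):
  # Bind values are passed as a dict and referenced with %(name)s placeholders,
  # so the database driver handles quoting instead of string formatting.
  cur = get_cursor()
  SQLExec(cur,
          "select count(*) from gps_segments where vehicle_id=%(vid)s",
          {'vid': vehicle_id})
  count = cur.fetchone()[0]
  cur.close()
  return count

Results can be consumed either with fetchone()/fetchall() or by iterating over the cursor directly, as several of the examples below do.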

github cbick / gps2gtfs / core / src / dbqueries.py View on Github external
def load_gps_segment_header(segment_id):
  """
  Given a segment ID, returns:
     (trip_id,trip_date,vehicle_id,schedule_error,offset)
  where trip_id is the gtfs trip ID, trip_date is the date on whose schedule
  the trip took place, vehicle_id is the gps vehicle's ID, schedule_error
  is the measured error between the GPS route and the GTFS schedule, and 
  offset is the number of seconds to subtract from any GTFS schedule times.
  """
  sql_header = """select trip_id, trip_date, vehicle_id, schedule_error,
                    schedule_offset_seconds
                  from gps_segments
                  where gps_segment_id=%(segID)s"""

  cur = get_cursor()
  SQLExec(cur,sql_header,{'segID':segment_id});
  header = [r for r in cur][0];
  cur.close()

  trip_id,trip_date,veh_id,sched_err,sched_off= \
      map(lambda c:header[c],
          ('trip_id','trip_date','vehicle_id','schedule_error',
           'schedule_offset_seconds'));
  return trip_id,trip_date,veh_id,sched_err,sched_off

github cbick / gps2gtfs / realtime / src / rtqueries.py View on Github external
def create_observation_row( trip_id, stop_id, day_of_week, stop_sequence, 
                             lateness_minutes, initial_num_obs=1 ):
  obs_id = get_observation_stop_id(trip_id,stop_id,day_of_week,stop_sequence,
                                   auto_create = True)
  sql = """\
insert into simplified_lateness_observations 
  ( minutes_late, num_observations, observed_stop_id )
values
  ( %(minutes)s, %(init)s, %(obsid)s )
"""
  cur = get_cursor()
  SQLExec(cur, sql, {'minutes':lateness_minutes,'init':initial_num_obs,
                     'obsid':obs_id})
  cur.close()

github cbick / gps2gtfs / common / src / ServiceDateHandler.py View on Github external
    day = min_date
    if isinstance(day,basestring):
      day = ServiceDateHandler.parseDate(day)
      max_date = ServiceDateHandler.parseDate(max_date)

    while day <= max_date:
      service_ids = tuple(self.effective_service_ids(day));

      # If it already exists, don't do anything
      if self.existing_combos.has_key(service_ids):
        pass;
      # Otherwise, put it in the db
      else:
        db.SQLExec(cur,"""insert into service_combo_ids values (DEFAULT)""");
        db.SQLExec(cur,"""select currval('service_combo_ids_combination_id_seq')""");
        combo_id = int(cur.fetchone()[0]);

        self.existing_combos[service_ids] = combo_id;
        self.combos[combo_id] = service_ids;

        insert_sql = """insert into service_combinations (combination_id,
                        service_id) values (%(combo)s,%(service)s)"""

        if __debug__:
          db.SQLExec(cur,"""select count(*) from gtf_trips 
                         where service_id in ('%(sids)s')""" %
                      {'sids':"','".join(service_ids)}
                  );
          print "======== Creating Combo ========="
          print "ID:",combo_id
          print "Service IDs:",service_ids

github cbick / gps2gtfs / realtime / src / rtqueries.py View on Github external
  SQLExec(cur,"select max(observed_stop_id) from observation_attributes")
  r = list(cur)

  if len(r) == 0 or r[0][0] is None:
    newid = 0
  else:
    newid = r[0][0] + 1

  sql = """\
insert into observation_attributes
  (observed_stop_id, trip_id, stop_id, stop_sequence, day_of_week)
values
  ( %(osid)s, %(tid)s, %(sid)s, %(seq)s, %(dow)s )
"""

  SQLExec(cur, sql, {'osid':newid, 'tid':trip_id, 'sid':stop_id,
                     'seq':stop_sequence, 'dow':day_of_week})
  cur.close()

  return newid

github cbick / gps2gtfs / core / src / dbqueries.py View on Github external
  'reported_update_time'
  """
  if len(dirtags) == 0:
    return []
  p = {}
  for i,d in enumerate(dirtags):
    p['k'+str(i)] = d;
  sql = """SELECT id,lat,lon,routetag,dirtag,reported_update_time 
             from vehicle_track 
             where dirtag IN ( %s )
           order by reported_update_time asc""" \
      % (','.join(map(lambda k: "%("+k+")s", p.keys())) ,)

  cur = get_cursor();
  print "Executing..."
  SQLExec(cur, sql, p);
  print "Retrieving..."
  ret = cur.fetchall();
  print "...done."

  cur.close();
  return ret;

github cbick / gps2gtfs / postprocessing / src / Stats.py View on Github external
def independent_sampling_cdf(rows=None):
  
  if rows is None:
    cur = db.get_cursor();
    rows = []
    print "Selecting..."
    # For each gps_segment we want to randomly select a sample
    for i in range(58903):
      if (i+1)%(58903/100)==0:
        print " %d/%d"%(i+1,58903)
      if random() < 0.5: 
        continue
      db.SQLExec(cur,
"""select lateness,trip_stop_weight 
   from datamining_table dm natural join trip_stop_weights tsw
     where gps_segment_id=%(gseg)s and lateness is not null 
     order by random() limit 1""",
                 {'gseg':i});
      if cur.rowcount > 0:
        srow = cur.fetchall()[0]; #should be just one
        rows.append(srow)
    cur.close()
    print len(rows),"rows retrieved."

  try:
    data = array([(r['lateness'],r['trip_stop_weight']) for r in rows])
    x,p,a_n = ecdf(data,weighted=True)
  
    figure()

github cbick / gps2gtfs / core / src / dbqueries.py View on Github external
  Keys:
  'shape_pt_lat',
  'shape_id',
  'shape_pt_lon',
  'shape_pt_sequence',
  'shape_dist_traveled'
  """
  cur = get_cursor();
  if not isinstance(trip_id,basestring):
    trip_id = str(trip_id);

  SQLExec(cur,"select * from gtf_trips where trip_id=%(id)s",
          {'id':trip_id});
  trip_header = cur.next();

  SQLExec(cur,
          "select * from gtf_stop_times natural join gtf_stops \
              where trip_id=%(id)s order by stop_sequence",
          {'id':trip_id});
  stops = [row for row in cur];

  SQLExec(cur,
          "select * from gtf_shapes where shape_id = %(id)s \
              order by shape_pt_sequence",
          {'id':trip_header['shape_id']});
  shape = [row for row in cur];
  
  cur.close();
  return (trip_header, stops, shape);

github cbick / gps2gtfs / realtime / src / rtqueries.py View on Github external
def measure_prob_mass( trip_id, stop_id, day_of_week, stop_sequence, 
                       lateness_bounds ):
  sql = """\
select num_observations, minutes_late
from simplified_lateness_observations slo
  inner join observation_attributes oa
    on slo.observed_stop_id = oa.observed_stop_id
      and oa.trip_id=%(tid)s
      and oa.stop_sequence=%(seq)s
      and oa.day_of_week=%(dow)s
"""

  cur = get_cursor()
  SQLExec(cur, sql, {'tid':trip_id,'seq':stop_sequence,'dow':day_of_week})

  rows = map( lambda r: (r['num_observations'],r['minutes_late']),
              cur.fetchall() );
  
  cur.close()

  reducer = lambda l,r: l+r[0]
  total = reduce( reducer, rows, 0 )
  sums = [0] * len(lateness_bounds)

  for i,(min,max) in enumerate(lateness_bounds):
    sums[i] = reduce(reducer,
                     filter( lambda r: min<=r[1]<=max, rows ),
                     0)
  
  if total == 0: