How to use the pymysql.err.IntegrityError exception in PyMySQL

To help you get started, we’ve selected a few PyMySQL examples that show how pymysql.err.IntegrityError is used in popular public projects.

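Before the project excerpts, here is a minimal sketch of the most common pattern: catching pymysql.err.IntegrityError around an INSERT that may violate a UNIQUE or foreign-key constraint. The connection settings, table, and column names below are placeholders, not taken from any of the projects listed.

import pymysql

# Placeholder connection settings -- adjust for your own database.
connection = pymysql.connect(host='localhost', user='user',
                             password='secret', database='example')

try:
    with connection.cursor() as cursor:
        # Assumes a hypothetical `users` table with a UNIQUE index on `email`.
        cursor.execute(
            'INSERT INTO users (email, name) VALUES (%s, %s)',
            ('alice@example.com', 'Alice'))
    connection.commit()
except pymysql.err.IntegrityError as e:
    # e.args holds (errno, message); errno 1062 means a duplicate key.
    connection.rollback()
    print('Integrity constraint violated:', e)
finally:
    connection.close()

pymysql.err.IntegrityError is a subclass of pymysql.err.DatabaseError, so a broader except pymysql.err.DatabaseError handler would also catch it.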

github CCExtractor / sample-platform / tests / test_ci / TestControllers.py
def test_finish_type_request_with_error(self, mock_g, mock_result, mock_rt):
        """
        Test function finish_type_request with error in database commit.
        """
        from mod_ci.controllers import finish_type_request
        from pymysql.err import IntegrityError

        mock_log = MagicMock()
        mock_request = MagicMock()
        mock_request.form = {
            'test_id': 1,
            'runTime': 1,
            'exitCode': 0
        }
        mock_g.db.commit.side_effect = IntegrityError

        finish_type_request(mock_log, 1, MagicMock(), mock_request)

        mock_log.debug.assert_called_once()
        mock_rt.query.filter.assert_called_once_with(mock_rt.id == 1)
        mock_result.assert_called_once_with(mock.ANY, mock.ANY, 1, 0, mock.ANY)
        mock_g.db.add.assert_called_once_with(mock.ANY)
        mock_g.db.commit.assert_called_once_with()
        mock_log.error.assert_called_once()

github grin-pool / grin-pool / grin-py / services / blockWatcher.py
                                   edge_bits = response["header"]["edge_bits"],
                                   total_difficulty = response["header"]["total_difficulty"],
                                   secondary_scaling = response["header"]["secondary_scaling"],
                                   num_inputs = len(response["inputs"]),
                                   num_outputs = len(response["outputs"]),
                                   num_kernels = len(response["kernels"]),
                                   fee = sum(k["fee"] for k in response["kernels"]),
                                   lock_height = response["kernels"][0]["lock_height"] if(len(response["kernels"])>0) else 0,
                                   total_kernel_offset = response["header"]["total_kernel_offset"],
                                   state = "new")
                    # Batch inserts when catching up
                    database.db.getSession().add(new_block)
                    if( (height % BATCHSZ == 0) or (height >= (latest-10)) ):
                        database.db.getSession().commit()
                    height = height + 1
                except (sqlalchemy.exc.IntegrityError, pymysql.err.IntegrityError):
                    LOGGER.warn("Attempted to re-add block: {}".format(response["header"]["height"]))
                    database.db.getSession().rollback()
                    latest_block = Blocks.get_latest()
                    height = latest_block.height + 1
                    sleep(check_interval)
            sys.stdout.flush()
            sleep(check_interval)
        except Exception as e:
            LOGGER.exception("Something went wrong: {}".format(repr(e)))
            database.db.getSession().rollback()
            sys.stdout.flush()
            sleep(check_interval)
    # Should never get here, but....
    LOGGER.warn("=== Completed {}".format(PROCESS))
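
The excerpt above catches both sqlalchemy.exc.IntegrityError and pymysql.err.IntegrityError: SQLAlchemy normally wraps the driver exception, while code that talks to PyMySQL directly sees the raw one. A minimal sketch of the same commit-and-rollback idea, assuming a SQLAlchemy session backed by PyMySQL (the commit_or_rollback helper is ours, not from the project):

import pymysql
import sqlalchemy.exc

def commit_or_rollback(session):
    """Commit the session, rolling back on any integrity violation (sketch)."""
    try:
        session.commit()
        return True
    except (sqlalchemy.exc.IntegrityError, pymysql.err.IntegrityError) as exc:
        session.rollback()
        # When SQLAlchemy wraps the driver error, the original
        # pymysql.err.IntegrityError is reachable via exc.orig.
        driver_error = getattr(exc, 'orig', exc)
        print('Integrity error:', driver_error)
        return False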

github Times125 / encyclopediaCrawler / encyclopediaCrawler / db / dao.py
def add_one(cls, data):
        res = 0
        with get_db_session() as db_session:
            try:
                db_session.add(data)
                db_session.commit()
                res = 1
                return res
            except (InternalError, SqlalchemyIntegrityError, PymysqlIntegrityError) as e:
                db_session.rollback()
                db_logger.error("exception '{}' happened when add data".format(e))
                return res

github SpiderClub / weibospider / db / user.py
def save_users(users):
    try:
        db_session.add_all(users)
        db_session.commit()
    except (SI, PI, InvalidRequestError):  # SI / PI: the SQLAlchemy and PyMySQL IntegrityError classes imported elsewhere in the module
        for user in users:
            save_user(user)

github everyclass / everyclass-server / everyclass / server / db / pool.py
import logging
import threading
import warnings

import pymysql

warnings.filterwarnings('error', category=pymysql.err.Warning)
# use logging module for easy debug
logger = logging.getLogger(__name__)


class Connection(pymysql.connections.Connection):
    """
    Return a connection object with or without connection_pool feature.
    This is all the same with pymysql.connections.Connection instance except that with connection_pool feature:
        the __exit__() method additionally put the connection back to it's pool
    """
    _pool = None
    _reusable_exception = (pymysql.err.ProgrammingError, pymysql.err.IntegrityError, pymysql.err.NotSupportedError)

    def __init__(self, *args, **kwargs):
        pymysql.connections.Connection.__init__(self, *args, **kwargs)
        self.args = args
        self.kwargs = kwargs

    def __exit__(self, exc, value, traceback):
        """
        Overwrite the __exit__() method of pymysql.connections.Connection
        Base action: on successful exit, commit. On exception, rollback
        With pool additional action: put connection back to pool
        """
        pymysql.connections.Connection.__exit__(self, exc, value, traceback)
        if self._pool:
            if not exc or exc in self._reusable_exception:
                '''reusable connection'''

github sunhailin-Leo / BOC_FER_Spider / BOC_FER_Spider / pipelines.py
def _conditional_insert(self, tx, item):
        params = (
                   item['currency_name'],
                   item['buying_rate'],
                   item['cash_buying_rate'],
                   item['selling_rate'],
                   item['cash_selling_rate'],
                   item['boe_conversion_rate'],
                   item['rate_time'],
                   item['md5_str'])
        try:
            tx.execute(INSERT_SQL, params)
        except pymysql.err.IntegrityError:
            # Typically a duplicate row (unique key already present); skip it.
            pass

github Fuyukai / asyncqlio / asyncqlio / backends / mysql / aiomysql.py
async def execute(self, sql: str, params=None):
        """
        Executes some SQL in the current transaction.
        """
        # pass DictCursor in order to get a dict-like cursor back
        # this will use the custom DictRow class passed from before
        cursor = await self.connection.cursor(cursor=aiomysql.DictCursor)
        # the doc lies btw
        # we can pass a dict in instead of a list/tuple
        # i don't fucking trust this at all though.
        try:
            res = await cursor.execute(sql, params)
        except pymysql.err.IntegrityError as e:
            raise IntegrityError(*e.args)
        except (pymysql.err.ProgrammingError, pymysql.err.InternalError) as e:
            raise DatabaseException(*e.args)
        finally:
            await cursor.close()
        return res

github linkedin / iris / src / iris / bin / iris_ctl.py
    config = yaml.safe_load(config_file)

    click.echo('Deleting plan %s' % plan)
    click.confirm('Do you want to continue?', abort=True)

    with db_from_config(config) as (conn, cursor):
        try:
            cursor.execute('DELETE plan_active FROM plan_active JOIN plan ON plan_active.`plan_id` = plan.`id` '
                           'WHERE plan.`name` = %s', plan)
            cursor.execute('DELETE plan_notification '
                           'FROM plan_notification JOIN plan ON plan_notification.`plan_id` = plan.`id` '
                           'WHERE plan.`name` = %s', plan)
            cursor.execute('DELETE FROM plan WHERE `name` = %s', plan)
            if cursor.rowcount == 0:
                raise click.ClickException('No plan found with given name')
        except IntegrityError as e:
            cursor.execute('''SELECT `message`.`id` FROM
                                  message JOIN plan ON `message`.`plan_id` = `plan`.`id`
                              WHERE plan.`name` = %s''', plan)
            msgs = cursor.fetchall()
            cursor.execute('''SELECT incident.`id` FROM incident JOIN plan
                                  ON `incident`.`plan_id` = `plan`.`id`
                              WHERE plan.`name` = %s''', plan)
            incidents = cursor.fetchall()

            if msgs:
                click.echo(click.style('Plan referenced by messages with ids:\n%s' % [m[0] for m in msgs],
                                       fg='red'),
                           err=True)
            if incidents:
                click.echo(click.style('Plan referenced by incidents with ids:\n%s' % [i[0] for i in incidents],
                                       fg='red'),

github hellobiek / smart_deal_tool / crawler / dspider / poster.py
            #  (2013, 'Lost connection to MySQL server during query ([Errno 110] Connection timed out)')
            #  (2003, "Can't connect to MySQL server on '127.0.0.1' ([WinError 10061] No connection could be made because the target machine actively refused it)")
            #  (0, '')    # after crawl started: sudo service mysqld stop
            logger.info('MySQL: exception {} {}, Trying to recommit in {} sec'.format(failure.type, args, self.mysql_reconnect_wait)) 
            # https://twistedmatrix.com/documents/12.1.0/core/howto/time.html
            from twisted.internet import task
            from twisted.internet import reactor
            task.deferLater(reactor, self.mysql_reconnect_wait, self.store)
            return
        elif failure.type in [DataError, InternalError]:
            #  (1264, "Out of range value for column 'position_id' at row 2")
            #  (1292, "Incorrect date value: '1977-06-31' for column 'release_day' at row 26")
            if failure.value.args[0] != 1062:
                logger.warn('MySQL: {} {} exception from item {}'.format(failure.type, args, item))
            return
        elif failure.type in [IntegrityError]:    
            #  (1048, "Column 'name' cannot be null") films 43894
            logger.warn('MySQL: {} {} exception from some items'.format(failure.type, args))
            return
        else:
            logger.error('MySQL: {} {} unhandled exception'.format(failure.type, args))
            return
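
Several of these projects branch on the MySQL error number carried in the exception arguments, as the comments above do (1062 is a duplicate key, 1048 a NOT NULL violation). A rough sketch of that pattern, with a hypothetical insert_row helper and caller-supplied SQL and parameters:

import pymysql

DUPLICATE_ENTRY = 1062  # MySQL error code for a duplicate unique key

def insert_row(connection, sql, params):
    """Run an INSERT and report whether a new row was added (sketch)."""
    try:
        with connection.cursor() as cursor:
            cursor.execute(sql, params)
        connection.commit()
        return True
    except pymysql.err.IntegrityError as e:
        connection.rollback()
        if e.args[0] == DUPLICATE_ENTRY:
            # The row already exists; treat the insert as a no-op.
            return False
        # Anything else (e.g. 1048, "Column ... cannot be null") is a real problem.
        raise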

github lucas-tulio / simple-reddit-crawler / reader / reader.py
	# Note: this project uses Python 2 print statements.
	newThreads = 0
	existingThreads = 0
	for t in subreddit:
		
		# Get the thread info
		threadId = t['data']['id']
		title = t['data']['title']
		permalink = t['data']['permalink']
		score = t['data']['score']
		created = t['data']['created_utc']
		
		# Save it to the database. Duplicate threads will be ignored due to the UNIQUE KEY constraint
		try:
			cur.execute("""INSERT INTO threads (id_thread, id_sub, title, url, score, created) values (%s, 1, %s, %s, %s, %s)""", (threadId, title, permalink, int(score), created))
			newThreads += 1
			print "New thread: " + title
		except pymysql.err.IntegrityError as e:
			existingThreads += 1

	# Print a summary
	print "Got " + str(newThreads + existingThreads) + " threads."
	print "Inserted " + str(newThreads) + " new threads"
	print "Found " + str(existingThreads) + " already existing threads"
	
	# Log totals
	global totalNewThreads
	totalNewThreads += newThreads
	global totalExistingThreads
	totalExistingThreads += existingThreads