How to use the pymysql.err.OperationalError exception in PyMySQL

To help you get started, we’ve selected a few PyMySQL examples based on popular ways pymysql.err.OperationalError is used in public projects.

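pymysql.err.OperationalError is raised for problems outside the program's control, such as an unreachable server, a dropped connection, or (as in the first example below) lock waits and deadlocks; its args tuple carries the MySQL error number and message. Here is a minimal sketch of the basic pattern. The host, credentials, and error handling are illustrative and are not taken from the projects shown below.

import pymysql

def connect_or_report():
    try:
        # connection parameters here are placeholders
        return pymysql.connect(host="127.0.0.1", user="app", password="secret",
                               database="appdb", connect_timeout=5)
    except pymysql.err.OperationalError as e:
        errno, message = e.args[0], e.args[1]
        if errno == 2003:  # "Can't connect to MySQL server" (client-side error)
            raise RuntimeError("MySQL server unreachable: %s" % message) from e
        raise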

github IntegralDefense/ACE: lib/saq/test_database.py
def _t2():
    _uuid = str(uuid.uuid4())
    _lock_uuid = str(uuid.uuid4())
    try:
        with get_db_connection() as db:
            c = db.cursor()
            execute_with_retry(db, c, "UPDATE locks SET lock_owner = 'whatever'")
            # wait for signal to continue
            time.sleep(2)
            execute_with_retry(db, c, "INSERT INTO locks ( uuid, lock_time ) VALUES ( %s, NOW() )", (_uuid,))
            db.commit()
    except pymysql.err.OperationalError as e:
        # 1213 = ER_LOCK_DEADLOCK, 1205 = ER_LOCK_WAIT_TIMEOUT
        if e.args[0] == 1213 or e.args[0] == 1205:
            deadlock_event.set()
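The example above compares e.args[0] against MySQL error codes 1213 (ER_LOCK_DEADLOCK) and 1205 (ER_LOCK_WAIT_TIMEOUT). A sketch of the same idea as a standalone retry helper, following that example's assumption that both codes surface as OperationalError; the function name, retry count, and backoff are illustrative, not part of the project shown.

import time
import pymysql

RETRIABLE = {1213, 1205}  # deadlock, lock wait timeout

def execute_with_deadlock_retry(db, sql, params=None, attempts=3):
    for attempt in range(attempts):
        try:
            with db.cursor() as cursor:
                cursor.execute(sql, params)
            db.commit()
            return
        except pymysql.err.OperationalError as e:
            if e.args[0] in RETRIABLE and attempt < attempts - 1:
                db.rollback()                    # drop the failed transaction
                time.sleep(0.5 * (attempt + 1))  # simple linear backoff
                continue
            raise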
github aio-libs/aiomysql: aiomysql/connection.py
            if self.sql_mode is not None:
                await self.query("SET sql_mode=%s" % (self.sql_mode,))

            if self.init_command is not None:
                await self.query(self.init_command)
                await self.commit()

            if self.autocommit_mode is not None:
                await self.autocommit(self.autocommit_mode)
        except Exception as e:
            if self._writer:
                self._writer.transport.close()
            self._reader = None
            self._writer = None
            raise OperationalError(2003,
                                   "Can't connect to MySQL server on %r" %
                                   self._host) from e
github aiven/myhoard: myhoard/controller.py
                # update the "could have purged" timestamp that gets reported as a metric data point, because for an inactive
                # server this is expected behavior and we don't want the metric value to indicate any abnormality
                # in system behavior.
                if only_binlogs_without_gtids:
                    last_could_have_purged = time.time()
            else:
                # PURGE BINARY LOGS TO 'name' does not delete the file identified by 'name', so we need to increase
                # the index by one to also remove the last file
                base_name, index = binlogs_to_purge[-1]["file_name"].rsplit(".", 1)
                up_until_index = int(index) + 1
                up_until_name = relay_log_name(prefix=base_name, index=up_until_index, full_path=False)
                self.log.info("Purging %s binlogs, up until %r", len(binlogs_to_purge), up_until_name)
                try:
                    with mysql_cursor(**self._get_long_timeout_params()) as cursor:
                        cursor.execute(f"PURGE BINARY LOGS TO '{up_until_name}'")
                except pymysql.err.OperationalError as ex:
                    if mysql_maybe_not_running and ex.args[0] == ERR_CANNOT_CONNECT:
                        self.log.warning("Failed to connect to MySQL to purge binary logs: %r", ex)
                        return
                    if ex.args[0] == ERR_TIMEOUT:
                        # Timeout here doesn't matter much. We'll just retry momentarily
                        self.log.warning("Timeout while purging binary logs: %r", ex)
                        return
                    if ex.args[0] == ERR_CANNOT_CONNECT:
                        # Connection refused doesn't matter much - similar to timeout. We'll retry.
                        self.log.warning("Connection refused while purging binary logs: %r", ex)
                        return
                    raise
                last_purge = time.time()
                last_could_have_purged = last_purge
        finally:
            self.state_manager.update_state(
github tp4a/teleport: server/www/teleport/app/eom_common/eomcore/eom_mysql.py
        if not self.db:
            raise NameError("Database information is not set")
        # self.conn = pymysql.connect(host=self.host, port=self.port, user=self.user, password=self.pwd,
        # login_timeout=self.login_timeout, database=self.db, charset="utf8")
        try:
            if self.conn is not None:
                self.conn.ping()
            else:
                self.conn = pymysql.connect(host=self.host,
                                            user=self.user,
                                            passwd=self.pwd,
                                            db=self.db,
                                            port=self.port,
                                            connect_timeout=self.login_timeout,
                                            charset='utf8')
        except pymysql.err.OperationalError:
            log.e('pymysql: failed to connect to the database [%s:%d]\n' % (self.host, self.port))
            return None
        except Exception as e:
            log.e('con: failed to connect to the database [%s:%d]\n' % (self.host, self.port))
            return None

        cur = self.conn.cursor()
        if not cur:
            log.e('cur: failed to connect to the database [%s:%d]\n' % (self.host, self.port))
            raise NameError("Failed to connect to the database")
        else:
            return cur
github aio-libs/aiomysql: aiomysql/connection.py
                # Request server public key
                logger.debug("sha256: Requesting server public key")
                self.write_packet(b'\1')
                pkt = await self._read_packet()
                pkt.check_error()

        if pkt.is_extra_auth_data():
            self.server_public_key = pkt._data[1:]
            logger.debug(
                "Received public key:\n",
                self.server_public_key.decode('ascii')
            )

        if self._password:
            if not self.server_public_key:
                raise OperationalError("Couldn't receive server's public key")

            data = _auth.sha2_rsa_encrypt(
                self._password.encode('latin1'), self.salt,
                self.server_public_key
            )
        else:
            data = b''

        self.write_packet(data)
        pkt = await self._read_packet()
        pkt.check_error()
        return pkt
github aio-libs/aiomysql: aiomysql/connection.py
                    plugin_name is not None):
                await self._process_auth(plugin_name, auth_packet)
            else:
                # send legacy handshake
                data = _auth.scramble_old_password(
                    self._password.encode('latin1'),
                    auth_packet.read_all()) + b'\0'
                self.write_packet(data)
                await self._read_packet()
        elif auth_packet.is_extra_auth_data():
            if auth_plugin == "caching_sha2_password":
                await self.caching_sha2_password_auth(auth_packet)
            elif auth_plugin == "sha256_password":
                await self.sha256_password_auth(auth_packet)
            else:
                raise OperationalError("Received extra packet "
                                       "for auth method %r", auth_plugin)
github DataDog/integrations-core: mysql/datadog_checks/mysql/mysql.py
def _get_stats_from_innodb_status(self, db):
        # There are a number of important InnoDB metrics that are reported in
        # InnoDB status but are not otherwise present as part of the STATUS
        # variables in MySQL. The majority of these metrics are, however, reported
        # as part of the STATUS variables in Percona Server and MariaDB.
        # Requires the querying user to have the PROCESS privilege.
        try:
            with closing(db.cursor()) as cursor:
                cursor.execute("SHOW /*!50000 ENGINE*/ INNODB STATUS")
        except (pymysql.err.InternalError, pymysql.err.OperationalError, pymysql.err.NotSupportedError) as e:
            self.warning("Privilege error or engine unavailable accessing the INNODB status \
                         tables (must grant PROCESS): %s" % str(e))
            return {}

        if cursor.rowcount < 1:
            # No data from SHOW ENGINE STATUS, even though the engine is enabled.
            # EG: This could be an Aurora Read Instance
            self.warning("""'SHOW ENGINE INNODB STATUS' returned no data.
                If you are running an Aurora Read Instance, \
                this is expected and you should disable the innodb metrics collection""")
            return {}

        innodb_status = cursor.fetchone()
        innodb_status_text = innodb_status[2]

        results = defaultdict(int)
github Mausbiber/smartHome: libs/db.py
def query(self, sql, *arguments):
        try:
            cursor = self.conn.cursor(pymysql.cursors.DictCursor)
            cursor.execute(sql, arguments)
        except (AttributeError, pymysql.err.OperationalError):
            # connection is missing or has gone away: reconnect and retry the query once
            self.connect()
            cursor = self.conn.cursor(pymysql.cursors.DictCursor)
            cursor.execute(sql, arguments)
        return cursor
github aio-libs/aiomysql: aiomysql/connection.py
        # magic numbers:
        # 2 - request public key
        # 3 - fast auth succeeded
        # 4 - need full auth

        pkt.advance(1)
        n = pkt.read_uint8()

        if n == 3:
            logger.debug("caching sha2: succeeded by fast path.")
            pkt = await self._read_packet()
            pkt.check_error()  # pkt must be OK packet
            return pkt

        if n != 4:
            raise OperationalError("caching sha2: Unknown "
                                   "result for fast auth: {0}".format(n))

        logger.debug("caching sha2: Trying full auth...")

        if self._ssl_context:
            logger.debug("caching sha2: Sending plain "
                         "password via secure connection")
            self.write_packet(self._password.encode('latin1') + b'\0')
            pkt = await self._read_packet()
            pkt.check_error()
            return pkt

        if not self.server_public_key:
            self.write_packet(b'\x02')
            pkt = await self._read_packet()  # Request public key
            pkt.check_error()