How to use the influxdb.exceptions.InfluxDBClientError function in influxdb

To help you get started, we’ve selected a few influxdb examples based on popular ways the library is used in public projects.

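For orientation, here is a minimal sketch of the basic pattern the examples below build on: the client raises InfluxDBClientError when the server rejects a request, and the exception carries the HTTP status code and the server’s error body. The host, port, database and query are placeholders rather than values from any of the projects listed.

from influxdb import InfluxDBClient
from influxdb.exceptions import InfluxDBClientError

# Placeholder connection details for a local InfluxDB 1.x instance.
client = InfluxDBClient(host='localhost', port=8086, database='example_db')

try:
    # A malformed query is rejected by the server with a 4xx response.
    client.query('SELECT value FROM WHERE')
except InfluxDBClientError as err:
    print('status code:', err.code)     # e.g. 400 for a parse error
    print('error body:', err.content)   # the server's error message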

github NAStools / homeassistant / tests / components / test_influxdb.py
    def test_setup_query_fail(self, mock_client):
        """Test the setup for query failures."""
        config = {
            'influxdb': {
                'host': 'host',
                'username': 'user',
                'password': 'pass',
            }
        }
        mock_client.return_value.query.side_effect = \
            influx_client.exceptions.InfluxDBClientError('fake')
        assert not setup_component(self.hass, influxdb.DOMAIN, config)
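
The same side_effect technique can be reproduced outside of Home Assistant; below is a minimal self-contained sketch using unittest.mock (the test class and the query string are hypothetical):

import unittest
from unittest import mock

import influxdb
from influxdb.exceptions import InfluxDBClientError


class QueryFailureTest(unittest.TestCase):
    def test_query_error_is_raised(self):
        with mock.patch('influxdb.InfluxDBClient') as mock_client:
            # Any query() call on the patched client raises the error, as above.
            mock_client.return_value.query.side_effect = InfluxDBClientError('fake')
            client = influxdb.InfluxDBClient(host='localhost')
            with self.assertRaises(InfluxDBClientError) as ctx:
                client.query('SELECT * FROM anything')
            self.assertEqual('fake', ctx.exception.content)
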
github influxdata / influxdb-python / tests / influxdb / client_test_with_server.py
    def test_alter_retention_policy_invalid(self):
        self.cli.create_retention_policy('somename', '1d', 1)
        with self.assertRaises(InfluxDBClientError) as ctx:
            self.cli.alter_retention_policy('somename', 'db')
        self.assertEqual(400, ctx.exception.code)
        self.assertIn('{"error":"error parsing query: ',
                      ctx.exception.content)
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [{'duration': '0', 'default': True,
              'replicaN': 1, 'name': 'default'},
             {'duration': '24h0m0s', 'default': False,
              'replicaN': 1, 'name': 'somename'}],
            rsp
        )
github amancevice / influxalchemy / tests / client_tests.py
def test_query(mock_flux):
    db = influxdb.InfluxDBClient(database="fizz")
    db.query.side_effect = influxdb.exceptions.InfluxDBClientError(None)
    client = InfluxAlchemy(db)
    query = client.query(Measurement.new("buzz"))
    tools.assert_equal(str(query), "SELECT * FROM buzz;")
github ODM2 / CZ-Manager / odm2admin / management / commands / generate_influx_data.py
    def add_dataframe_to_database(self, dataframe, identifier):
        try:
            write_success = self.client.write_points(dataframe, identifier, time_precision='s', batch_size=self.batch_size)
            return len(dataframe) if write_success else 0
        except InfluxDBClientError as e:
            print('Error while writing to database {}: {}'.format(identifier, e))
            return 0
github influxdata / influxdb-python / influxdb / client.py
        response._msgpack = None

        def reformat_error(response):
            if response._msgpack:
                return json.dumps(response._msgpack, separators=(',', ':'))
            else:
                return response.content

        # if there's not an error, there must have been a successful response
        if 500 <= response.status_code < 600:
            raise InfluxDBServerError(reformat_error(response))
        elif response.status_code == expected_response_code:
            return response
        else:
            err_msg = reformat_error(response)
            raise InfluxDBClientError(err_msg, response.status_code)
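
Because the client raises InfluxDBServerError for 5xx responses and InfluxDBClientError (carrying the status code) for other unexpected responses, callers can separate retryable failures from permanent ones. A minimal sketch of that idea; safe_write and the connection details are hypothetical:

from influxdb import InfluxDBClient
from influxdb.exceptions import InfluxDBClientError, InfluxDBServerError

client = InfluxDBClient(host='localhost', database='example_db')  # placeholder


def safe_write(points, retries=3):
    """Write points, retrying only on server-side (5xx) failures."""
    for attempt in range(retries):
        try:
            return client.write_points(points)
        except InfluxDBServerError as err:
            # 5xx: transient server problem; the same payload may succeed later.
            print('attempt %d failed with a server error: %s' % (attempt + 1, err))
        except InfluxDBClientError as err:
            # 4xx: the request itself was rejected; retrying will not help.
            print('rejected with HTTP %s: %s' % (err.code, err.content))
            return False
    return False
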
github Isilon / isilon_data_insights_connector / influxdb_plugin.py
    LOG.debug("Writing %d points", num_points)
    write_index = 0
    points_written = 0
    while write_index < num_points:
        max_write_index = write_index + MAX_POINTS_PER_WRITE
        write_points = points[write_index:max_write_index]
        try:
            g_client.write_points(write_points)
            points_written += len(write_points)
        except InfluxDBServerError as svr_exc:
            LOG.error(
                "InfluxDBServerError: %s\nFailed to write points: %s",
                str(svr_exc),
                _get_point_names(write_points),
            )
        except InfluxDBClientError as client_exc:
            LOG.error(
                "InfluxDBClientError writing points: %s\n" "Error: %s",
                _get_point_names(write_points),
                str(client_exc),
            )
        except requests.exceptions.ConnectionError as req_exc:
            LOG.error(
                "ConnectionError exception caught writing points: %s\n" "Error: %s",
                _get_point_names(write_points),
                str(req_exc),
            )
        write_index += MAX_POINTS_PER_WRITE

    return points_written
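
The slicing above keeps each write request to a bounded number of points; here is a compact sketch of the same idea, where write_in_batches and the MAX_POINTS_PER_WRITE value are assumptions rather than code from the plugin:

from influxdb.exceptions import InfluxDBClientError, InfluxDBServerError

MAX_POINTS_PER_WRITE = 10000  # assumed batch size, not taken from the plugin


def write_in_batches(client, points):
    """Write points in fixed-size slices, skipping any slice that errors out."""
    written = 0
    for start in range(0, len(points), MAX_POINTS_PER_WRITE):
        batch = points[start:start + MAX_POINTS_PER_WRITE]
        try:
            client.write_points(batch)
            written += len(batch)
        except (InfluxDBClientError, InfluxDBServerError) as err:
            # A failed slice is reported and skipped so the rest can proceed.
            print('batch starting at index %d failed: %s' % (start, err))
    return written
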
github faucetsdn / faucet / faucet / gauge_influx.py
                client = InfluxDBClient(
                    host=self.conf.influx_host,
                    port=self.conf.influx_port,
                    username=self.conf.influx_user,
                    password=self.conf.influx_pwd,
                    database=self.conf.influx_db,
                    timeout=self.conf.influx_timeout)
                if client:
                    if client.write_points(points=points, time_precision='s'):
                        return True
                    self.logger.warning(
                        '%s failed to update InfluxDB' % self.ship_error_prefix)
                else:
                    self.logger.warning(
                        '%s error connecting to InfluxDB' % self.ship_error_prefix)
            except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout,
                    InfluxDBClientError, InfluxDBServerError) as err:
                self.logger.warning('%s %s' % (self.ship_error_prefix, err))
        return False
github VOLTTRON / volttron / volttron / platform / dbutils / influxdbutils.py
        query = 'SELECT value FROM {}'.format(measurement)
        if tags_conditions:
            query += ' WHERE {}'.format(tags_conditions)

    if start:
        start_time = format_timestamp(start)
        query += ' AND time >= \'%s\'' % start_time
    if end:
        end_time = format_timestamp(end)
        query += ' AND time <= \'%s\'' % end_time

    if agg_period:
        if not re.search(AGG_PERIOD_REGEX, agg_period):
            raise ValueError("Aggregation period {} is in the wrong format".format(agg_period))
        elif agg_period[-1] == 'M':  # InfluxDB only supports m, h, d and w, but not M (month)
            raise InfluxDBClientError("InfluxDB doesn't support GROUP BY month yet")

        if use_calendar_time_periods:
            query += ' GROUP BY time(%s)' % agg_period
        else:
            # @TODO: offset by now() is removed in newer versions.
            # Use InfluxDB < 1.2.4 to make this work.
            query += ' GROUP BY time(%s, now())' % agg_period
    if order == "LAST_TO_FIRST":
        query += ' ORDER BY time DESC'

    query += ' LIMIT %d' % count
    if skip:
        query += ' OFFSET %d' % skip

    try:
        rs = client.query(query)
github metabrainz / listenbrainz-server / listenbrainz / listenstore / influx_listenstore.py
        if len(listens):
            # Enter a measurement to count items inserted
            submit = [{
                'measurement': TEMP_COUNT_MEASUREMENT,
                'tags': {
                    COUNT_MEASUREMENT_NAME: len(listens)
                },
                'fields': {
                    COUNT_MEASUREMENT_NAME: len(listens)
                }
            }]
            try:
                if not self.influx.write_points(submit):
                    self.log.error("Cannot write listen cound to influx. (write_points returned False)")
            except (InfluxDBServerError, InfluxDBClientError, ValueError) as err:
                self.log.error("Cannot write data to influx: %s, data: %s", str(err), json.dumps(submit, indent=3), exc_info=True)
                raise