How to use the influxdb.DataFrameClient class in influxdb

To help you get started, we’ve selected a few influxdb.DataFrameClient examples, based on popular ways it is used in public projects.
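
As a baseline before the examples, here is a minimal sketch of the typical write/read round trip; the host, port, credentials and the 'demo' database name are illustrative assumptions:

import pandas as pd
from influxdb import DataFrameClient

# Connect to a local InfluxDB instance and create a throwaway database.
client = DataFrameClient('localhost', 8086, 'root', 'root', 'demo')
client.create_database('demo')

# The DataFrame index must be a DatetimeIndex; it becomes the time column.
df = pd.DataFrame(data=list(range(3)),
                  index=pd.date_range(start='2014-11-16', periods=3, freq='H'),
                  columns=['value'])
client.write_points(df, 'demo', protocol='line')

# query() returns a dict mapping measurement names to DataFrames.
print(client.query('SELECT * FROM demo')['demo'])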

github influxdata / influxdb-python / tests / influxdb / client_test_with_server.py
def _setup_influxdb_server(inst):
    inst.influxd_inst = InfluxDbInstance(
        inst.influxdb_template_conf,
        udp_enabled=getattr(inst, 'influxdb_udp_enabled', False))

    inst.cli = InfluxDBClient('localhost',
                              inst.influxd_inst.http_port,
                              'root',
                              '',
                              database='db')
    if not using_pypy:
        inst.cliDF = DataFrameClient('localhost',
                                     inst.influxd_inst.http_port,
                                     'root',
                                     '',
                                     database='db')
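
DataFrameClient is constructed with the same arguments as InfluxDBClient; in influxdb-python it subclasses InfluxDBClient, so connection handling is identical and only the payloads differ (pandas DataFrames instead of lists of point dicts). The using_pypy guard presumably exists because pandas is not available in the project's PyPy test runs.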
github ivan-vasilev / atpy / tests / quandl / test_api.py
    def test_influxdb_cache(self):
        client = InfluxDBClient(host='localhost', port=8086, username='root', password='root', database='test_cache')

        try:
            client.drop_database('test_cache')
            client.create_database('test_cache')
            client.switch_database('test_cache')

            with InfluxDBCache(client=DataFrameClient(host='localhost', port=8086, username='root', password='root', database='test_cache')) as cache:
                listeners = SyncListeners()
                QuandlEvents(listeners)

                non_cache_data = get_table([{'datatable_code': 'SHARADAR/SF1', 'ticker': 'AAPL', 'dimension': 'MRY', 'qopts': {"columns": ['dimension', 'ticker', 'datekey', 'revenue']}},
                                            {'datatable_code': 'SHARADAR/SF1', 'ticker': 'IBM', 'dimension': 'MRY', 'qopts': {"columns": ['dimension', 'ticker', 'datekey', 'revenue']}}])

                items = list()
                for df in non_cache_data['SHARADAR/SF1']:
                    items.append(df.rename({'revenue': 'value', 'datekey': 'timestamp'}, axis=1).set_index('timestamp'))

                cache.add_to_cache('sf1', iter(items), tag_columns=['dimension', 'ticker'])
                cache_data = cache.request_data('sf1', tags={'ticker': {'AAPL', 'IBM'}})

                listeners = SyncListeners()
                QuandlEvents(listeners)
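
A hedged reading of this test: the cache wraps a DataFrameClient, and passing tag_columns=['dimension', 'ticker'] to add_to_cache presumably stores those DataFrame columns as InfluxDB tags, which is what lets request_data filter the cached series with tags={'ticker': {'AAPL', 'IBM'}} afterwards.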
github ivan-vasilev / atpy / scripts / update_influxdb_cache.py
    parser.add_argument('-host', type=str, default='localhost', help="InfluxDB host")
    parser.add_argument('-port', type=int, default=8086, help="InfluxDB host port")
    parser.add_argument('-user', type=str, default='root', help="InfluxDB username")
    parser.add_argument('-password', type=str, default='root', help="InfluxDB password")
    parser.add_argument('-database', type=str, default='cache', help="InfluxDB database name")
    parser.add_argument('-drop', action='store_true', help="Drop the database")
    parser.add_argument('-skip_if_older', type=int, default=None, help="Skip symbols that are already in the database but have had no activity for more than N days")
    parser.add_argument('-interval_len', type=int, default=None, required=True, help="Interval length")
    parser.add_argument('-interval_type', type=str, default='s', help="Interval type (seconds, days, etc)")
    parser.add_argument('-iqfeed_conn', type=int, default=10, help="Number of historical connections to IQFeed")
    parser.add_argument('-delta_back', type=int, default=10, help="Default number of years to look back")
    parser.add_argument('-symbols_file', type=str, default=None, help="path to a locally saved symbols file (to avoid downloading it every time)")
    args = parser.parse_args()

    client = DataFrameClient(host=args.host, port=args.port, username=args.user, password=args.password, database=args.database, pool_size=1)

    logging.getLogger(__name__).info("Updating database with arguments: " + str(args))

    if args.drop:
        client.drop_database(args.database)

    if args.database not in [d['name'] for d in client.get_list_database()]:
        client.create_database(args.database)
        client.query("ALTER RETENTION POLICY autogen ON cache DURATION INF REPLICATION 1 SHARD DURATION 2600w DEFAULT")

    client.switch_database(args.database)

    with IQFeedHistoryProvider(num_connections=args.iqfeed_conn) as history:
        all_symbols = {(s, args.interval_len, args.interval_type) for s in set(iqutil.get_symbols(symbols_file=args.symbols_file).keys())}
        update_to_latest(client=client, noncache_provider=noncache_provider(history), new_symbols=all_symbols, time_delta_back=relativedelta(years=args.delta_back),
                         skip_if_older_than=relativedelta(days=args.skip_if_older) if args.skip_if_older is not None else None)
github ivan-vasilev / atpy / atpy / data / intrinio / influxdb_cache.py
    def new_df_client(self):
        return DataFrameClient(**self.kwargs)
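
Keeping only the constructor kwargs and building a fresh DataFrameClient on demand is a useful pattern when each thread or process should own its own connection instead of sharing a single client instance.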
github ivan-vasilev / atpy / atpy / data / cache / influxdb_cache_requests.py
    def enable_mean(self, symbol: typing.Union[list, str] = None, bgn_prd: datetime.datetime = None, end_prd: datetime.datetime = None):
        """
        :param symbol: symbol or list of symbols
        :param bgn_prd: start datetime (exclusive)
        :param end_prd: end datetime (exclusive)
        :return: None; the per-symbol means are stored in self.means
        """
        query = "SELECT MEAN(delta) FROM (SELECT symbol, (close - open) / open as delta FROM bars" + \
                _query_where(interval_len=self.interval_len, interval_type=self.interval_type, symbol=symbol, bgn_prd=bgn_prd, end_prd=end_prd) + \
                ") GROUP BY symbol"

        rs = super(DataFrameClient, self.client).query(query, chunked=True)
        self.means = {k[1]['symbol']: next(data)['mean'] for k, data in rs.items()}
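
The super(DataFrameClient, self.client).query(...) call is the interesting part: it invokes InfluxDBClient.query on a DataFrameClient instance, skipping the DataFrame conversion so that a raw ResultSet comes back. ResultSet.items() yields ((measurement, tags), points) pairs, which is why k[1]['symbol'] reads the symbol tag and next(data) pulls the first aggregated point from each series.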
github influxdata / influxdb-python / examples / tutorial_pandas.py
def main(host='localhost', port=8086):
    """Instantiate the connection to the InfluxDB client."""
    user = 'root'
    password = 'root'
    dbname = 'demo'
    protocol = 'line'

    client = DataFrameClient(host, port, user, password, dbname)

    print("Create pandas DataFrame")
    df = pd.DataFrame(data=list(range(30)),
                      index=pd.date_range(start='2014-11-16',
                                          periods=30, freq='H'), columns=['0'])

    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Write DataFrame")
    client.write_points(df, 'demo', protocol=protocol)

    print("Write DataFrame with Tags")
    client.write_points(df, 'demo',
                        {'k1': 'v1', 'k2': 'v2'}, protocol=protocol)
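
A natural next step is reading the data back; a minimal hedged sketch, continuing inside main() with the 'demo' measurement written above:

    print("Read DataFrame")
    result = client.query("SELECT * FROM demo")
    # The result is a dict keyed by measurement name; each value is a DataFrame.
    print(result['demo'].head())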
github ivan-vasilev / atpy / atpy / data / cache / influxdb_cache_requests.py
    def enable_stddev(self, symbol: typing.Union[list, str] = None, bgn_prd: datetime.datetime = None, end_prd: datetime.datetime = None):
        """
        :param symbol: symbol or list of symbols
        :param bgn_prd: start datetime (exclusive)
        :param end_prd: end datetime (exclusive)
        :return: None; the per-symbol standard deviations are stored in self.stddev
        """
        query = "SELECT STDDEV(delta) FROM (SELECT symbol, (close - open) / open as delta FROM bars" + \
                _query_where(interval_len=self.interval_len, interval_type=self.interval_type, symbol=symbol, bgn_prd=bgn_prd, end_prd=end_prd) + \
                ") GROUP BY symbol"

        rs = super(DataFrameClient, self.client).query(query, chunked=True)
        self.stddev = {k[1]['symbol']: next(data)['stddev'] for k, data in rs.items()}
github hellobiek / smart_deal_tool / cinfluxdb.py
    def __init__(self, dbinfo, dbname, iredis=create_redis_obj()):
        self.redis  = iredis
        self.dbname = dbname
        self.df_client = DataFrameClient(dbinfo['host'], dbinfo['port'], dbinfo['user'], dbinfo['password'], self.dbname, timeout=10)
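
The extra timeout=10 argument caps each HTTP request to InfluxDB at ten seconds; it is accepted by the underlying InfluxDBClient constructor and is worth setting when the client is created during service initialization that should fail fast.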