How to use the tabulate.PRESERVE_WHITESPACE flag in tabulate

To help you get started, we’ve selected a few tabulate.PRESERVE_WHITESPACE examples, based on popular ways it is used in public projects. By default, tabulate strips leading and trailing whitespace from each cell value; setting the module-level PRESERVE_WHITESPACE flag to True keeps that whitespace in the rendered table.
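
As a minimal standalone sketch (not taken from any of the projects below):

import tabulate

rows = [["  indented", "plain"]]
headers = ["col1", "col2"]

# Default behaviour: the leading spaces in "  indented" are stripped.
print(tabulate.tabulate(rows, headers))

# With the flag enabled, the leading spaces are kept in the output.
tabulate.PRESERVE_WHITESPACE = True
print(tabulate.tabulate(rows, headers))

# The flag is module-global state, so restore the default when done.
tabulate.PRESERVE_WHITESPACE = False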


github astanin / python-tabulate / test / test_output.py
def test_preserve_whitespace():
    "Output: Default table output, but with preserved leading whitespace."
    tabulate_module.PRESERVE_WHITESPACE = True
    table_headers = ["h1", "h2", "h3"]
    test_table = [["  foo", " bar   ", "foo"]]
    expected = "\n".join(
        ["h1     h2       h3", "-----  -------  ----", "  foo   bar     foo"]
    )
    result = tabulate(test_table, table_headers)
    assert_equal(expected, result)

    tabulate_module.PRESERVE_WHITESPACE = False
    table_headers = ["h1", "h2", "h3"]
    test_table = [["  foo", " bar   ", "foo"]]
    expected = "\n".join(["h1    h2    h3", "----  ----  ----", "foo   bar   foo"])
    result = tabulate(test_table, table_headers)
    assert_equal(expected, result)
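
Because PRESERVE_WHITESPACE is module-global state, the test above flips it back to False when it is done. A small wrapper that restores the previous value is one way to keep that contained; this is only a sketch with a hypothetical helper name, not part of tabulate or its test suite:

import tabulate

def render_with_whitespace(rows, headers):
    # Hypothetical helper: enable the flag for a single call, then restore
    # the previous value so other callers still see the default behaviour.
    previous = tabulate.PRESERVE_WHITESPACE
    tabulate.PRESERVE_WHITESPACE = True
    try:
        return tabulate.tabulate(rows, headers)
    finally:
        tabulate.PRESERVE_WHITESPACE = previous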
github dbcli / cli_helpers / cli_helpers / tabular_output / tabulate_adapter.py
def adapter(data, headers, table_format=None, preserve_whitespace=False,
            **kwargs):
    """Wrap tabulate inside a function for TabularOutputFormatter."""
    keys = ('floatfmt', 'numalign', 'stralign', 'showindex', 'disable_numparse')
    tkwargs = {'tablefmt': table_format}
    tkwargs.update(filter_dict_by_key(kwargs, keys))

    if table_format in supported_markup_formats:
        tkwargs.update(numalign=None, stralign=None)

    tabulate.PRESERVE_WHITESPACE = preserve_whitespace

    return iter(tabulate.tabulate(data, headers, **tkwargs).split('\n'))
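
A minimal usage sketch for the adapter above, assuming this version of cli_helpers (the import path and signature may differ in other releases):

from cli_helpers.tabular_output.tabulate_adapter import adapter

data = [["  nested value", 42], ["top-level value", 7]]
headers = ["label", "pk"]

# preserve_whitespace=True keeps the two-space indent on "  nested value".
for line in adapter(data, headers, table_format="simple", preserve_whitespace=True):
    print(line)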
github aiidateam / aiida-core / aiida / cmdline / utils / common.py
def format_nested_links(links, headers):
    """Given a nested dictionary of nodes, return a nested string representation.

    :param links: a nested dictionary of nodes
    :param headers: headers to use
    :return: nested formatted string
    """
    import collections
    import tabulate as tb

    tb.PRESERVE_WHITESPACE = True

    indent_size = 4

    def format_recursive(links, depth=0):
        """Recursively format a dictionary of nodes into indented strings."""
        rows = []
        for label, value in links.items():
            if isinstance(value, collections.Mapping):
                rows.append([depth, label, '', ''])
                rows.extend(format_recursive(value, depth=depth + 1))
            else:
                rows.append([depth, label, value.pk, value.__class__.__name__])
        return rows

    table = []

    for depth, label, pk, class_name in format_recursive(links):
        table.append(['{indent}{label}'.format(indent=' ' * (depth * indent_size), label=label), pk, class_name])

    result = '\n{}'.format(tb.tabulate(table, headers=headers))
    tb.PRESERVE_WHITESPACE = False

    return result
github wimglenn / johnnydep / johnnydep / lib.py
    def serialise(self, fields=("name", "summary"), recurse=True, format=None):
        if format == "pinned":
            # user-specified fields are ignored/invalid in this case
            fields = ("pinned",)
        data = [OrderedDict([(f, getattr(self, f, None)) for f in fields])]
        if format == "human":
            table = gen_table(self, extra_cols=fields)
            tabulate.PRESERVE_WHITESPACE = True
            return tabulate.tabulate(table, headers="keys")
        if recurse and self.requires:
            deps = flatten_deps(self)
            next(deps)  # skip over root
            data += [d for dep in deps for d in dep.serialise(fields=fields, recurse=False)]
        if format is None or format == "python":
            result = data
        elif format == "json":
            result = json.dumps(data, indent=2, default=str, separators=(",", ": "))
        elif format == "yaml":
            result = oyaml.dump(data)
        elif format == "toml":
            result = "\n".join([toml.dumps(d) for d in data])
        elif format == "pinned":
            result = "\n".join([d["pinned"] for d in data])
        else:
github alttch / pptop / pptop / __init__.py
import curses
import tabulate
import sys
import os
import readline
import threading

from types import SimpleNamespace

from atasker import BackgroundIntervalWorker
from atasker import background_task

from pptop.logger import log, log_traceback

top_lines = 5

tabulate.PRESERVE_WHITESPACE = True

palette = SimpleNamespace(
    DEFAULT=curses.A_NORMAL,
    BOLD=curses.A_BOLD,
    REVERSE=curses.A_REVERSE,
    DEBUG=curses.A_NORMAL,
    WARNING=curses.A_BOLD,
    ERROR=curses.A_BOLD,
    CAPTION=curses.A_BOLD,
    HEADER=curses.A_REVERSE,
    CURSOR=curses.A_REVERSE,
    BAR=curses.A_REVERSE,
    BAR_OK=curses.A_REVERSE,
    BAR_WARNING=curses.A_REVERSE | curses.A_BOLD,
    BAR_ERROR=curses.A_REVERSE | curses.A_BOLD,
    GREY=curses.A_NORMAL,
github tintinweb / ethereum-dasm / ethereum_dasm / output / console.py
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author : 
"""
console output writer
"""


from ethereum_dasm import utils
from ethereum_dasm.utils import colors
import ethereum_dasm.asm.registry as registry
import textwrap
import tabulate
tabulate.PRESERVE_WHITESPACE = True


def format_comment_block(it, indent=0):
    s = [indent * " " + "/*******************************************************************"]
    for i in it:
        s.append((2+indent)*" "+ "%s" % i)
    s.append(indent*" "+ "*******************************************************************/")
    return "\n".join(s)


class EVMDasmPrinter:
    """ utility class for different output formats
    """

    @staticmethod
    def listing(evmcode, json=False):
github greatscottgadgets / greatfet / host / greatfet / debug / svd.py
#
# GreatFET debug wrappers for SVD files.
# TODO: decide if this should be in libgreat?
#

import tabulate
tabulate.PRESERVE_WHITESPACE = True


class SVDGenerated(object):
    """ Generic base class for objects generated from SVDs. """

    _children           = None
    parent             = None
    _name              = None
    _read_before_write = True
    write_only         = False
    read_only          = False
    _short_type        = "SVD object"

    def __init__(self, parent):
        """ Generic constructor for SVD-generated objects. Usually not called directly. """
github VulknData / vulkn / vulkn / recordset.py
import logging
import ciso8601
import pandas as pd 
import numpy as np
import io
import datetime
import tabulate
from typing import List


from vulkn.utils import timer


log = logging.getLogger()
np.set_printoptions(suppress=True)
tabulate.PRESERVE_WHITESPACE = True


class TypeInfoMap:
    CONVERSION_MAP = {
        'UInt8': { 'native': lambda x: int(x), 'pandas': np.dtype('u1') },
        'UInt16': { 'native': lambda x: int(x), 'pandas': np.dtype('u2') },
        'UInt32': { 'native': lambda x: int(x), 'pandas': np.dtype('u4') },
        'UInt64': { 'native': lambda x: int(x), 'pandas': np.dtype('u8') },
        'Int8': { 'native': lambda x: int(x), 'pandas': np.dtype('i1') },
        'Int16': { 'native': lambda x: int(x), 'pandas': np.dtype('i2') },
        'Int32': { 'native': lambda x: int(x), 'pandas': np.dtype('i4') },
        'Int64': { 'native': lambda x: int(x), 'pandas': np.dtype('i8') },
        'Float32': { 'native': lambda x: float(x), 'pandas': np.dtype('f4') },
        'Float64': { 'native': lambda x: float(x), 'pandas': np.dtype('f8') },
        'Decimal': { 'native': lambda x: float(x), 'pandas': np.float },
        'DateTime': { 'native': lambda x: ciso8601.parse_datetime(x), 'pandas': np.dtype('M') },