How to use prody.LOGGER in ProDy

To help you get started, we've selected a few ProDy examples based on popular ways prody.LOGGER is used in public projects.

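At its simplest, prody.LOGGER is the package-wide logger object that every ProDy module writes to, and you can call it from your own scripts in the same way. Below is a minimal sketch; the info, debug, and warn methods it uses all appear in the excerpts that follow.

import prody

# prody.LOGGER is shared by all ProDy modules and writes to the console.
prody.LOGGER.info('Starting analysis.')             # routine progress messages
prody.LOGGER.debug('Loaded intermediate results.')  # finer detail; visibility depends on the verbosity setting
prody.LOGGER.warn('Missing data; using defaults.')  # recoverable problems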

github prody / ProDy / prody / dynamics / heatmapper.py View on Github external
        if nnums:
            items = arr.split(':', nnums + 1)
            numbers.append(items[:nnums])
        else:
            items = [arr]
        heatmap.append(fromstring(items[-1], float, sep=';'))

    heatmap = array(heatmap)
    if nnums:
        numbering = meta['numbering']
        try:
            numbers = array(numbers, int)
        except ValueError:
            try:
                numbers = array(numbers, float)
            except ValueError:
                LOGGER.warn('Numbering for y-axis could not be parsed.')
                numbering = []
        for i, label in enumerate(numbering):
            meta[label] = numbers[:, i].copy()

    return heatmap, meta
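
In the excerpt above, LOGGER.warn flags a recoverable problem: the y-axis numbering cannot be converted to numbers, so the code warns and falls back to an empty numbering instead of raising. A minimal sketch of the same warn-and-degrade pattern, using hypothetical labels:

import numpy as np
import prody

labels = ['1', '2', 'three']        # hypothetical axis labels, one of them non-numeric
try:
    numbers = np.array(labels, int)
except ValueError:
    try:
        numbers = np.array(labels, float)
    except ValueError:
        prody.LOGGER.warn('Numbering for y-axis could not be parsed.')
        numbers = None               # degrade gracefully instead of raising
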
github prody / ProDy / prody / database / goa.py View on Github external
    if not isinstance(database, str):
        raise TypeError('database should be a string')

    database = database.upper()
    filename = kwargs.get('filename', None)
    if filename is None:
        if database == 'UNIPROT':
            filename = 'goa_' + database.lower() + '_all.gaf.gz'
        else:
            filename = 'goa_' + database.lower() + '.gaf'

    data_folder = kwargs.get('data_folder', os.getcwd())

    # If the file doesn't already exist, download it
    gaf = os.path.join(data_folder, filename)
    if not(os.path.exists(gaf) and os.path.getsize(gaf) > 0):
        LOGGER.info('Downloading file {0} to {1}'.format(filename, gaf))
        data_stream = BytesIO()
        ftp_host = 'ftp.ebi.ac.uk'
        ftp = FTP(ftp_host)
        ftp.login()

        try:
            ftp.cwd('pub/databases/GO/goa')
            ftp.cwd(database)
            ftp.retrbinary('RETR {}.gz'.format(filename), data_stream.write)
        except:
            raise ValueError('Cannot find the requested GO association file')

        # Logout from FTP server
        ftp.quit()

        zip_data = data_stream.getvalue()
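
This function uses LOGGER.info to announce a slow operation (an FTP download) before it starts, so the user knows why the call is blocking. A minimal sketch of the same check-then-announce pattern, with a hypothetical target file and the transfer itself omitted:

import os
import prody

filename = 'goa_uniprot_all.gaf.gz'          # hypothetical file name
gaf = os.path.join(os.getcwd(), filename)

if not (os.path.exists(gaf) and os.path.getsize(gaf) > 0):
    prody.LOGGER.info('Downloading file {0} to {1}'.format(filename, gaf))
    # ... perform the FTP transfer here ...
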
github prody / ProDy / prody / sequence / msa.py View on Github external
                                         GAP_PENALTY, GAP_EXT_PENALTY,
                                         one_alignment_only=1)
                torf = []
                for s, c in zip(*algn[0][:2]):
                    if s == '-':
                        continue
                    elif c != '-':
                        torf.append(True)
                    else:
                        torf.append(False)
                torf = array(torf)
                tsum = torf.sum()
                assert tsum <= before, 'problem in mapping sequence to structure'
                if tsum < before:
                    arr = arr.take(torf.nonzero()[0], 1)
                    LOGGER.report('Structure refinement reduced number of '
                                  'columns from {0} to {1} in %.2fs.'
                                  .format(before, arr.shape[1]), '_refine')
                else:
                    LOGGER.debug('All residues in the sequence are contained in '
                                 'PDB structure {0}.'.format(label))

    from .analysis import calcMSAOccupancy, uniqueSequences

    rows = None
    if rowocc is not None:
        before = arr.shape[0]
        LOGGER.timeit('_refine')
        try:
            rowocc = float(rowocc)
        except Exception as err:
            raise TypeError('rowocc must be a float ({0})'.format(str(err)))
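
The refinement code above brackets a slow step with LOGGER.timeit and LOGGER.report: timeit('_refine') starts a named timer, and report(msg, '_refine') stops it, substituting the elapsed seconds into the '%.2fs' placeholder of the message. A minimal sketch of that pairing with a hypothetical timer label:

import prody

prody.LOGGER.timeit('_demo')                    # start a timer labelled '_demo'
total = sum(i * i for i in range(10**6))        # stand-in for real work
prody.LOGGER.report('Finished the demo step in %.2fs.', '_demo')
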
github prody / ProDy / prody / sequence / msa.py View on Github external
                for poly in header['polymers']:
                    if chid and poly.chid != chid:
                        continue
                    for dbref in poly.dbrefs:
                        if index is None:
                            index = msa.getIndex(dbref.idcode)
                            if index is not None:
                                LOGGER.info('{0} idcode {1} for {2}{3} '
                                            'is found in {4}.'.format(
                                            dbref.database, dbref.idcode,
                                            label[:4], poly.chid, str(msa)))
                                break
                        if index is None:
                            index = msa.getIndex(dbref.accession)
                            if index is not None:
                                LOGGER.info('{0} accession {1} for {2}{3} '
                                            'is found in {4}.'.format(
                                            dbref.database, dbref.accession,
                                            label[:4], poly.chid, str(msa)))
                                break
                if index is not None:
                    chain = structure[poly.chid]
    
            if index is None:
                raise ValueError('label is not in msa, or msa is not indexed')
            try:
                len(index)
            except TypeError:
                pass
            else:
                raise ValueError('label {0} maps onto multiple sequences, '
                                 'so cannot be used for refinement'.format(label))
github prody / ProDy / prody / domain_decomposition / __init__.py View on Github external
# -*- coding: utf-8 -*-
"""This module defines functions for computing structural/dynamical
domain decompositions, and related properties, from either ANM modes 
or analysis of structural ensembles.
"""

import prody
LOGGER = prody.LOGGER
SETTINGS = prody.SETTINGS

__all__ = []

from . import spectrus 
__all__.extend(spectrus.__all__)
from .spectrus import *
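
This __init__ module shows the convention used throughout ProDy: import the package once and bind the shared logger (and settings) to module-level names, so the rest of the module can call LOGGER directly. A sketch of the same convention in a hypothetical user module:

# my_analysis.py -- hypothetical module reusing ProDy's shared logger
import prody

LOGGER = prody.LOGGER          # package-wide logger
SETTINGS = prody.SETTINGS      # package-wide settings

def run():
    LOGGER.info('Running the analysis step.')
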
github prody / ProDy / prody / apps / prody_apps / prody_select.py View on Github external
'until v1.4.')
        pdbs = [pdbs]

    prefix = kwargs.get('prefix', None)
    suffix = kwargs.get('suffix', '_selected')
    output = kwargs.get('output', None)

    for pdb in pdbs:
        pdb = parsePDB(pdb)

        pdbselect = pdb.select(selstr)
        if pdbselect is None:
            LOGGER.warn('Selection {0} did not match any atoms.'
                        .format(repr(selstr)))
            return
        LOGGER.info('Selection {0} matched {1} atoms.'
                    .format(repr(selstr), len(pdbselect)))

        outname = output or ((prefix or pdb.getTitle()) + suffix)
        LOGGER.info('Selection is written into: ' +
                    writePDB(outname, pdbselect))
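
prody_select logs at two levels: a warning (followed by an early return) when the selection matches nothing, and an info message with the match count otherwise. The same decision in a short standalone sketch, assuming a structure fetched with parsePDB and an example selection string:

import prody

structure = prody.parsePDB('1p38')    # fetches and parses PDB entry 1p38
selstr = 'calpha'                     # example selection string
selection = structure.select(selstr)
if selection is None:
    prody.LOGGER.warn('Selection {0} did not match any atoms.'.format(repr(selstr)))
else:
    prody.LOGGER.info('Selection {0} matched {1} atoms.'.format(repr(selstr), len(selection)))
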
github prody / ProDy / prody / proteins / wwpdb.py View on Github external
second = lambda filename, pdb: gunzip(getPath(pdb), getPath(pdb))


    getURL = WWPDB_HTTP_URL[wwPDBServer() or 'us']

    success = 0
    failure = 0
    filenames = []
    for pdb in identifiers:
        if pdb is None:
            filenames.append(None)
            continue
        try:
            handle = openURL(getURL(pdb))
        except Exception as err:
            LOGGER.warn('{0} download failed ({1}).'.format(pdb, str(err)))
            failure += 1
            filenames.append(None)
        else:
            data = handle.read()
            if len(data):
                filename = getPath(pdb)

                with open(filename, 'w+b') as pdbfile:
                    pdbfile.write(data)

                filename = normpath(relpath(second(filename, pdb)))
                LOGGER.debug('{0} downloaded ({1})'
                             .format(pdb, sympath(filename)))
                success += 1
                filenames.append(filename)
            else:
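
This download loop wraps each network call in try/except: LOGGER.warn records which identifier failed and why before the loop moves on, LOGGER.debug reports each success, and counters are kept for a summary. A minimal sketch of that pattern with a hypothetical fetch step that always fails:

import prody

def fetch(pdb):
    # hypothetical download step; raises on failure
    raise IOError('connection timed out')

failure = 0
for pdb in ['1p38', '1z7g']:
    try:
        fetch(pdb)
    except Exception as err:
        prody.LOGGER.warn('{0} download failed ({1}).'.format(pdb, str(err)))
        failure += 1
prody.LOGGER.info('{0} downloads failed.'.format(failure))
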
github prody / ProDy / prody / ensemble.py View on Github external
"""

__author__ = 'Ahmet Bakan'
__copyright__ = 'Copyright (C) 2010-2012 Ahmet Bakan'

import os
import os.path
from time import time
from struct import calcsize, unpack, pack

import numpy as np
import prody
from prody import measure
from tools import *
LOGGER = prody.LOGGER

PISQUARE = np.pi ** 2

__all__ = ['Ensemble', 'Conformation', 'PDBEnsemble', 'PDBConformation',
           'Trajectory', 'DCDFile', 'Frame',
           'EnsembleBase', 'TrajectoryBase', 'TrajectoryFile', 
           'ConformationBase',
           'saveEnsemble', 'loadEnsemble',
           'calcOccupancies', 'showOccupancies', 'trimPDBEnsemble',
           'parseDCD', 'writeDCD']
        
def checkWeightsArray(weights, n_atoms, n_csets=None):
    """Return weights if checks pass, otherwise raise an exception."""
    
    assert isinstance(n_atoms, int) and n_atoms > 0, \
        'n_atoms must be a positive integer'
github prody / ProDy / prody / proteins.py View on Github external
import gzip
import os.path
import time
import os
import shutil
import sys
from glob import glob
from collections import defaultdict

import numpy as np

BioBlast = None


import prody
LOGGER = prody.LOGGER
from prody.atomic import *


PDB_CLUSTERS = {30: None, 40: None, 50: None, 70: None, 
                90: None, 95: None, 100: None}
PDB_CLUSTERS_UPDATE_WARNING = True


__all__ = ['Chemical', 'Polymer', 'PDBBlastRecord',
           'assignSecondaryStructure', 'assignSecondaryStr',
           'applyBiomolecularTransformations', 'buildBiomolecules',
           'blastPDB', 'fetchPDB', 
           'getPDBLocalFolder', 'getPDBMirrorPath', 'getWWPDBFTPServer', 
           'setPDBLocalFolder', 'setPDBMirrorPath', 'setWWPDBFTPServer',
           'parsePDBStream', 'parsePDB', 'parsePSF', 'parsePQR',
           'writePDBStream', 'writePDB', 'writePQR',