How to use the tractor.engine.logverb function in tractor

To help you get started, we've selected a few tractor examples based on popular ways tractor.engine.logverb is used in public projects.

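In the snippets below, logverb is tractor's verbose-output helper: it takes any number of arguments, print-style, and emits them only when verbose logging is switched on, which keeps timing and diagnostic chatter out of normal runs. Here is a minimal sketch of enabling it; the exact logging setup is an assumption (the snippets themselves don't show it), so check how your tractor version configures verbosity.

import logging
from tractor.engine import logverb

# Assumption: logverb routes its message through Python's logging module at a
# debug-like level, so enabling DEBUG on the root logger makes it visible.
logging.basicConfig(level=logging.DEBUG, format='%(message)s')

niter = 3
logverb('Finished', niter, 'iterations')  # arguments are joined print-style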

github dstndstn / tractor / tractor / ceres_optimizer.py
        p0 = tractor.getParams()
        for i, k in usedParamMap.items():
            fluxes[k] = p0[i]

        iverbose = 1 if verbose else 0
        nonneg = int(nonneg)

        ithreads = 0
        if self.threads is not None:
            ithreads = int(self.threads)

        if nonneg:
            # Initial run with nonneg=False, to get in the ballpark
            x = ceres_forced_phot(blocks, fluxes, 0, iverbose, ithreads)
            assert(x == 0)
            logverb('forced phot: ceres initial run', Time() - t0)
            t0 = Time()
            if negfluxval is not None:
                fluxes = np.maximum(fluxes, negfluxval)

        x = ceres_forced_phot(blocks, fluxes, nonneg, iverbose, ithreads)
        #print('Ceres forced phot:', x)
        logverb('forced phot: ceres', Time() - t0)

        t0 = Time()
        params = np.zeros(len(p0))
        for i, k in usedParamMap.items():
            params[i] = fluxes[k]
        tractor.setParams(params)
        logverb('forced phot: unmapping params:', Time() - t0)

        if wantims1:
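
The recurring pattern in this snippet is worth calling out: each phase grabs a Time() stamp, does the work, and reports the elapsed interval through logverb, so the timings only show up in verbose runs. Below is a stripped-down sketch of the same idea; the astrometry.util.ttime import is an assumption about where tractor's Time class comes from, and expensive_step is a stand-in for the real work.

from astrometry.util.ttime import Time   # assumed source of the Time class used above
from tractor.engine import logverb

def expensive_step():
    # Stand-in for a real phase such as the ceres_forced_phot call above.
    return sum(i * i for i in range(1_000_000))

t0 = Time()
expensive_step()
# Time() - t0 stringifies to the elapsed time; logverb only reports it
# when verbose output is enabled.
logverb('expensive step:', Time() - t0)
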
github dstndstn / tractor / tractor / optimize.py
            if pAfter > pBest:
                alphaBest = alpha
                pBest = pAfter

        # if alphaBest is None or alphaBest == 0:
        #     print "Warning: optimization is borking"
        #     print "Parameter direction =",X
        #     print "Parameters, step sizes, updates:"
        #     for n,p,s,x in zip(tractor.getParamNames(), tractor.getParams(), tractor.getStepSizes(), X):
        #         print n, '=', p, '  step', s, 'update', x
        if alphaBest is None:
            tractor.setParams(p0)
            return 0, 0.

        logverb('  Stepping by', alphaBest,
                'for delta-logprob', pBest - pBefore)
        pa = [p + alphaBest * d for p, d in zip(p0, X)]
        tractor.setParams(pa)
        return pBest - pBefore, alphaBest
github dstndstn / tractor / tractor / lsqr_optimizer.py
        # below we hstack the rows, but before doing that, remember how
        # many rows are in each chunk.
        spcols = np.array(spcols)
        nrowspercol = np.array([len(x) for x in sprows])

        if shared_params:
            # Apply shared parameter map
            #print('Before applying shared parameter map:')
            #print('spcols:', len(spcols), 'elements')
            #print('  ', len(set(spcols)), 'unique')
            spcols = paramindexmap[spcols]
            # print('After:')
            #print('spcols:', len(spcols), 'elements')
            #print('  ', len(set(spcols)), 'unique')
            Ncols = np.max(spcols) + 1
            logverb('Set Ncols=', Ncols)

        # b = chi
        #
        # FIXME -- we could be much smarter here about computing
        # just the regions we need!
        #
        if b is None:
            b = np.zeros(Nrows)

        chimap = {}
        if chiImages is not None:
            for img, chi in zip(tractor.getImages(), chiImages):
                chimap[img] = chi

        # FIXME -- could compute just chi ROIs here.
github dstndstn / tractor / tractor / lsqr_optimizer.py
        X = np.array(X)

        if shared_params:
            # Unapply shared parameter map -- result is duplicated
            # result elements.
            # logverb('shared_params: before, X len', len(X), 'with',
            #         np.count_nonzero(X), 'non-zero entries')
            # logverb('paramindexmap: len', len(paramindexmap),
            #         'range', paramindexmap.min(), paramindexmap.max())
            X = X[paramindexmap]
            # logverb('shared_params: after, X len', len(X), 'with',
            #         np.count_nonzero(X), 'non-zero entries')

        if scale_columns:
            X[colscales > 0] /= colscales[colscales > 0]
        logverb('  X=', X)

        if variance:
            if shared_params:
                # Unapply shared parameter map.
                var = var[paramindexmap]

            if scale_columns:
                var[colscales > 0] /= colscales[colscales > 0]**2
            return X, var

        return X
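
One detail in this snippet: logverb('  X=', X) hands an entire numpy update vector to the logger. Whatever string conversion logverb applies, numpy abbreviates long arrays in its default output, so if you want the full vector in the verbose log you can format it explicitly first. The sketch below uses plain numpy, nothing tractor-specific.

import numpy as np
from tractor.engine import logverb

X = np.linspace(0., 1., 2000)
# numpy summarizes long arrays in its default string output ("..." in the
# middle); raising the print threshold to the array size keeps every element.
logverb('  X=', np.array2string(X, threshold=X.size))
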
github dstndstn / tractor / tractor / ceres_optimizer.py
                        #    print('umod:', umod.x0, umod.y0, type(umod.x0), type(umod.y0))
                        #    print('umod:', umod)
                        dd = (ceresparam, int(umod.x0), int(umod.y0), cmod)
                        blocks[b0 + bi][1].append(dd)
        logverb('forced phot: dicing up', Time() - t0)

        if wantims0:
            t0 = Time()
            params = tractor.getParams()
            result.ims0 = self._getims(params, imlist, umodels, mods0, scales,
                                       sky, minFlux, None)
            logverb('forced phot: ims0', Time() - t0)

        t0 = Time()
        fluxes = np.zeros(len(usedParamMap))
        logverb('Ceres forced phot:')
        logverb(len(blocks), ('image blocks (%ix%i), %i params' %
                              (self.BW, self.BH, len(fluxes))))
        if len(blocks) == 0 or len(fluxes) == 0:
            logverb('Nothing to do!')
            return
        # init fluxes passed to ceres
        p0 = tractor.getParams()
        for i, k in usedParamMap.items():
            fluxes[k] = p0[i]

        iverbose = 1 if verbose else 0
        nonneg = int(nonneg)

        ithreads = 0
        if self.threads is not None:
            ithreads = int(self.threads)
github dstndstn / tractor / tractor / lsqr_optimizer.py
                    damp0 *= 10.
                    print('Setting damping to', damping)
                    if damp0 < 1e3:
                        tryAgain = True

            lnpBest = lnp0
            alphaBest = None
            chiBest = None

            for alpha in alphas:
                #t0 = Time()
                lnp, chis, ims = self._lnp_for_update(
                    tractor,
                    mod0, imgs, umodels, X, alpha, p0, rois, scales,
                    p0sky, Xsky, priors, sky, minFlux)
                logverb('Forced phot: stepped with alpha', alpha,
                        'for lnp', lnp, ', dlnp', lnp - lnp0)
                #logverb('Took', Time() - t0)
                if lnp < (lnpBest - 1.):
                    logverb('lnp', lnp, '< lnpBest-1', lnpBest - 1.)
                    break
                if not np.isfinite(lnp):
                    break
                if lnp > lnpBest:
                    alphaBest = alpha
                    lnpBest = lnp
                    chiBest = chis
                    imsBest = ims

            if alphaBest is not None:
                # Clamp fluxes up to zero
                if minFlux is not None:
github dstndstn / tractor / tractor / lsqr_optimizer.py
            lnpBest = lnp0
            alphaBest = None
            chiBest = None

            for alpha in alphas:
                #t0 = Time()
                lnp, chis, ims = self._lnp_for_update(
                    tractor,
                    mod0, imgs, umodels, X, alpha, p0, rois, scales,
                    p0sky, Xsky, priors, sky, minFlux)
                logverb('Forced phot: stepped with alpha', alpha,
                        'for lnp', lnp, ', dlnp', lnp - lnp0)
                #logverb('Took', Time() - t0)
                if lnp < (lnpBest - 1.):
                    logverb('lnp', lnp, '< lnpBest-1', lnpBest - 1.)
                    break
                if not np.isfinite(lnp):
                    break
                if lnp > lnpBest:
                    alphaBest = alpha
                    lnpBest = lnp
                    chiBest = chis
                    imsBest = ims

            if alphaBest is not None:
                # Clamp fluxes up to zero
                if minFlux is not None:
                    pa = [max(minFlux, p + alphaBest * d)
                          for p, d in zip(p0, X)]
                else:
                    pa = [p + alphaBest * d for p, d in zip(p0, X)]
github dstndstn / tractor / tractor / lsqr_optimizer.py
        for c, n in zip(spcols, nrowspercol):
            cc[i: i + n] = c
            i += n
        spcols = cc
        assert(i == len(sprows))
        assert(len(sprows) == len(spcols))

        logverb('  Number of sparse matrix elements:', len(sprows))
        urows = np.unique(sprows)
        ucols = np.unique(spcols)
        logverb('  Unique rows (pixels):', len(urows))
        logverb('  Unique columns (params):', len(ucols))
        if len(urows) == 0 or len(ucols) == 0:
            return []
        logverb('  Max row:', urows[-1])
        logverb('  Max column:', ucols[-1])
        logverb('  Sparsity factor (possible elements / filled elements):',
                float(len(urows) * len(ucols)) / float(len(sprows)))

        # FIXME -- does it make LSQR faster if we remap the row and column
        # indices so that no rows/cols are empty?

        # FIXME -- we could probably construct the CSC matrix ourselves!

        # Build sparse matrix
        #A = csc_matrix((spvals, (sprows, spcols)), shape=(Nrows, Ncols))
        A = csr_matrix((spvals, (sprows, spcols)), shape=(Nrows, Ncols))

        if get_A_matrix:
            return A

        lsqropts = dict(show=isverbose(), damp=damp)
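
Besides logverb, this snippet uses the companion isverbose() helper to forward tractor's verbosity into the LSQR solver options (show=isverbose()). The same check is handy for gating diagnostics that are expensive to compute. A small sketch follows, assuming isverbose is importable from the same module as logverb.

import numpy as np
from tractor.engine import logverb, isverbose   # isverbose location is an assumption

A = np.random.random((100, 100))
if isverbose():
    # Only pay for the expensive diagnostic when verbose output will show it.
    logverb('condition number of A:', np.linalg.cond(A))
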
github dstndstn / tractor / tractor / optimize.py
        if alphas is None:
            # 1/1024 to 1 in factors of 2, + sqrt(2.) + 2.
            alphas = np.append(2.**np.arange(-10, 1), [np.sqrt(2.), 2.])

        pBefore = tractor.getLogProb()
        logverb('  log-prob before:', pBefore)
        pBest = pBefore
        alphaBest = None
        p0 = tractor.getParams()
        for alpha in alphas:
            logverb('  Stepping with alpha =', alpha)
            pa = [p + alpha * d for p, d in zip(p0, X)]
            tractor.setParams(pa)
            pAfter = tractor.getLogProb()
            logverb('  Log-prob after:', pAfter)
            logverb('  delta log-prob:', pAfter - pBefore)

            #print('Step', alpha, 'p', pAfter, 'dlnp', pAfter-pBefore)
            
            if not np.isfinite(pAfter):
                logmsg('  Got bad log-prob', pAfter)
                break

            if pAfter < (pBest - 1.):
                break

            if pAfter > pBest:
                alphaBest = alpha
                pBest = pAfter

        # if alphaBest is None or alphaBest == 0:
        #     print "Warning: optimization is borking"
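
Note the split between the two log helpers here: logverb traces every candidate step, while logmsg is reserved for the bad log-prob warning that users should see even without verbose output. Below is a sketch of that split, assuming logmsg is importable from the same module and logs at a higher priority than logverb.

import numpy as np
from tractor.engine import logverb, logmsg   # logmsg location is an assumption

def toy_logprob(alpha):
    # Toy objective that goes non-finite when the step overshoots.
    with np.errstate(divide='ignore'):
        return float(np.log(1.0 - alpha))

for alpha in (0.25, 0.5, 1.0):
    logverb('  Stepping with alpha =', alpha)     # verbose-only progress
    p_after = toy_logprob(alpha)
    if not np.isfinite(p_after):
        logmsg('  Got bad log-prob', p_after)     # visible at normal verbosity
        break
    logverb('  Log-prob after:', p_after)
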
github dstndstn / tractor / tractor / lsqr_optimizer.py
        i = 0
        for c, n in zip(spcols, nrowspercol):
            cc[i: i + n] = c
            i += n
        spcols = cc
        assert(i == len(sprows))
        assert(len(sprows) == len(spcols))

        logverb('  Number of sparse matrix elements:', len(sprows))
        urows = np.unique(sprows)
        ucols = np.unique(spcols)
        logverb('  Unique rows (pixels):', len(urows))
        logverb('  Unique columns (params):', len(ucols))
        if len(urows) == 0 or len(ucols) == 0:
            return []
        logverb('  Max row:', urows[-1])
        logverb('  Max column:', ucols[-1])
        logverb('  Sparsity factor (possible elements / filled elements):',
                float(len(urows) * len(ucols)) / float(len(sprows)))

        # FIXME -- does it make LSQR faster if we remap the row and column
        # indices so that no rows/cols are empty?

        # FIXME -- we could probably construct the CSC matrix ourselves!

        # Build sparse matrix
        #A = csc_matrix((spvals, (sprows, spcols)), shape=(Nrows, Ncols))
        A = csr_matrix((spvals, (sprows, spcols)), shape=(Nrows, Ncols))

        if get_A_matrix:
            return A