How to use bsddb3 - 10 common examples

To help you get started, we’ve selected ten bsddb3 examples, drawn from popular ways the library is used in public projects.

github gitb0y / sitestalker / sitestalker.py
print "Unable to create a html output folder at " + config[group]['html_dir'] + ". Check permissions or create it first.."
        	    sys.exit(0)

            if not os.path.exists(config[group]['db_dir']) or not os.path.isdir(config[group]['db_dir']):
	        if args.verbose: print ">>> Creating database file directory " + config[group]['db_dir']
	        try:
		    os.makedirs(config[group]['db_dir'])
	        except:
		    print "Unable to create a database folder at " + config[group]['db_dir'] + ". Check permissions or create it first.."
        	    exit(0)

# SETUP THE DATABASE ENVIRONMENT, OPEN THREADED DATABASE FOR ASYNCHRONOUS READ/WRITES
	    db_path = os.path.join(os.getcwd(),config[group]['db_dir'],config[group]['db_file'])
	    html_path = os.path.join(config[group]['html_dir'],'index.html')
	    dbenv = db.DBEnv()
	    dbenv.open(config[group]['db_dir'], db.DB_INIT_LOCK | db.DB_INIT_MPOOL | db.DB_CREATE | db.DB_THREAD , 0)
	    stalkerdb = db.DB(dbenv)
	    if args.verbose: print ">>> Opening database file " + db_path
	    db_handle = stalkerdb.open(db_path, None, db.DB_HASH, db.DB_CREATE | db.DB_THREAD  )
	    if db_handle == None:
	        if args.verbose: print ">>> Database open successful..."
	    else:
	        print "Database open failed. (" + str(db_handle) + ") Exiting.."
	        exit()
	


        thread_count = 0	
	processed_urls = []
# PROCESS INPUT FILE
        if args.infile:
          if group == args.group_name:
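
Distilled from the snippet above, here is a minimal, self-contained sketch of the same pattern: create a Berkeley DB environment with locking and a shared memory pool, then open a hash database with DB_THREAD for concurrent access. The directory, file, and key names are placeholders, not values from the project.

import os
from bsddb3 import db

env_dir = 'db_env'                       # placeholder environment directory
os.makedirs(env_dir, exist_ok=True)      # DBEnv.open() needs an existing dir

dbenv = db.DBEnv()
dbenv.open(env_dir,
           db.DB_INIT_LOCK | db.DB_INIT_MPOOL | db.DB_CREATE | db.DB_THREAD, 0)

store = db.DB(dbenv)
# DB.open() returns None on success and raises db.DBError on failure,
# which is why the check on db_handle above treats None as success.
store.open('store.db', None, db.DB_HASH, db.DB_CREATE | db.DB_THREAD)

store.put(b'example-key', b'example-value')
print(store.get(b'example-key'))         # b'example-value'

store.close()
dbenv.close()
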
github RDFLib / rdflib / rdflib / plugins / sleepycat.py
        contexts_value = cspo.get(
            b("^").join([b(""), s, p, o, b("")]), txn=txn) or b("")
        contexts = set(contexts_value.split(b("^")))
        contexts.discard(c)
        contexts_value = b("^").join(contexts)
        for i, _to_key, _from_key in self.__indicies_info:
            i.delete(_to_key((s, p, o), c), txn=txn)
        if not quoted:
            if contexts_value:
                for i, _to_key, _from_key in self.__indicies_info:
                    i.put(_to_key((s, p, o), b("")), contexts_value, txn=txn)
            else:
                for i, _to_key, _from_key in self.__indicies_info:
                    try:
                        i.delete(_to_key((s, p, o), b("")), txn=txn)
                    except db.DBNotFoundError:
                        pass  # TODO: is it okay to ignore these?
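
The index maintenance above comes down to a small bsddb3 pattern: read a "^"-joined set of contexts, rewrite it, and tolerate deleting a key that was never stored. Below is a minimal sketch of just that pattern, with placeholder file and key names (it is not rdflib's API).

from bsddb3 import db

index = db.DB()
index.open('contexts.db', None, db.DB_HASH, db.DB_CREATE)

key = b's^p^o'                                    # hypothetical composite key
contexts = set((index.get(key) or b'').split(b'^'))
contexts.discard(b'ctx-1')                        # drop one context
contexts.discard(b'')                             # drop the empty placeholder
if contexts:
    index.put(key, b'^'.join(sorted(contexts)))   # keep the remaining contexts
else:
    try:
        index.delete(key)                         # nothing left: remove the key
    except db.DBNotFoundError:
        pass                                      # key was never stored

index.close()
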
github tdhock / SegAnnDB / plotter / db.py
            if cls.RE_LEN:
                cls.db.set_re_len(cls.RE_LEN)
            cls.db.open(cls.filename, None, cls.DBTYPE,
                        bsddb3.db.DB_AUTO_COMMIT |
                        # bsddb3.db.DB_THREAD|
                        bsddb3.db.DB_CREATE)
            CLOSE_ON_EXIT.append(cls.db)

def rename_all(find, replace):
    for cls in DB_CLASSES:
        if all([k in cls.keys for k in find.keys()]):
            cls.rename_all(find, replace)

class Resource(object):
    __metaclass__ = DB
    DBTYPE = bsddb3.db.DB_BTREE
    RE_LEN = 0

    @classmethod
    def all(cls):
        return [cls(*tup).get() for tup in cls.db_key_tuples()]

    @classmethod
    def db_keys(cls):
        return cls.db.keys()

    @classmethod
    def db_key_tuples(cls):
        return [k.split(" ") for k in cls.db_keys()]

    def rename(self, **kwargs):
        """Read data for this key, delete that db entry, and save it under another key"""
github gramps-project / gramps / gramps / plugins / db / bsddb / write.py
    def __check_bdb_version(self, name, force_bsddb_upgrade=False,
                            force_bsddb_downgrade=False):
        """Older version of Berkeley DB can't read data created by a newer
        version."""
        bdb_version = db.version()
        versionpath = os.path.join(self.path, str(BDBVERSFN))
        # Compare the current version of the database (bsddb_version) with the
        # version of the database code (env_version). If it is a downgrade,
        # raise an exception because we can't do anything. If they are the same,
        # return. If it is an upgrade, raise an exception unless the user has
        # already told us we can upgrade.
        if os.path.isfile(versionpath):
            with open(versionpath, "r") as version_file:
                bsddb_version = version_file.read().strip()
                env_version = tuple(map(int, bsddb_version[1:-1].split(', ')))
        else:
            # bsddb version is unknown
            bsddb_version = "Unknown"
            env_version = "Unknown"
#        _LOG.debug("db version %s, program version %s" % (bsddb_version, bdb_version))
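
The check relies on bsddb3 exposing the version of the underlying Berkeley DB library as a (major, minor, patch) tuple. A small sketch of that part, with an illustrative threshold rather than the project's actual rule:

from bsddb3 import db

bdb_version = db.version()           # (major, minor, patch) of Berkeley DB
print('Berkeley DB library version:', bdb_version)

if bdb_version < (4, 8, 0):          # illustrative threshold only
    raise RuntimeError('Berkeley DB %s.%s.%s is older than expected' % bdb_version)
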
github jsxc / xmpp-cloud-auth / xclib / dbmops.py
def perform(args):
    domain_db = bsddb3.hashopen(args.domain_db, 'c', 0o600)
    if args.get:
        print(unutf8(domain_db[utf8(args.get)], 'illegal'))
    elif args.put:
        domain_db[utf8(args.put[0])] = args.put[1]
    elif args.delete:
        del domain_db[utf8(args.delete)]
    elif args.unload:
        for k in list(domain_db.keys()):
            print('%s\t%s' % (unutf8(k, 'illegal'), unutf8(domain_db[k], 'illegal')))
        # Should work according to documentation, but doesn't
        # for k, v in DOMAIN_DB.iteritems():
        #     print k, '\t', v
    elif args.load:
        for line in sys.stdin:
            k, v = line.rstrip('\r\n').split('\t', 1)
            domain_db[utf8(k)] = v
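
The same get, put, delete, and unload operations reduced to a standalone sketch; the file name and records are placeholders. With bsddb3 on Python 3, keys and values are byte strings, which is what the utf8()/unutf8() helpers above handle.

import bsddb3

domains = bsddb3.hashopen('domain_db', 'c', 0o600)  # 'c': create if missing
domains[b'example.org'] = b'secret\thttps://auth.example.org\texample.org'
domains[b'example.net'] = b'secret\thttps://auth.example.net\texample.net'

print(domains[b'example.org'])                      # get
del domains[b'example.net']                         # delete

for k in list(domains.keys()):                      # unload (dump everything)
    print('%s\t%s' % (k.decode(), domains[k].decode()))

domains.close()
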
github jsxc / xmpp-cloud-auth / xclib / db.py
                      authdomain TEXT,
                      regcontact TEXT,
                      regfirst   TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                      reglatest  TIMESTAMP DEFAULT CURRENT_TIMESTAMP)''')
        except sqlite3.OperationalError as e:
            logging.warning('Cannot create `domains` table; maybe multiple processes started in parallel? %s' % str(e))
            # Try to get out of the way of a parallel updater
            time.sleep(1)
            # Someone else already created the table; he probably also
            # migrated it
            return
        try:
            if olddb is None:
                return
            elif isinstance(olddb, str):
                db = bsddb3.hashopen(olddb, 'r')
            else: # dict
                db = olddb
            for k,v in db.items():
                k = unutf8(k, 'illegal')
                v = unutf8(v, 'illegal')
                try:
                    (authsecret, authurl, authdomain, extra) = v.split("\t", 3)
                except ValueError:
                    (authsecret, authurl, authdomain) = v.split("\t", 2)
                    extra = None
                self.conn.execute('''INSERT INTO domains (xmppdomain, authsecret, authurl, authdomain) VALUES (?, ?, ?, ?)''', (k, authsecret, authurl, authdomain))
            if isinstance(olddb, str):
                db.close()
        except bsddb3.db.DBError as e:
            logging.error('Trouble converting %s: %s' % (olddb, e))
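
A stripped-down sketch of that migration with a placeholder schema: read every record from the old bsddb3 hash file read-only, insert it into SQLite, and log Berkeley DB errors instead of crashing on them.

import logging
import sqlite3
import bsddb3
import bsddb3.db

conn = sqlite3.connect('domains.sqlite3')            # placeholder target
conn.execute('CREATE TABLE IF NOT EXISTS domains (xmppdomain TEXT, value TEXT)')

try:
    olddb = bsddb3.hashopen('domain_db', 'r')        # old file, read-only
    for k, v in olddb.items():
        conn.execute('INSERT INTO domains (xmppdomain, value) VALUES (?, ?)',
                     (k.decode('utf-8'), v.decode('utf-8')))
    olddb.close()
except bsddb3.db.DBError as e:
    logging.error('Trouble converting domain_db: %s', e)

conn.commit()
conn.close()
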
github jsxc / xmpp-cloud-auth / xclib / db.py
def db_upgrade_cache(self, olddb):
        logging.debug('Upgrading cache from %s' % olddb)
        try:
            if olddb is None:
                return
            elif isinstance(olddb, str):
                db = bsddb3.hashopen(olddb, 'r')
            else: # dict
                db = olddb
            for k,v in db.items():
                k = unutf8(k, 'illegal').replace(':', '@')
                v = unutf8(v, 'illegal')
                (pwhash, ts1, tsv, tsa, rest) = v.split("\t", 4)
                ts1 = datetime.utcfromtimestamp(int(ts1))
                tsv = datetime.utcfromtimestamp(int(tsv))
                tsa = datetime.utcfromtimestamp(int(tsa))
                # First import goes into persistent database
                self.conn.execute('''INSERT INTO authcache (jid, pwhash, firstauth, remoteauth, anyauth)
                     VALUES (?, ?, ?, ?, ?)''', (k, pwhash, ts1, tsv, tsa))
            if isinstance(olddb, str):
                db.close()
        except bsddb3.db.DBError as e:
            logging.error('Trouble converting %s: %s' % (olddb, e))
github kanzure / nanoengineer / packaging / Pref_Mod / preferences.py
    and create the cache of its contents,
    and store a comment there about this process,
    and close the shelf again in case a concurrent process is sharing the same shelf with us.
    """
    global _shelfname, _shelf, _cache, _defaults, _trackers
    nanorex = find_or_make_Nanorex_directory()
    global dbname
    _shelfname = os.path.join( nanorex, "Preferences", "%s-shelf" % dbname )
        # This name should differ when db format differs.
        # Note: the actual filename used might have an extension added
        # by the db module (in theory, it might even create two files
        # with different extensions from the given basename).
        # By experiment, on the Mac, with bsddb there is no extension added,
        # and without it there is '.db' added. [bruce 050105]
    mkdirs_in_filename(_shelfname)
    _shelf = shelve.open(_shelfname.encode("utf_8"))
    _cache = {}
    _cache.update(_shelf) # will this work?
    was_just_made = (not _cache) #bruce 080505
    if was_just_made:
        print u"made prefs db, basename", _shelfname.encode("utf_8")
    else:
        print u"prefs db already existed, basename", _shelfname.encode("utf_8")
    _defaults = {}
    _trackers = {}
    # zap obsolete contents
    obskeys = []
    for key in _cache.keys():
        if key.isdigit() or key in ['_session_counter']:
            obskeys.append(key)
    for key in obskeys:
        del _shelf[key]
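
The shelf here is the standard library's shelve module, which sits on top of whatever dbm backend is available (historically bsddb, as the comment notes). A minimal sketch of the open-cache-close idea, with a placeholder path:

import shelve

shelf = shelve.open('prefs-shelf')      # backend may add its own extension
shelf['main_window_size'] = (800, 600)  # values are pickled automatically

cache = dict(shelf)                     # snapshot the contents in memory
shelf.close()                           # let other processes use the file

print(cache)
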
github heni / rem / rem / journal.py
        def get_filenames():
            result = []
            for filename in os.listdir(dirname):
                if filename.startswith(db_filename) and filename != db_filename:
                    file_time = int(filename.split("-")[-1])
                    if file_time > timestamp:
                        result.append(filename)
            result = sorted(result)
            if os.path.isfile(self.db_file):
                result += [db_filename]
            return result

        with self.lock:
            self.restoring_mode = True
            for filename in get_filenames():
                f = bsddb3.rnopen(os.path.join(dirname, filename), "r")
                for k, v in f.items():
                    try:
                        obj = cPickle.loads(v)
                        obj.Redo(self)
                    except Exception, e:
                        logging.exception("occurred in TagLogger while restoring from a journal : %s", e)
                f.close()
            self.restoring_mode = False
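
Replaying the journal relies on bsddb3.rnopen(), which opens a record-number (RECNO) database: keys are integers starting at 1 and values are byte strings, pickled objects in this case. A minimal sketch with placeholder file and record contents:

import pickle
import bsddb3

journal = bsddb3.rnopen('journal.db', 'c')            # create if missing
journal[1] = pickle.dumps({'op': 'set-tag', 'tag': 'done'})
journal[2] = pickle.dumps({'op': 'unset-tag', 'tag': 'done'})
journal.close()

journal = bsddb3.rnopen('journal.db', 'r')            # replay, read-only
for recno, raw in journal.items():
    print(recno, pickle.loads(raw))
journal.close()
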
github connectIOT / iottoolkit / iottoolkit / rdflib3 / plugins / sleepycat.py
            cursor = index.cursor(txn=txn)
            try:
                current = cursor.set_range(prefix)
                needs_sync = True
            except db.DBNotFoundError:
                current = None
                needs_sync = False
            cursor.close()
            while current:
                key, value = current
                cursor = index.cursor(txn=txn)
                try:
                    cursor.set_range(key)
                    # Hack to stop 2to3 converting this to next(cursor)
                    current = getattr(cursor, 'next')()
                except db.DBNotFoundError:
                    current = None
                cursor.close()
                if key.startswith(prefix):
                    c, s, p, o = from_key(key)
                    if context is None:
                        contexts_value = index.get(key, txn=txn) or b("")
                        contexts = set(contexts_value.split(b("^"))) # remove triple from all non quoted contexts
                        contexts.add(b("")) # and from the conjunctive index
                        for c in contexts:
                            for i, _to_key, _ in self.__indicies_info:
                                i.delete(_to_key((s, p, o), c), txn=txn)
                    else:
                        self.__remove((s, p, o), c, txn=txn)
                else:
                    break
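
The loop above is a prefix scan: position a cursor at the first key greater than or equal to the prefix, walk forward with next(), and stop once keys no longer start with the prefix. A minimal standalone sketch with placeholder keys:

from bsddb3 import db

index = db.DB()
index.open('index.db', None, db.DB_BTREE, db.DB_CREATE)
index.put(b'spo:alpha', b'1')
index.put(b'spo:beta', b'2')
index.put(b'zzz:other', b'3')

prefix = b'spo:'
cursor = index.cursor()
try:
    current = cursor.set_range(prefix)   # first key >= prefix
except db.DBNotFoundError:
    current = None
while current:
    key, value = current
    if not key.startswith(prefix):
        break                            # walked past the prefix range
    print(key, value)
    current = cursor.next()              # None at the end of the database

cursor.close()
index.close()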