The following 50 code examples, extracted from open-source Python projects, illustrate how to use sqlite3.Binary().
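Before the extracted examples, here is a minimal, self-contained sketch of the basic round trip (the table and key names are illustrative, not taken from any project below): wrap raw bytes in sqlite3.Binary() when binding an INSERT parameter, then read the BLOB back and convert it to bytes.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE blobs (key TEXT PRIMARY KEY, value BLOB)")

payload = b"\x00\x01\xffbinary payload"
# sqlite3.Binary marks the bytes as BLOB data for the driver.
conn.execute("INSERT INTO blobs (key, value) VALUES (?, ?)",
             ("example", sqlite3.Binary(payload)))
conn.commit()

(stored,) = conn.execute("SELECT value FROM blobs WHERE key = ?",
                         ("example",)).fetchone()
assert bytes(stored) == payload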
def insert_contract(name: str, abi, bytecode: str, gas_estimates, method_identifiers, cwd):
    '''Insert a contract into the local db; also converts the types.'''
    assert name
    assert abi
    assert bytecode
    assert gas_estimates
    assert method_identifiers
    gas = pickle.dumps(gas_estimates)
    methods = pickle.dumps(method_identifiers)
    result = cursor.execute(insert_contract_sql,
                            (name, str(abi), bytecode,
                             sqlite3.Binary(gas), sqlite3.Binary(methods)))
    connection.commit()
    return result
def SavePoisonersToDb(result):
    for k in ['Poisoner', 'SentToIp', 'ForName', 'AnalyzeMode']:
        if k not in result:
            result[k] = ''

    cursor = sqlite3.connect(settings.Config.DatabaseFile)
    cursor.text_factory = sqlite3.Binary  # We add a text factory to support different charsets
    res = cursor.execute(
        "SELECT COUNT(*) AS count FROM Poisoned "
        "WHERE Poisoner=? AND SentToIp=? AND ForName=? AND AnalyzeMode=?",
        (result['Poisoner'], result['SentToIp'], result['ForName'], result['AnalyzeMode']))
    (count,) = res.fetchone()

    if not count:
        cursor.execute(
            "INSERT INTO Poisoned VALUES(datetime('now'), ?, ?, ?, ?)",
            (result['Poisoner'], result['SentToIp'], result['ForName'], result['AnalyzeMode']))
        cursor.commit()

    cursor.close()
def hash(self, key):
    """Compute portable hash for `key`.

    :param key: key to hash
    :return: hash value

    """
    mask = 0xFFFFFFFF
    disk_key, _ = self.put(key)
    type_disk_key = type(disk_key)

    if type_disk_key is sqlite3.Binary:
        return zlib.adler32(disk_key) & mask
    elif type_disk_key is TextType:
        return zlib.adler32(disk_key.encode('utf-8')) & mask  # pylint: disable=no-member
    elif type_disk_key in INT_TYPES:
        return disk_key % mask
    else:
        assert type_disk_key is float
        return zlib.adler32(struct.pack('!d', disk_key)) & mask
def put(self, key):
    """Convert `key` to fields key and raw for Cache table.

    :param key: key to convert
    :return: (database key, raw boolean) pair

    """
    # pylint: disable=bad-continuation,unidiomatic-typecheck
    type_key = type(key)

    if type_key is BytesType:
        return sqlite3.Binary(key), True
    elif ((type_key is TextType)
            or (type_key in INT_TYPES
                and -9223372036854775808 <= key <= 9223372036854775807)
            or (type_key is float)):
        return key, True
    else:
        data = pickle.dumps(key, protocol=self.pickle_protocol)
        result = pickletools.optimize(data)
        return sqlite3.Binary(result), False
def _add_record(self, sha1, subj, tset, data):
    if not self.is_valid():
        print " Invalid TrustStore.sqlite3"
        return

    conn = sqlite3.connect(self._path)
    c = conn.cursor()
    c.execute('SELECT COUNT(*) FROM tsettings WHERE subj=?', [sqlite3.Binary(subj)])
    row = c.fetchone()

    if row[0] == 0:
        c.execute('INSERT INTO tsettings (sha1, subj, tset, data) VALUES (?, ?, ?, ?)',
                  [sqlite3.Binary(sha1), sqlite3.Binary(subj),
                   sqlite3.Binary(tset), sqlite3.Binary(data)])
        print ' Certificate added'
    else:
        c.execute('UPDATE tsettings SET sha1=?, tset=?, data=? WHERE subj=?',
                  [sqlite3.Binary(sha1), sqlite3.Binary(tset),
                   sqlite3.Binary(data), sqlite3.Binary(subj)])
        print ' Existing certificate replaced'

    conn.commit()
    conn.close()
def loopix_mixes():
    sec_params = SphinxParams(header_len=1024)
    dbManager.create_mixnodes_table('Mixnodes')
    mixes = []
    pubs_mixes = []
    for i in range(3):
        mix = LoopixMixNode(sec_params, 'Mix%d' % (i + 1), 9999 - i, '1.2.3.%d' % i, i)
        mix.transport = proto_helpers.FakeDatagramTransport()
        mix.config_params = mix.config_params._replace(DATABASE_NAME='test.db')
        mixes.append(mix)
        dbManager.insert_row_into_table(
            'Mixnodes',
            [None, mix.name, mix.port, mix.host,
             sqlite3.Binary(petlib.pack.encode(mix.pubk)), mix.group])
    pubs_mixes = [Mix(m.name, m.port, m.host, m.pubk, m.group) for m in mixes]
    return mixes, pubs_mixes
def loopix_providers():
    sec_params = SphinxParams(header_len=1024)
    dbManager.create_providers_table('Providers')
    providers = []
    pubs_providers = []
    for i in range(3):
        p = LoopixProvider(sec_params, 'Provider%d' % (i + 1), 9995 - i, '1.2.%d.4' % i)
        p.transport = proto_helpers.FakeDatagramTransport()
        p.config_params = p.config_params._replace(DATABASE_NAME='test.db')
        providers.append(p)
        dbManager.insert_row_into_table(
            'Providers',
            [None, p.name, p.port, p.host,
             sqlite3.Binary(petlib.pack.encode(p.pubk))])
    pubs_providers = [Provider(p.name, p.port, p.host, p.pubk) for p in providers]
    return providers, pubs_providers
def loopix_clients(pubs_providers, pubs_mixes):
    sec_params = SphinxParams(header_len=1024)
    dbManager.create_users_table('Users')
    clients = []
    pubs_clients = []
    for i in range(3):
        provider = pubs_providers[i]
        c = LoopixClient(sec_params, 'Client%d' % (i + 1), 9993 - i, '1.%d.3.4' % i, provider.name)
        c.register_mixes(pubs_mixes)
        c.transport = proto_helpers.FakeDatagramTransport()
        c.config_params = c.config_params._replace(DATABASE_NAME='test.db')
        c.provider = dbManager.select_provider_by_name(provider.name)
        clients.append(c)
        dbManager.insert_row_into_table(
            'Users',
            [None, c.name, c.port, c.host,
             sqlite3.Binary(petlib.pack.encode(c.pubk)), c.provider.name])
    pubs_clients = [User(c.name, c.port, c.host, c.pubk, c.provider) for c in clients]
    return clients, pubs_clients
def get_geo_page(self, gse, maxlag=7):
    sql = """select * from geo_pages
             where gse=? and checked_date + ? > julianday(datetime('now'))
             order by checked_date desc limit 1"""
    cur = self.cache_db.execute(sql, (gse, maxlag))
    data = cur.fetchall()
    if len(data) > 0:
        return zlib.decompress(bytes(data[0][2]))
    try:
        data = urllib2.urlopen(self._gse2url(gse)).read()
    except Exception as e:
        print "Error while fetching data for GSE: ", gse
        print "URL: ", self._gse2url(gse)
        print e
        raise e
    data_compr = zlib.compress(data)
    self.cache_db.execute(
        "insert into geo_pages(gse, checked_date, value) "
        "values (?, julianday(datetime('now')), ?)",
        (gse, sqlite3.Binary(data_compr)))
    self.cache_db.commit()
    return data
def update(self):
    try:
        _con = lite.connect(self.filename)
        _cur = _con.cursor()
        _blob = lite.Binary(self.data)
        # _cur.execute("UPDATE [Files] SET [Date]='%s',[Version]='%s',[Data]='%s',[Comment]='%s' WHERE [FileID]='%s'"
        #              % (self.date, self.version, _blob, self.comment, self.file_id))
        _sql = "UPDATE [Files] SET [Date]=?,[Version]=?,[Data]=?,[Comment]=? WHERE FileID=?"
        _cur.execute(_sql, (str(self.date), str(self.version), _blob,
                            str(self.comment), str(self.file_id)))
        _con.commit()
        lastRowID = _cur.lastrowid
        _con.close()
    except Exception as _err:
        print(_err)
        return False
    return lastRowID
def record_metadata_solver(self, recording_requester):
    """
    Record solver metadata.

    Parameters
    ----------
    recording_requester : <Solver>
        The Solver that would like to record its metadata.
    """
    path = recording_requester._system.pathname
    solver_class = type(recording_requester).__name__
    if not path:
        path = 'root'
    id = "{}.{}".format(path, solver_class)

    solver_options = pickle.dumps(recording_requester.options, pickle.HIGHEST_PROTOCOL)

    with self.con:
        self.con.execute(
            "INSERT INTO solver_metadata(id, solver_options, solver_class) "
            "VALUES(?,?,?)",
            (id, sqlite3.Binary(solver_options), solver_class))
def add_func_info(self, file_id, func):
    self.__cursor.execute("""
        SELECT rowid FROM functions
        WHERE file_id = ? AND virtual_address = ?
    """, (file_id, func.virtual_address))
    res = self.__cursor.fetchone()
    if res:
        return
    self.__cursor.execute("""
        INSERT INTO functions (file_id, virtual_address, name, size, bytes)
        VALUES (?, ?, ?, ?, ?)
    """, (file_id, func.virtual_address, func.name, func.size,
          sqlite3.Binary(func.bytes.read())))
def __setitem__(self, key, response):
    super(DbPickleDict, self).__setitem__(
        key, sqlite.Binary(self.serializer.dumps(response.raw, body=response.content)))
def typeconv(obj):
    # type: (OptionValue) -> OptionValue
    """If obj is a string that looks like a number or boolean value, its
    value is returned. Binary strings are returned as unicode strings;
    everything else is unchanged.
    """
    if not isinstance(obj, string_types):
        return obj
    if obj == 'true':
        return True
    if obj == 'false':
        return False
    if isinstance(obj, string_types):
        text = unistr(obj)  # type: str
        if re_number.match(text):
            return int(text)
    if isinstance(obj, binary_type):
        text = unistr(obj)
        return text
    return obj
def put(self, key, value):
    # type: (str, bytes) -> None
    with self.conn as conn:
        conn.execute(self.kv_put, (key, sqlite3.Binary(value)))
def CheckBinary(self):
    with test_support.check_py3k_warnings():
        b = sqlite.Binary(chr(0) + "'")
def insert_tweet(status_id, tweet, bot_flag=0):
    conn = sqlite3.connect(DB_NAME)
    binary_data = pickle.dumps(tweet, pickle.HIGHEST_PROTOCOL)
    c = conn.cursor()
    c.execute("insert into tweets (sid, data, bot_flag) values (?, ?, ?)",
              [status_id, sqlite3.Binary(binary_data), bot_flag])
    conn.commit()
    conn.close()
def CheckBinary(self):
    b = sqlite.Binary(chr(0) + "'")
def blob(obj):
    # The original snippet dropped the result; the wrapped value must be returned.
    return sqlite3.Binary(util.serialize(obj))
def __adapt_array(self, arr):
    """
    https://stackoverflow.com/questions/18621513/python-insert-numpy-array-into-sqlite3-database
    """
    out = io.BytesIO()
    np.save(out, arr)
    out.seek(0)
    return sqlite3.Binary(out.read())
def get(self, key, raw):
    """Convert fields `key` and `raw` from Cache table to key.

    :param key: database key to convert
    :param bool raw: flag indicating raw database storage
    :return: corresponding Python key

    """
    # pylint: disable=no-self-use,unidiomatic-typecheck
    if raw:
        return BytesType(key) if type(key) is sqlite3.Binary else key
    else:
        return pickle.load(BytesIO(key))
def fetch(self, mode, filename, value, read):
    """Convert fields `mode`, `filename`, and `value` from Cache table to value.

    :param int mode: value mode raw, binary, text, or pickle
    :param str filename: filename of corresponding value
    :param value: database value
    :param bool read: when True, return an open file handle
    :return: corresponding Python value

    """
    # pylint: disable=no-self-use,unidiomatic-typecheck
    if mode == MODE_RAW:
        return BytesType(value) if type(value) is sqlite3.Binary else value
    elif mode == MODE_BINARY:
        if read:
            return open(op.join(self._directory, filename), 'rb')
        else:
            with open(op.join(self._directory, filename), 'rb') as reader:
                return reader.read()
    elif mode == MODE_TEXT:
        full_path = op.join(self._directory, filename)
        with io_open(full_path, 'r', encoding='UTF-8') as reader:
            return reader.read()
    elif mode == MODE_PICKLE:
        if value is None:
            with open(op.join(self._directory, filename), 'rb') as reader:
                return pickle.load(reader)
        else:
            return pickle.load(BytesIO(value))
def _sqlite_count(iterable):
    """Return the number of non-NULL (!= None) elements in iterable."""
    if isinstance(iterable, BaseElement):
        iterable = [iterable]
    return sum(1 for x in iterable if x != None)


# The SQLite BLOB/Binary type is sortable in Python 2 but unsortable in Python 3.
def _sqlite_sortkey(value):
    """Key function for use with sorted(), min(), max(), etc. that makes
    a best effort to match SQLite ORDER BY behavior for supported classes.

    From SQLite docs:

        "...values with storage class NULL come first, followed by
        INTEGER and REAL values interspersed in numeric order, followed
        by TEXT values in collating sequence order, and finally BLOB
        values in memcmp() order."

    For more details see "Datatypes In SQLite Version 3" section
    "4.1. Sort Order" <https://www.sqlite.org/datatype3.html>.
    """
    if value is None:                    # NULL (sort group 0)
        return (0, 0)
    if isinstance(value, Number):        # INTEGER and REAL (sort group 1)
        return (1, value)
    if isinstance(value, string_types):  # TEXT (sort group 2)
        return (2, value)
    if isinstance(value, Binary):        # BLOB (sort group 3)
        if _unsortable_blob_type:
            value = bytes(value)
        return (3, value)
    return (4, value)                    # unsupported type (sort group 4)
def CheckBinary(self):
    b = sqlite.Binary(b"\0'")
def set_hash_ids(self, cursor, hash_ids):
    """Set the ids of a set of hashes.

    @param hash_ids: a C{dict} of hash=>id mappings.
    """
    for hash, id in iteritems(hash_ids):
        cursor.execute("REPLACE INTO hash VALUES (?, ?)",
                       (id, sqlite3.Binary(hash)))
def get_hash_id(self, cursor, hash):
    """Return the id associated with C{hash}, or C{None} if not available.

    @param hash: a C{bytes} representing a hash.
    """
    cursor.execute("SELECT id FROM hash WHERE hash=?", (sqlite3.Binary(hash),))
    value = cursor.fetchone()
    if value:
        return value[0]
    return None
def add_hash_id_request(self, cursor, hashes):
    hashes = list(hashes)
    cursor.execute("INSERT INTO hash_id_request (hashes, timestamp)"
                   " VALUES (?,?)",
                   (sqlite3.Binary(bpickle.dumps(hashes)), time.time()))
    return HashIDRequest(self._db, cursor.lastrowid)
def add_task(self, cursor, queue, data):
    data = bpickle.dumps(data)
    cursor.execute(
        "INSERT INTO task (queue, timestamp, data) VALUES (?,?,?)",
        (queue, time.time(), sqlite3.Binary(data)))
    return PackageTask(self._db, cursor.lastrowid)
def save_message(self, cursor, message):
    cursor.execute("INSERT INTO message (data) VALUES (?)",
                   (sqlite3.Binary(bpickle.dumps(message)),))
def pack_blob(obj):
    return sqlite3.Binary(zdumps(obj))
def encode(obj):
    """Serialize an object using pickle to a binary format accepted by SQLite."""
    return sqlite3.Binary(dumps(obj, protocol=PICKLE_PROTOCOL))
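The matching decode() is not among the extracted snippets; a plausible counterpart, assuming `loads` is pickle.loads just as `dumps` above is pickle.dumps, would be:

def decode(blob):
    """Deserialize an object previously stored with encode() (sketch)."""
    return loads(bytes(blob))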
def sink_ml(self, voter_id, matrix):
    with sqlite3.connect(self.db_name) as conn:
        c = conn.cursor()
        timestamp = time.time()
        blob_matrix = cPickle.dumps(matrix, cPickle.HIGHEST_PROTOCOL)
        c.execute(
            'INSERT INTO smls (timestamp, voter_id, sml) VALUES(?, ?, ?);',
            [timestamp, voter_id, sqlite3.Binary(blob_matrix)])
        conn.commit()
def add_chunk(self, chunk):
    self._exec(("insert into chunks values (0, 0, 0, ?)",
                (sqlite3.Binary(chunk.to_bytes()),)))
def write_params_hlr(opts, params):
    # SQLite3 OpenBSC HLR
    if opts.write_hlr:
        import sqlite3
        conn = sqlite3.connect(opts.write_hlr)

        c = conn.execute(
            'INSERT INTO Subscriber '
            '(imsi, name, extension, authorized, created, updated) '
            'VALUES '
            '(?,?,?,1,datetime(\'now\'),datetime(\'now\'));',
            [params['imsi'], params['name'], '9' + params['iccid'][-5:-1]])
        sub_id = c.lastrowid
        c.close()

        c = conn.execute(
            'INSERT INTO AuthKeys '
            '(subscriber_id, algorithm_id, a3a8_ki) '
            'VALUES '
            '(?,?,?)',
            [sub_id, 2, sqlite3.Binary(_dbi_binary_quote(h2b(params['ki'])))])

        conn.commit()
        conn.close()
def write_changelog_entry(self, serial, entry):
    threadlog.debug("writing changelog for serial %s", serial)
    data = dumps(entry)
    self._sqlconn.execute(
        "INSERT INTO changelog (serial, data) VALUES (?, ?)",
        (serial, sqlite3.Binary(data)))
def io_file_set(self, path, content):
    assert not os.path.isabs(path)
    assert not path.endswith("-tmp")
    c = self._sqlconn.cursor()
    q = "INSERT OR REPLACE INTO files (path, size, data) VALUES (?, ?, ?)"
    c.execute(q, (path, len(content), sqlite3.Binary(content)))
    c.close()
def _adapt_array(arr):
    """
    Converts np.array to a BLOB when inserting.
    http://stackoverflow.com/a/31312102/190597 (SoulNibbler)
    """
    out = io.BytesIO()
    np.save(out, arr)
    out.seek(0)
    return sqlite3.Binary(out.read())
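For context, a hedged sketch of how such an adapter is typically registered so NumPy arrays can be bound directly as query parameters; the "ARRAY" declared type and the converter are assumptions in the spirit of the linked answer, not part of the extracted snippet:

import io
import sqlite3

import numpy as np

def _adapt_array(arr):
    # Serialize the array with np.save and hand SQLite the raw bytes as a BLOB.
    out = io.BytesIO()
    np.save(out, arr)
    out.seek(0)
    return sqlite3.Binary(out.read())

def _convert_array(blob):
    # Inverse of _adapt_array: rebuild the array from the stored bytes.
    return np.load(io.BytesIO(blob))

sqlite3.register_adapter(np.ndarray, _adapt_array)
sqlite3.register_converter("ARRAY", _convert_array)

conn = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
conn.execute("CREATE TABLE vectors (v ARRAY)")
conn.execute("INSERT INTO vectors (v) VALUES (?)", (np.arange(5),))
(restored,) = conn.execute("SELECT v FROM vectors").fetchone()
assert (restored == np.arange(5)).all()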
def _set(self, item_id, item):
    def _encode(obj):
        return sqlite3.Binary(pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL))

    self._open()
    now = datetime.datetime.now()
    if not now.microsecond:  # now is to the second
        now += datetime.timedelta(microseconds=1)  # add 1 microsecond, required for dbapi2
    query = 'REPLACE INTO %s (key,time,value) VALUES(?,?,?)' % self._table_name
    self._execute(True, query, values=[item_id, now, _encode(item)])
    self._optimize_item_count()
    self._close()
def procdata(worker, connque):
    conn = sqlite3.connect('site_data.db')
    conn.execute("create table if not exists mainpages "
                 "(id integer primary key autoincrement, "
                 "url TEXT, headers TEXT, content BLOB)")
    spend = 0
    cnt = 0
    size = 0
    while True:
        st = time.time()
        seed, headers, content = connque.get()
        urls = worker.geturls(seed, content)
        urls = worker.debug_filter(urls)
        for url in urls:
            if url not in worker.bfdone:
                worker.run_queue.put(url)
        # content = content.decode(procdata_getencoding(seed, headers, content))
        gziphtml = sqlite3.Binary(gzip.zlib.compress(content))
        size += len(gziphtml)
        conn.execute("insert into mainpages (url, headers, content) values (?,?,?)",
                     (seed, str(headers), gziphtml))
        et = time.time()
        spend += (et - st)
        cnt += 1
        if cnt % 100 == 0:
            print "cost:", spend / cnt, cnt, connque.qsize(), size / 1024 / 1024
            conn.commit()
def main():
    done_que = RedisQueue('seed')
    run_que = RedisQueue('run')
    run_que.flushdb()
    conn = sqlite3.connect('site_data.db')
    conn.execute("create table if not exists mainpages "
                 "(id integer primary key autoincrement, "
                 "url TEXT, headers TEXT, content BLOB)")
    spend = 0
    cnt = 0
    size = 0
    while True:
        data = cPickle.loads(done_que.get())
        st = time.time()
        urls = geturls(data['url'], data['content'])
        if len(urls) == 0:
            continue
        for url in urls:
            if url not in bfdone:
                run_que.put(url)
        gziphtml = sqlite3.Binary(gzip.zlib.compress(data['content']))
        size += len(gziphtml)
        conn.execute("insert into mainpages (url, headers, content) values (?,?,?)",
                     (data['url'], str(data['headers']), gziphtml))
        et = time.time()
        spend += (et - st)
        cnt += 1
        if cnt % 10 == 0:
            print "cost:", spend / cnt, cnt, done_que.qsize(), size / 1024 / 1024
            conn.commit()