我们从Python开源项目中,提取了以下50个代码示例,用于说明如何使用sqlite3.Row()。
def retrieve_garden_from_db(self):
    """Load the whole ``garden`` table into a dict of dicts keyed by the
    first column (plant id), one entry per row."""
    # Need to allow write permissions by others
    conn = sqlite3.connect(self.garden_db_path)
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()
    cursor.execute('SELECT * FROM garden ORDER BY owner')
    rows = cursor.fetchall()
    conn.close()
    # Columns 1..5 are mapped positionally onto the named fields below.
    return {
        row[0]: {
            "owner": row[1],
            "description": row[2],
            "age": row[3],
            "score": row[4],
            "dead": row[5],
        }
        for row in rows
    }
def CheckSqliteRowHashCmp(self):
    """Checks if the row object compares and hashes correctly"""
    self.con.row_factory = sqlite.Row
    first = self.con.execute("select 1 as a, 2 as b").fetchone()
    second = self.con.execute("select 1 as a, 2 as b").fetchone()
    third = self.con.execute("select 1 as a, 3 as b").fetchone()
    # Equality is by value: reflexive and across identical result rows.
    self.assertEqual(first, first)
    self.assertEqual(first, second)
    self.assertTrue(second != third)
    self.assertFalse(first != first)
    self.assertFalse(first != second)
    self.assertFalse(second == third)
    self.assertEqual(first, second)
    # Equal rows must hash equal; differing rows must not.
    self.assertEqual(hash(first), hash(second))
    self.assertNotEqual(first, third)
    self.assertNotEqual(hash(first), hash(third))
def DBStatistics(db_file):
    """ Prints the following DB statistics and info:
    Name SolutionsGenerated DesignsFound SolutionsGenerated/DesignsFound

    Fixed to use the print() function: the original Python-2 print
    statements are syntax errors under Python 3 (which this file already
    targets -- it uses f-strings elsewhere).

    :param str db_file: path to the SQLite database file
    """
    #### Create connection to DB
    con = sqlite3.connect(db_file)
    con.isolation_level = None  # autocommit mode
    con.row_factory = sqlite3.Row
    cur = con.cursor()
    # get statistics
    cur.execute("select (select count(1) from desired_solution where status = 'DONE') as 'Designs Found', "
                "(select count(1) from generated_solution) as 'Solutions Generated'")
    # print header
    print("DB name\tNum. of generated solutions\tNum. of combinations\tRatio")
    result = cur.fetchone()
    # Guard against division by zero when nothing has been generated yet.
    if result['Solutions Generated'] != 0:
        print(db_file, "\t", result['Solutions Generated'], "\t",
              result['Designs Found'], "\t",
              result['Designs Found'] / float(result['Solutions Generated']))
    con.close()
def __connect(self):
    """
    Private function for connecting and setting return type for select ops.
    Raises a DbConnectionError when fails to connect.
    """
    # Close and reopen when a connection is already active.
    if self.conn:
        logging.info("AuthorizationDataManager: Reconnecting to %s on request",
                     self.db_path)
        self.__close()
    try:
        connection = sqlite3.connect(self.db_path)
    except sqlite3.Error as err:
        logging.error("Failed to connect to DB (%s): %s", self.db_path, err)
        raise DbConnectionError(self.db_path)
    # Use return rows as Row instances instead of tuples
    connection.row_factory = sqlite3.Row
    self.conn = connection
def read_tokens(self, db) -> Iterator[DeviceConfig]:
    """Read device information out from a given database file.

    Detects whether the file is the Android variant (``devicerecord``
    table) or the Apple variant (``ZDEVICE`` table) and dispatches to the
    matching reader; logs an error for anything else.

    :param str db: Database file"""
    self.db = db
    # Lazy %-args: message is only formatted when the level is enabled
    # (the original formatted eagerly with the % operator).
    _LOGGER.info("Reading database from %s", db)
    self.conn = sqlite3.connect(db)
    self.conn.row_factory = sqlite3.Row
    with self.conn:
        is_android = self.conn.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name='devicerecord';").fetchone() is not None
        is_apple = self.conn.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name='ZDEVICE'").fetchone() is not None
        if is_android:
            yield from self.read_android()
        elif is_apple:
            yield from self.read_apple()
        else:
            _LOGGER.error("Error, unknown database type!")
def connect(self):
    """Open the SQLite database file, creating the directory/file on first
    use, and apply speed-oriented PRAGMA settings."""
    if self not in opened_dbs:
        opened_dbs.append(self)
    self.log.debug("Connecting to %s (sqlite version: %s)..." % (self.db_path, sqlite3.version))
    if not os.path.isdir(self.db_dir):  # Directory not exist yet
        os.makedirs(self.db_dir)
        self.log.debug("Created Db path: %s" % self.db_dir)
    if not os.path.isfile(self.db_path):
        self.log.debug("Db file not exist yet: %s" % self.db_path)
    self.conn = sqlite3.connect(self.db_path)
    self.conn.row_factory = sqlite3.Row
    self.conn.isolation_level = None  # autocommit; transactions are managed by callers
    self.cur = self.getCursor()
    # We need more speed than durability: relax the journal/sync settings.
    # NOTE(review): the second PRAGMA overrides the first, so the effective
    # journal_mode is MEMORY, not WAL -- confirm whether WAL was intended.
    self.cur.execute("PRAGMA journal_mode = WAL")
    self.cur.execute("PRAGMA journal_mode = MEMORY")
    self.cur.execute("PRAGMA synchronous = OFF")
    if self.foreign_keys:
        self.execute("PRAGMA foreign_keys = ON")

# Execute query using dbcursor
def _run_search(db_ref: PathInfo,
                sql_query: Text,
                query_bindings: Iterable[Text]
                ) -> Iterable[NamedTuple]:
    """
    Returns the search results as a list of NamedTuples of records.
    Accepts --
    db_ref: an open connection, or a database file path,
    sql_query: a formed SQL query,
    query_bindings: list of attributes for the query.
    """
    with open(app_inf_path, 'r') as read_json_obj:
        app_inf = json.load(read_json_obj)
    DBRecord = namedtuple('DBRecord', app_inf['search_fieldnames'])
    try:
        query_results = db_ref.execute(sql_query, query_bindings)
    except AttributeError:
        # db_ref is a path, not a connection: open the database ourselves.
        with sqlite3.connect(db_ref) as sink_conn:
            sink_conn.row_factory = sqlite3.Row
            # Fetch eagerly so rows survive past this block and fall through
            # to the DBRecord conversion below.  (Previously this path
            # returned raw rows, contradicting the documented return type.)
            query_results = sink_conn.execute(sql_query, query_bindings).fetchall()
    return [DBRecord(*result) for result in query_results]
def establish_benchmark(profile_dbs):
    """Merge the ``moz_places`` rows of several profile databases into one
    ordered dict keyed by ``url_hash`` (later profiles win on collisions).

    :param profile_dbs: mapping of profile name -> database file path
    :return: ordered dict of url_hash -> ordered row dict
    """
    # (Removed an unused local that called helpers.incrementer().)
    profile_records_dict = odict()
    for profile_name, profile_path in profile_dbs.items():
        conn = sqlite3.connect(profile_path)
        conn.row_factory = sqlite3.Row
        cur = conn.cursor()
        query = '''SELECT * FROM moz_places'''
        try:
            cur.execute(query)
        except sqlite3.OperationalError:
            # Profile without a moz_places table: skip it.
            pass
        else:
            profile_records_dict.update({profile_name: [dict(row) for row in cur]})
        finally:
            conn.close()
    merged_as_dict = odict({
        info['url_hash']: odict(info)
        for profile_info in profile_records_dict.values()
        for info in profile_info
    })
    return merged_as_dict
def edit_table(source_db='db_for_testing_search.sqlite',
               sink_db='db_for_testing_search_new.sqlite'):
    """Copy ``moz_places`` from *source_db* into *sink_db*, filling in a
    human-readable ``last_visit_date_readable`` value for each row.

    :param source_db: database to read from (default keeps the old
        hard-coded filename, so existing callers are unaffected)
    :param sink_db: database to write to; the table is created if missing
    """
    from datetime import datetime as dt
    from collections import OrderedDict as odict
    moz_places_fields = ('id', 'url', 'title', 'rev_host', 'visit_count',
                         'hidden', 'typed', 'favicon_id', 'frecency',
                         'last_visit_date', 'guid', 'foreign_count',
                         'url_hash', 'description', 'preview_image_url',
                         'last_visit_date_readable')
    bindings_placeholders = '?, ' * len(moz_places_fields)
    with sqlite3.connect(source_db) as source_conn:
        source_conn.row_factory = sqlite3.Row
        query_source_result = source_conn.execute('SELECT * FROM moz_places')
        with sqlite3.connect(sink_db) as sink_conn:
            try:
                sink_conn.execute(f"CREATE TABLE moz_places ({', '.join(moz_places_fields)})")
            except Exception as excep:
                # Table already exists (or cannot be created): report and
                # carry on copying rows.
                print(excep)
            finally:
                for row in query_source_result:
                    row = odict(row)
                    row.setdefault('last_visit_date_readable', None)
                    try:
                        row['last_visit_date_readable'] = dt.fromtimestamp(
                            row['last_visit_date'] // 10**6).strftime('%x %X')
                    except TypeError:
                        # last_visit_date is NULL: leave the readable form unset.
                        pass
                    # qmark ('?') placeholders require a positional sequence;
                    # passing the dict itself raises ProgrammingError.
                    sink_conn.execute(
                        f'INSERT INTO moz_places VALUES ({bindings_placeholders[:-2]})',
                        tuple(row.values()))
def CheckSqliteRowIndex(self):
    """Rows must be addressable by column name (case-insensitively) and by
    numeric index."""
    self.con.row_factory = sqlite.Row
    row = self.con.execute("select 1 as a, 2 as b").fetchone()
    self.assertTrue(isinstance(row, sqlite.Row),
                    "row is not instance of sqlite.Row")
    col1, col2 = row["a"], row["b"]
    self.assertTrue(col1 == 1, "by name: wrong result for column 'a'")
    # Fixed message: this assertion checks column 'b', not 'a'.
    self.assertTrue(col2 == 2, "by name: wrong result for column 'b'")
    col1, col2 = row["A"], row["B"]
    self.assertTrue(col1 == 1, "by name: wrong result for column 'A'")
    self.assertTrue(col2 == 2, "by name: wrong result for column 'B'")
    col1, col2 = row[0], row[1]
    self.assertTrue(col1 == 1, "by index: wrong result for column 0")
    self.assertTrue(col2 == 2, "by index: wrong result for column 1")
def CheckSqliteRowHashCmp(self):
    """Checks if the row object compares and hashes correctly"""
    self.con.row_factory = sqlite.Row

    def fetch(query):
        return self.con.execute(query).fetchone()

    row_a = fetch("select 1 as a, 2 as b")
    row_b = fetch("select 1 as a, 2 as b")
    row_c = fetch("select 1 as a, 3 as b")
    # Value equality: a row equals itself and any identical row.
    self.assertTrue(row_a == row_a)
    self.assertTrue(row_a == row_b)
    self.assertTrue(row_b != row_c)
    self.assertFalse(row_a != row_a)
    self.assertFalse(row_a != row_b)
    self.assertFalse(row_b == row_c)
    self.assertEqual(row_a, row_b)
    # Hashing must agree with equality.
    self.assertEqual(hash(row_a), hash(row_b))
    self.assertNotEqual(row_a, row_c)
    self.assertNotEqual(hash(row_a), hash(row_c))
def __init__(self, root_dir, writeable=False):
    """Creates a new sequence repository if necessary, and then opens it

    :param root_dir: directory holding the repository; the SQLite file is
        ``db.sqlite3`` inside it
    :param writeable: when True, the directory is created and schema
        migrations are run before opening
    :raises RuntimeError: if the on-disk schema version does not match
        the version this code expects
    """
    self._root_dir = root_dir
    self._db_path = os.path.join(self._root_dir, "db.sqlite3")
    self._writing = None
    self._db = None
    self._writeable = writeable
    if self._writeable:
        # Only writers create the directory and migrate the schema.
        makedirs(self._root_dir, exist_ok=True)
        self._upgrade_db()
    self._db = sqlite3.connect(self._db_path)
    schema_version = self.schema_version()
    self._db.row_factory = sqlite3.Row
    # if we're not at the expected schema version for this code, bail
    if schema_version != expected_schema_version:
        raise RuntimeError("""Upgrade required: Database schema version is {} and code expects {}""".format(schema_version, expected_schema_version))

# ############################################################################
# Special methods
def __init__(self, db_path, writeable=False):
    """Open (and, when *writeable*, first upgrade) the sequence database.

    :param db_path: path to the SQLite database file
    :param writeable: when True, schema migrations run before opening
    :raises RuntimeError: if the on-disk schema version does not match
        the version this code expects
    """
    self._db_path = db_path
    self._db = None
    self._writeable = writeable
    if self._writeable:
        self._upgrade_db()
    self._db = sqlite3.connect(self._db_path)
    schema_version = self.schema_version()
    self._db.row_factory = sqlite3.Row
    # if we're not at the expected schema version for this code, bail
    if schema_version != expected_schema_version:    # pragma: no cover
        # Fixed: the concatenated message lacked a space, producing
        # "Database schemaversion is ...".
        raise RuntimeError("Upgrade required: Database schema "
                           "version is {} and code expects {}".format(
                               schema_version, expected_schema_version))
def initUser():
    """Reset per-user quotas in ``lootan.db`` at most once every 6 hours.

    Reads the current config; when the last reset is more than six hours
    old (or never happened), updates every sender's give/begging quotas,
    tops low gold balances up to the configured base, and records the
    reset timestamp back into ``lootan.ini``.
    """
    cof = returnConfig()
    inituser_time = cof.get("inituser_time", False)
    # Only act when the last reset is older than the 6-hour window.
    if not inituser_time or time.time() - float(cof['inituser_time']) > 60 * 60 * 6:
        conn = sqlite3.connect('lootan.db')
        cur = conn.cursor()
        # Parameterized statements instead of %-interpolated SQL.
        cur.execute('update `wwf_sender` set `give_num` = ? ,`begging_num` = ? WHERE 1',
                    (cof['give_num'], cof['begging_num']))
        # Top balances of 100 gold or less back up to the configured base.
        cur.execute('update `wwf_sender` set `gold_num` = ? WHERE `gold_num` <= 100 ',
                    (cof['gold_base'],))
        conn.commit()
        conn.close()
        # Persist the reset timestamp so the next run honours the window.
        cp = ConfigParser.SafeConfigParser()
        cp.read('lootan.ini')
        cp.set('baseconf', 'inituser_time', str(time.time()))
        with open('lootan.ini', 'w') as fp:
            cp.write(fp)
def getlastread():
    """Return the most recent ``wwf_log`` row as a plain dict, or None
    when the log is empty."""
    conn = sqlite3.connect('lootan.db')
    conn.row_factory = sqlite3.Row  # enable access by column name
    cur = conn.cursor()
    # Newest entry first; only the single latest row is needed.
    cur.execute('SELECT * FROM `wwf_log` WHERE 1 order by `dateline` desc limit 1')
    doinfo = cur.fetchone()
    # A SELECT needs no commit; just release the handle (the original
    # committed needlessly and never closed the connection).
    conn.close()
    if doinfo is None:
        return None
    return {key: doinfo[key] for key in doinfo.keys()}
def addkeyinfo(self, issuer, key_id, public_key, cache_timer=0, next_update=0):
    """
    Add a single, known public key to the cache.

    :param str issuer: URI of the issuer
    :param str key_id: Key Identifier
    :param public_key: Cryptography public_key object
    :param int cache_timer: Cache lifetime of the public_key
    :param int next_update: Seconds until next update time
    """
    # If the next_update is 0, then set it to 1 hour
    if next_update == 0:
        next_update = 3600
    conn = sqlite3.connect(self.cache_location)
    conn.row_factory = sqlite3.Row
    curs = conn.cursor()
    # Parameterized DELETE: issuer/key_id may contain quotes; never build
    # SQL with str.format on external values.
    curs.execute("DELETE FROM keycache WHERE issuer = ? AND key_id = ?",
                 (issuer, key_id))
    KeyCache._addkeyinfo(curs, issuer, key_id, public_key,
                         cache_timer=cache_timer, next_update=next_update)
    conn.commit()
    conn.close()
def rescan_errored(mpd_root):
    """
    Rescan only errored files.

    Reads the filenames recorded in the ``errors`` table and re-runs
    blissify on them.
    """
    # Connect to db
    db_path = os.path.join(_BLISSIFY_DATA_HOME, "db.sqlite3")
    logging.debug("Using DB path: %s." % (db_path,))
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    conn.execute('pragma foreign_keys=ON')
    cur = conn.cursor()
    # Get errored files
    cur.execute("SELECT filename FROM errors")
    # fetchall() yields Row objects; extract plain strings -- Rows are not
    # valid subprocess argv entries.  (fetchall never returns None, so the
    # old `is not None` check was a no-op; test for emptiness instead.)
    errors = [row["filename"] for row in cur.fetchall()]
    # Rerun blissify on them
    if errors:
        subprocess.check_call(["blissify", mpd_root] + errors)
def log_get(self, chat_id, datetime_from, datetime_to=None):
    """Count the log rows for *chat_id* between two datetimes.

    :param chat_id: chat identifier to filter on
    :param datetime_from: inclusive lower bound (naive UTC datetime)
    :param datetime_to: inclusive upper bound; defaults to "now" at call
        time.  (The old ``datetime.utcnow()`` default was evaluated once
        at import time, silently freezing the upper bound.)
    :return: dict with task_name, query_result['msg_count'] and exec_time
    """
    if datetime_to is None:
        datetime_to = datetime.utcnow()
    start_time = time.time()
    result = {"task_name": "log_get"}
    query_result = {}
    handle = sqlite3.connect(self._dbpath)
    handle.row_factory = sqlite3.Row
    cursor = handle.cursor()
    epoch = datetime(1970, 1, 1)
    query = (chat_id,
             (datetime_from - epoch).total_seconds(),
             (datetime_to - epoch).total_seconds(),)
    msgs = cursor.execute(
        "SELECT * FROM logs WHERE chat_id=? AND date>=? AND date<=?",
        query).fetchall()
    query_result["msg_count"] = len(msgs)
    for msg in msgs:
        print(msg)
    result["query_result"] = query_result
    result["exec_time"] = time.time() - start_time
    return result
def get_user(self, user_id):
    """Look up one user row by id and return selected fields in a dict
    (empty when the user does not exist), plus timing info."""
    started_at = time.time()
    result = {"task_name": "get_user", "user": {}}
    handle = sqlite3.connect(self._dbpath)
    handle.row_factory = sqlite3.Row
    cursor = handle.cursor()
    user = cursor.execute("SELECT * FROM users WHERE id=?",
                          (user_id,)).fetchone()
    if user:
        wanted = ("first_name", "last_name", "username",
                  "id", "started", "bot_admin")
        result["user"] = {field: user[field] for field in wanted}
    result["exec_time"] = time.time() - started_at
    return result
def toggle_user_option(self, user_id, option_name, chat_id=None):
    """Flip a single notify option for a user -- globally, or for one chat
    when *chat_id* is given -- and persist the new bitmask."""
    started_at = time.time()
    result = {"task_name" : "toggle_user_options"}
    handle = sqlite3.connect(self._dbpath)
    handle.row_factory = sqlite3.Row
    cursor = handle.cursor()
    # Per-chat options live in users_chats; global ones on the user row.
    if chat_id:
        query = "SELECT options FROM users_chats WHERE user_id=? AND chat_id=?"
        query_args = (user_id, chat_id)
        query_commit = "UPDATE users_chats SET options=? WHERE user_id=? AND chat_id=?"
    else:
        query = "SELECT options FROM users WHERE id=?"
        query_args = (user_id,)
        query_commit = "UPDATE users SET options=? WHERE id=?"
    options_db = cursor.execute(query, query_args).fetchone()
    if options_db:
        # XOR toggles exactly the requested flag, leaving the rest intact.
        new_options = NotifyOptions(option_name) ^ NotifyOptions(options_db["options"])
        cursor.execute(query_commit, (int(new_options),) + query_args)
        handle.commit()
    result["exec_time"] = time.time() - started_at
    return result
def _connect_read_only(self):
    ''' Connect database with sqlite3 '''
    gpkg_path = self._gpkg
    try:
        connection = sqlite3.connect(gpkg_path)
        # Open in read-only mode needs Python 3.4+
        # conn = sqlite3.connect('file:%s?mode=ro' % self._gpkg, uri=True)
        # Workaround: connect() creates a zero-byte file for a missing
        # path; treat that as "not found" and remove the stray file.
        if os.stat(gpkg_path).st_size == 0:
            os.remove(gpkg_path)
            self.log(logging.ERROR, "Couldn't find GeoPackage '%s'" % gpkg_path)
            return None
        connection.row_factory = sqlite3.Row
        return connection.cursor()
    except sqlite3.Error as err:
        self.log(logging.ERROR, "Couldn't connect to GeoPackage: %s" % err.args[0])
        return None
def get_stats(player_id, player_position):
    """ Downloads all stats listed on FanGraphs for player_id at player_position

    Returns the list of parsed per-game stat records.  (Previously
    ``all_stats`` was declared and returned but never populated, so the
    function always returned an empty list even after parsing.)
    """
    base_player_url = \
        'http://www.fangraphs.com/statsd.aspx?playerid={}&position={}&type=1&gds=&gde=&season=all'
    url = base_player_url.format(player_id, player_position)
    regex_row = r"rg(Alt)?Row"
    all_stats = []
    resp = requests.get(url)
    soup = BeautifulSoup(resp.text, 'html.parser')
    dates = soup.findAll('tr', class_=re.compile(regex_row))
    for date_info in dates:
        # Skip the season-total row; only per-date rows are wanted.
        if 'Total' not in date_info.find('td').text:
            print(url)
            if player_position == 'P':
                stats = parse_pitcher_stats(date_info, player_id, player_position)
                insert_stats(stats, 'pitcher')
            else:
                stats = parse_player_stats(date_info, player_id, player_position)
                insert_stats(stats, 'player')
            all_stats.append(stats)
    return all_stats
def execute_insertion(self, query, *args):
    """Executes a query and returns the entire cursor

    This is intended to return the cursor, which needs to be closed after
    the results are fetched. Works for INSERT statements as well as others
    such as SELECT.

    :param string query: The SQL query.
    :return: A cursor that must be closed
    :rtype: sqlite3.Cursor
    """
    conn = self.connection
    # Rows come back as sqlite3.Row (dict-like) instead of bare tuples.
    conn.row_factory = sqlite3.Row
    cur = conn.cursor()
    # Foreign-key enforcement would need enabling per connection:
    # cursor.execute("PRAGMA foreign_keys = ON;")
    cur.execute(query, args)
    conn.commit()
    return cur  # caller is responsible for closing it
def refreshList(self, dbFilter=None):
    """Rebuild this tree widget from its backing database table.

    :param dbFilter: optional SQL WHERE clause used to narrow the rows
    """
    self.clear()
    listHeader = self.headerItem()
    # Column list: rowid plus every named header column, comma-separated.
    parts = ["rowid,"]
    for i in range(1, listHeader.columnCount()):
        parts.append(listHeader.text(i))
        if i < listHeader.columnCount() - 1:
            parts.append(",")
    data = "".join(parts)
    conn = sqlite3.connect(self.database)
    conn.row_factory = sqlite3.Row
    statement = "SELECT %s FROM %s" % (data, self.listType)
    if dbFilter is not None:
        statement += " WHERE %s" % dbFilter
    jobList = conn.execute(statement).fetchall()
    for item in jobList:
        widgetItem = QtGui.QTreeWidgetItem(self)
        for i, column in enumerate(jobList[0].keys()):
            try:
                widgetItem.setText(i, str(item[column]))
            except:
                pass
def requestTask(self):
    """Claim the highest-priority waiting farm task.

    Marks the chosen task as running and returns the client command
    ``/execute <rowid>``; returns ``/waitForNext`` when no task is
    waiting or the claim fails.  (The original relied on a NameError
    escaping from an empty result set and a Python-2 print statement.)
    """
    # Connect to database
    conn = sqlite3.connect(pc.DATABASEPATH)
    conn.row_factory = sqlite3.Row  # Needed to get database as a array
    cur = conn.cursor()
    try:
        row = cur.execute(
            "SELECT rowid FROM farm_tasks WHERE status='waiting' "
            "ORDER BY priority DESC LIMIT 1").fetchone()
        if row is None:
            # Nothing queued: tell the client to keep polling.
            return "/waitForNext"
        print(row)
        taskId = int(row[0])
        # Parameterized UPDATE instead of %-interpolated SQL.
        cur.execute('UPDATE farm_tasks SET status="run" WHERE rowid=?',
                    (taskId,))
        conn.commit()
        return "/execute %i" % taskId
    except Exception:
        # Any DB failure degrades to "try again later".
        return "/waitForNext"
    finally:
        conn.close()
def load_all_nets_from_db(dfile, clist, conly):
    """Load every network row from the database.

    :param dfile: path to the SQLite database
    :param clist: client rows; element 0 of each is the associated BSSID
    :param conly: when True, keep only networks that have a client attached
    :return: list of parsed network dicts
    """
    # A set gives O(1) membership tests with the same semantics as the
    # original list.
    client_bssids = {client[0] for client in clist}
    netlist = []
    con = sql.connect(dfile)
    with con:
        con.row_factory = sql.Row
        cur = con.cursor()
        cur.execute("SELECT * from networks")
        for row in cur.fetchall():
            # Either keep everything, or only networks with clients; the
            # two original branches ran identical code and are merged.
            if not conly or row['bssid'] in client_bssids:
                netlist.append(parse_db_row(row, conly))
    return netlist

# Load networks that match a specific SQL query
def __init__(self, db_path):
    """Open (or create) the tweet archive at *db_path* and ensure the
    schema exists: an FTS4 ``tweets`` table plus ``user``/``following``
    profile tables.

    NOTE(review): FTS4 ignores column types/constraints, so the NOT NULL
    and UNIQUE clauses in the tweets declaration have no effect -- confirm
    whether real constraints were intended.
    """
    self.db_path = db_path
    self.connection = sqlite3.connect(db_path)
    self.connection.row_factory = sqlite3.Row
    script = """
        CREATE VIRTUAL TABLE IF NOT EXISTS tweets USING fts4 (
            twitter_id INTEGER NOT NULL,
            text VARCHAR(500) NOT NULL,
            timestamp INTEGER NOT NULL UNIQUE(twitter_id));
        CREATE TABLE IF NOT EXISTS user(
            screen_name VARCHAR(500) NOT NULL,
            name VARCHAR(500),
            description VARCHAR(500),
            location VARCHAR(500),
            UNIQUE(screen_name));
        CREATE TABLE IF NOT EXISTS following(
            screen_name VARCHAR(500) NOT NULL,
            name VARCHAR(500),
            description VARCHAR(500),
            location VARCHAR(500),
            UNIQUE(screen_name));
        """
    self.connection.executescript(script)
    self.connection.commit()
def _generateDatabase(self):
    """Create a trimmed "AutoDiff_" copy of the BinDiff database next to
    the original, keeping only functions that still need manual review."""
    self._initDB()
    dbPath = self._binDiffSQL.getDbPath()
    dirPath, fileName = os.path.split(dbPath)
    autoDiffDBPath = os.path.join(dirPath, "AutoDiff_" + fileName)
    shutil.copy(dbPath, autoDiffDBPath)
    # connect to new database
    db = sqlite3.connect(autoDiffDBPath)
    db.row_factory = sqlite3.Row
    # Drop functions that are identical (similarity >= 1.0) or already
    # summarized by the sanitizer -- they need no manual diffing.
    db.execute("DELETE FROM function WHERE similarity >= 1.0 OR id IN (SELECT func_id FROM sanitizer_summary)")
    db.commit()
    db.close()
    Logger.log("AutoDiff'ed BinDiff database is ready to load!!!")
    Logger.log("FILE : %s" % autoDiffDBPath)
def get_rows_as_dict(db, sql, entry=None):
    """Run *sql* against the database at *db* and return the result set as
    a list of plain dicts, one per row.

    :param db: database file path
    :param sql: SQL text, optionally with placeholders
    :param entry: optional parameter sequence for the placeholders
    """
    conn = sqlite3.connect(db)
    conn.row_factory = sqlite3.Row  # This enables column access by name: row['column_name']
    cursor = conn.cursor()
    if entry is None:
        fetched = cursor.execute(sql).fetchall()
    else:
        fetched = cursor.execute(sql, entry).fetchall()
    conn.commit()  # harmless for SELECTs; persists any mutating statement
    conn.close()
    return [dict(row) for row in fetched]
def index(request):
    """Render the paginated user list for the index page."""
    conn = sqlite3.connect(app.config.DB_PATH)
    conn.row_factory = sqlite3.Row
    cur = conn.cursor()
    cur.execute('select count(*) from users')
    total = cur.fetchone()[0]
    page, per_page, offset = Pagination.get_page_args(request)
    # Bind limit/offset as parameters instead of str.format-ing them into
    # the SQL text.
    cur.execute('select name from users limit ?, ?', (offset, per_page))
    users = cur.fetchall()
    cur.close()
    conn.close()
    pagination = Pagination(request, total=total, record_name='users')
    return jinja.render('index.html', request, users=users,
                        pagination=pagination)
def get_contained_cities(points, country=None):
    """Return City objects inside the bounding box *points*, optionally
    restricted to one country code, ordered by population (descending).

    :param points: ((lat1, lon1), (lat2, lon2)) bounding-box corners
    :param country: optional country code filter
    """
    logging.info("citiesdb search location is %s", points)
    values = [points[0][0], points[1][0], points[0][1], points[1][1]]
    query = ''
    if country:
        values += [country]
        # Fixed: a stray trailing comma previously made `query` a 1-tuple;
        # the %-formatting below only worked by accident of tuple
        # substitution.
        query = ' and country_code = ?'
    connection = sqlite_db.get_connection('cities', TEST_FILENAME)
    connection.row_factory = sqlite3.Row
    cursor = connection.cursor()
    cursor.execute(
        'select * from City where ? < latitude and latitude < ? and ? < longitude and longitude < ? %s order by population desc' % query,
        values
    )
    results = cursor.fetchall()
    return [City(x) for x in results]
def get_list_by_all_types(self, project_name):
    """Collect the rows of every known asset-type table in the project's
    assets database; asset types without a table are skipped silently."""
    # get project
    result = self.get_project(project_name)
    if not result[0]:
        return (False, result[1])
    conn = sqlite3.connect(
        self.assets_path,
        detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()
    assets_list = []
    for asset_type in self.asset_types:
        try:
            cursor.execute('select * from ' + asset_type)
            assets_list.extend(cursor.fetchall())
        except:
            # No table for this asset type in the database.
            continue
    conn.close()
    return (True, assets_list)
def get_name_list_by_type(self, project_name, asset_type):
    """Return the list of asset *names* from the given asset-type table.

    Returns ``(True, [name, ...])`` on success, ``(True, [])`` when the
    table does not exist, or ``(False, reason)`` when the project lookup
    fails.  (Bug fix: the function built the name list but then returned
    the raw rows instead.)
    """
    result = self.get_project(project_name)
    if not result[0]:
        return (False, result[1])
    conn = sqlite3.connect(
        self.assets_path,
        detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
    conn.row_factory = sqlite3.Row
    c = conn.cursor()
    try:
        c.execute('select * from ' + asset_type)
        rows = c.fetchall()
        names = [row['name'] for row in rows]
        conn.close()
        return (True, names)
    except:
        conn.close()
        return (True, [])
def get_id_name_dict_by_type(self, project_name, asset_type):
    """Return ``(True, {id: name})`` for the given asset-type table,
    ``(True, {})`` when the table does not exist, or ``(False, reason)``
    when the project lookup fails.

    (Consistency fix: the missing-table path previously returned a list,
    while the success path returns a dict.)
    """
    result = self.get_project(project_name)
    if not result[0]:
        return (False, result[1])
    conn = sqlite3.connect(
        self.assets_path,
        detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
    conn.row_factory = sqlite3.Row
    c = conn.cursor()
    try:
        c.execute('select * from ' + asset_type)
        rows = c.fetchall()
        asset_id_name_dict = {row['id']: row['name'] for row in rows}
        conn.close()
        return (True, asset_id_name_dict)
    except:
        conn.close()
        return (True, {})
def get_by_name(self, project_name, asset_type, asset_name):
    """Fetch a single asset row by name from the given asset-type table.

    Returns ``(True, row_or_None)`` on success; ``(False, message)`` when
    the table is missing or the project lookup fails."""
    result = self.get_project(project_name)
    if not result[0]:
        return (False, result[1])
    conn = sqlite3.connect(
        self.assets_path,
        detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
    conn.row_factory = sqlite3.Row
    c = conn.cursor()
    try:
        c.execute('select * from ' + asset_type + ' where "name" = ?',
                  (asset_name,))
        row = c.fetchone()
        conn.close()
        return (True, row)
    except:
        conn.close()
        return (False, 'Not Asset With This Name!')
def connect_db():
    """Connects to the specific database."""
    connection = sqlite3.connect(app.config['DATABASE'])
    # Optionally echo every executed statement for debugging.
    if app.config['DATABASE_LOG']:
        connection.set_trace_callback(print)
    connection.row_factory = sqlite3.Row
    return connection
def connect_db(db_path='nweb.db'):
    """Connects to the specific database.

    :param db_path: database file to open; defaults to the historical
        hard-coded ``nweb.db`` so existing callers are unaffected
    :return: connection with ``sqlite3.Row`` as the row factory
    """
    rv = sqlite3.connect(db_path)
    rv.row_factory = sqlite3.Row
    return rv
def perform_liveresponse(lr_session):
    """Collect, per logged-in user, the 10 most recent Chrome history URLs
    from the sensor, plus the sensor's running services.

    :param lr_session: live-response session object
    :return: (sensor_id, running_services, {user: [url dicts]})
    """
    running_processes = lr_session.list_processes()
    results = defaultdict(list)
    # get list of logged in users: owners of explorer.exe processes
    users = set([proc['username'].split('\\')[-1]
                 for proc in running_processes
                 if proc['path'].find('explorer.exe') != -1])
    for user in users:
        try:
            with NamedTemporaryFile(delete=False) as tf:
                history_fp = lr_session.get_raw_file(
                    "c:\\users\\%s\\appdata\\local\\google\\chrome\\user data\\default\\history" % user)
                shutil.copyfileobj(history_fp, tf.file)
                tf.close()
            db = sqlite3.connect(tf.name)
            db.row_factory = sqlite3.Row
            cur = db.cursor()
            cur.execute(
                "SELECT url, title, datetime(last_visit_time / 1000000 + (strftime('%s', '1601-01-01')), 'unixepoch') as last_visit_time FROM urls ORDER BY last_visit_time DESC LIMIT 10")
            urls = [dict(u) for u in cur.fetchall()]
        except Exception:
            # Best effort per user; the old bare `except:` also swallowed
            # KeyboardInterrupt/SystemExit, so it is narrowed to Exception.
            pass
        else:
            results[user] = urls
    running_services = lr_session.create_process("c:\\windows\\system32\\net.exe start")
    return lr_session.sensor_id, running_services, results
def _open_database(self):
    """Opens a connection to the database, populating the schema on
    first use."""
    log.debug("Opening sqlite database {0}".format(self._db_file))
    self._conn = sqlite3.connect(self._db_file)
    # The row factory applies to both the populated and the fresh-database
    # paths; set it once instead of duplicating the assignment per branch.
    self._conn.row_factory = sqlite3.Row
    try:
        log.debug("Checking if database is populated.")
        # Probe a known table; OperationalError means the schema is missing.
        self._conn.execute("select * from airing_anime_list limit 1")
    except sqlite3.OperationalError:
        log.debug("Database needs populated.")
        self._populate_database()
    log.debug("database connection established.")
def get_cursor(self):
    """
    Returns a cursor from the provided database connection that DictORM
    objects expect.
    """
    if self.kind == 'sqlite3':
        # SQLite: dict-like rows via sqlite3.Row.
        self.conn.row_factory = sqlite3.Row
        return self.conn.cursor()
    if self.kind == 'postgresql':
        # PostgreSQL: psycopg2's DictCursor offers the same interface.
        return self.conn.cursor(cursor_factory=DictCursor)