我们从Python开源项目中,提取了以下49个代码示例,用于说明如何使用sqlalchemy.exc.DatabaseError()。
def touch_collection_replicas(collection_replicas, session=None):
    """
    Update the accessed_at timestamp of the given collection replicas.

    :param collection_replicas: the list of collection replicas.
    :param session: The database session in use.
    :returns: True, if successful, False otherwise.
    """
    rse_id_cache = {}
    timestamp = datetime.utcnow()
    for replica in collection_replicas:
        # Resolve the RSE name to an id once per distinct RSE name.
        if 'rse_id' not in replica:
            rse_name = replica['rse']
            if rse_name not in rse_id_cache:
                rse_id_cache[rse_name] = get_rse_id(rse=rse_name, session=session)
            replica['rse_id'] = rse_id_cache[rse_name]
        try:
            session.query(models.CollectionReplica).\
                filter_by(scope=replica['scope'], name=replica['name'], rse_id=replica['rse_id']).\
                update({'accessed_at': replica.get('accessed_at') or timestamp},
                       synchronize_session=False)
        except DatabaseError:
            return False
    return True
def set_new_dids(dids, new_flag, session=None):
    """
    Set/reset the flag new

    :param dids: A list of dids
    :param new_flag: A boolean to flag new DIDs.
    :param session: The database session in use.
    """
    for entry in dids:
        scope, name = entry['scope'], entry['name']
        try:
            updated = session.query(models.DataIdentifier).\
                filter_by(scope=scope, name=name).\
                update({'is_new': new_flag}, synchronize_session=False)
            # A rowcount of zero means no such DID exists.
            if not updated:
                raise exception.DataIdentifierNotFound("Data identifier '%s:%s' not found" % (scope, name))
        except DatabaseError as error:
            raise exception.DatabaseException('%s : Cannot update %s:%s' % (error.args[0], scope, name))
    try:
        session.flush()
    except IntegrityError as error:
        raise exception.RucioException(error.args[0])
    except DatabaseError as error:
        raise exception.RucioException(error.args[0])
    return True
def touch_dids(dids, session=None):
    """
    Update the accessed_at timestamp of the given dids.

    :param dids: the list of dids.
    :param session: The database session in use.
    :returns: True, if successful, False otherwise.
    """
    default_time = datetime.utcnow()
    try:
        for entry in dids:
            access_time = entry.get('accessed_at') or default_time
            session.query(models.DataIdentifier).\
                filter_by(scope=entry['scope'], name=entry['name'], did_type=entry['type']).\
                update({'accessed_at': access_time}, synchronize_session=False)
    except DatabaseError:
        return False
    return True
def add_message(event_type, payload, session=None):
    """
    Add a message to be submitted asynchronously to a message broker.

    :param event_type: The type of the event as a string, e.g., NEW_DID.
    :param payload: The message payload. Will be persisted as JSON.
    :param session: The database session to use.
    :raises InvalidObject: if payload is not JSON-serializable.
    :raises RucioException: if the payload is too large for the DB column.
    """
    try:
        new_message = Message(event_type=event_type, payload=json.dumps(payload))
    except TypeError as e:
        # json.dumps failed: the payload contains a non-serializable object.
        raise InvalidObject('Invalid JSON for payload: %(e)s' % locals())
    except DatabaseError as e:
        # ORA-12899 (Oracle) / 1406 (MySQL): value too large for column.
        if re.match('.*ORA-12899.*', e.args[0]) \
                or re.match('.*1406.*', e.args[0]):
            raise RucioException('Could not persist message, payload too large')
        # Previously an unmatched DatabaseError was swallowed here, leaving
        # new_message unbound and causing a NameError below; re-raise instead.
        raise
    new_message.save(session=session, flush=False)
def write(self, if_exists: str = None):
    """Ensure the backing database exists, then delegate to the parent writer."""
    def _create_database():
        return self._engine.dialect.dbapi.create_database(
            user=self._username,
            password=self._password,
            host=self._server,
            database=self.uuid,
            page_size=self._pagesize)

    from sqlalchemy_utils import database_exists
    from sqlalchemy import exc as dbexceptions
    try:
        # Create only when the existence probe says the DB is missing.
        if not database_exists(self._engine.url):
            created = _create_database()
    except dbexceptions.DatabaseError:
        # The probe itself can fail on some backends; fall back to creating.
        created = _create_database()
    super().write(if_exists=if_exists)
def add_group_plugin(request):
    """
    Schedule the POSTed plugins for a group and redirect to the group page.

    Only a site admin or group admin can do this; can_modify_group enforces it.

    :param request: the pyramid request; group id in matchdict, plugin ids in POST.
    :returns: HTTPFound redirect to the group page.
    """
    gid = request.matchdict['group_id']
    can_modify_group(request, gid)
    plugins = request.POST.get('plugins', None)
    if plugins is None:
        # Nothing to add; just go back to the group page.
        return exc.HTTPFound(location='/group/' + gid)
    for plugin in request.POST.values():
        scheduled_plugin = LedSchedule(led_group_id=gid,
                                       led_plugin_id=int(plugin),
                                       duration=30,
                                       enabled=True,
                                       position=9)
        try:
            request.db_session.add(scheduled_plugin)
        except sql_exc.DatabaseError:
            # Duplicate schedule entry: log and continue with the others.
            # (Python 2 print statement replaced with the py3-compatible call.)
            print(scheduled_plugin, "already in scheduled")
    return exc.HTTPFound(location='/group/' + gid)
def add_group_users(request):
    """
    Add the POSTed users to a group, log the addition, and redirect.

    Only a site admin or group admin can do this; can_modify_group enforces it.

    :param request: the pyramid request; group id in matchdict, user ids in POST.
    :returns: HTTPFound redirect to the group page.
    :raises HTTPBadRequest: if no users were supplied.
    """
    gid = request.matchdict['group_id']
    can_modify_group(request, gid)
    users = request.POST.get('users', None)
    if users is None:
        raise exc.HTTPBadRequest('Please specify users to add to the group')
    new_users = []
    for user in request.POST.values():
        group_user = LedGroupUser(led_group_id=gid, led_user_id=user)
        try:
            request.db_session.add(group_user)
            new_users.append(get_user_by_id(request, user).email)
        except sql_exc.DatabaseError:
            # Duplicate membership: skip and continue with the others.
            # (Python 2 print statement replaced with the py3-compatible call.)
            print(group_user, "already in group")
    log(request, 'Added users to <a href="/group/{0}">group {0}</a>: {1}'.format(gid, ', '.join(new_users)))
    return exc.HTTPFound(location='/group/' + gid)
def _ora_drop_ignore(conn, dbname):
    """
    Best-effort drop of an Oracle test schema.

    :param conn: a live DB connection with an `execute` method.
    :param dbname: the user/schema name to drop.
    :returns: True if the drop succeeded, False otherwise.
    """
    try:
        conn.execute("drop user %s cascade" % dbname)
        # Lazy %-args so the logging module formats only if the record is emitted.
        log.info("Reaped db: %s", dbname)
        return True
    except exc.DatabaseError as err:
        # log.warn is a deprecated alias; use warning() (consistent with the
        # sibling implementation of this helper).
        log.warning("couldn't drop db: %s", err)
        return False
def _ora_drop_ignore(conn, dbname):
    """Drop an Oracle user/schema cascade, logging and ignoring failures."""
    try:
        conn.execute("drop user %s cascade" % dbname)
    except exc.DatabaseError as err:
        log.warning("couldn't drop db: %s", err)
        return False
    log.info("Reaped db: %s", dbname)
    return True
def on_get(self, req, resp):
    """
    Falcon GET handler: exchange rate between two currencies on one date.

    Query params: ``from`` and ``to`` (required, must be in
    SUPPORTED_CURRENCIES) and an optional ``date`` (defaults to today).
    Responds with a JSON body or HTTP 500 when no rate is found.
    """
    from_currency = req.get_param("from", required=True)
    to_currency = req.get_param("to", required=True)
    date_of_exchange = req.get_param_as_date("date")
    date_of_exchange = date_of_exchange if date_of_exchange else date.today()
    # Validate both currencies up front so the error message lists every bad one.
    invalid_currencies = [currency for currency in (from_currency, to_currency) if currency not in SUPPORTED_CURRENCIES]
    if invalid_currencies:
        raise falcon.HTTPInvalidParam("Invalid currency", " and ".join(invalid_currencies))
    exchange_rate = None
    try:
        exchange_rate = self.container.exchange_rate_manager.get_exchange_rate_by_date(date_of_exchange, from_currency, to_currency)
    except DatabaseError:
        # Roll back so the session can reconnect to the DB on the next request.
        self.container.db_session.rollback()
        self.container.logger.exception("Database error occurred. Rollback session to allow reconnect to the DB on next request.")
    except Exception:
        self.container.logger.exception("Unexpected exception while rate request %s->%s (%s)", from_currency, to_currency, date_of_exchange)
    if not exchange_rate:
        self.container.logger.error("Exchange rate not found: rate %s %s->%s", date_of_exchange, from_currency, to_currency)
        raise falcon.HTTPInternalServerError("Exchange rate not found", "Exchange rate not found")
    self.container.logger.info("GET rate %s %s->%s %s", date_of_exchange, from_currency, to_currency, exchange_rate)
    resp.status = falcon.HTTP_200
    resp.body = json.dumps(
        {
            "date": date_of_exchange.strftime(format="%Y-%m-%d"),
            "from_currency": from_currency,
            "to_currency": to_currency,
            "exchange_rate": str(exchange_rate)
        }
    )
def on_get(self, req, resp):
    """
    Falcon GET handler: exchange rate between two currencies over a date range.

    Query params: ``from``, ``to`` (required, must be in SUPPORTED_CURRENCIES),
    ``start_date`` and ``end_date`` (required). A single-day lookup is used
    when the dates are equal, otherwise the average over the range.
    """
    from_currency = req.get_param("from", required=True)
    to_currency = req.get_param("to", required=True)
    start_date = req.get_param_as_date("start_date", required=True)
    end_date = req.get_param_as_date("end_date", required=True)
    # Validate both currencies up front so the error message lists every bad one.
    invalid_currencies = [currency for currency in (from_currency, to_currency) if currency not in SUPPORTED_CURRENCIES]
    if invalid_currencies:
        raise falcon.HTTPInvalidParam("Invalid currency", " and ".join(invalid_currencies))
    exchange_rate = None
    try:
        if start_date == end_date:
            exchange_rate = self.container.exchange_rate_manager.get_exchange_rate_by_date(start_date, from_currency, to_currency)
        else:
            exchange_rate = self.container.exchange_rate_manager.get_average_exchange_rate_by_dates(start_date, end_date, from_currency, to_currency)
    except DatabaseError:
        # Roll back so the session can reconnect to the DB on the next request.
        self.container.db_session.rollback()
        self.container.logger.exception("Database error occurred. Rollback session to allow reconnect to the DB on next request.")
    except Exception:
        self.container.logger.exception("Unexpected exception while range request %s->%s (%s - %s)", from_currency, to_currency, start_date, end_date)
    if not exchange_rate:
        self.container.logger.error("Exchange rate not found: range %s/%s %s->%s", start_date, end_date, from_currency, to_currency)
        raise falcon.HTTPInternalServerError("Exchange rate not found", "Exchange rate not found")
    self.container.logger.info("GET range %s/%s %s->%s %s", start_date, end_date, from_currency, to_currency, exchange_rate)
    resp.status = falcon.HTTP_200
    resp.body = json.dumps(
        {
            "start_date": start_date.strftime(format="%Y-%m-%d"),
            "end_date": end_date.strftime(format="%Y-%m-%d"),
            "from_currency": from_currency,
            "to_currency": to_currency,
            "exchange_rate": str(exchange_rate)
        }
    )
def on_get(self, req, resp):
    """Liveness probe: reports UP when a trivial USD->USD rate lookup succeeds."""
    try:
        rate = self.container.exchange_rate_manager.get_exchange_rate_by_date(date.today(), "USD", "USD")
        if rate:
            body = '{"status": "UP"}'
        else:
            body = '{"status": "DOWN", "info": "No exchange rate available."}'
    except DatabaseError as e:
        # Roll back so the session can reconnect on the next request.
        self.container.db_session.rollback()
        body = '{"status": "DOWN", "info": "Database error. Service will reconnect to the DB automatically. Exception: %s"}' % e
    except Exception as e:
        body = '{"status": "DOWN", "info": "%s"}' % e
    resp.body = body
    resp.status = falcon.HTTP_200
def __bulk_add_new_file_dids(files, account, dataset_meta=None, session=None):
    """
    Bulk add new dids.

    :param files: the list of new files.
    :param account: The account owner.
    :param dataset_meta: optional metadata applied to every new DID.
    :param session: The database session in use.
    :returns: True if successful.
    :raises DataIdentifierAlreadyExists: on an identity-key conflict at flush.
    :raises RucioException: on any other integrity/database/flush error.
    """
    for file in files:
        new_did = models.DataIdentifier(scope=file['scope'], name=file['name'],
                                        account=file.get('account') or account,
                                        did_type=DIDType.FILE, bytes=file['bytes'],
                                        md5=file.get('md5'), adler32=file.get('adler32'),
                                        is_new=None)
        # Per-file metadata first, then dataset-wide metadata.
        for key in file.get('meta', []):
            new_did.update({key: file['meta'][key]})
        for key in dataset_meta or {}:
            new_did.update({key: dataset_meta[key]})
        new_did.save(session=session, flush=False)
    try:
        # Single flush for the whole batch.
        # (Python 2 `except X, error` syntax replaced with `except X as error`.)
        session.flush()
    except IntegrityError as error:
        raise exception.RucioException(error.args)
    except DatabaseError as error:
        raise exception.RucioException(error.args)
    except FlushError as error:
        if match('New instance .* with identity key .* conflicts with persistent instance', error.args[0]):
            raise exception.DataIdentifierAlreadyExists('Data Identifier already exists!')
        raise exception.RucioException(error.args)
    return True
def touch_dataset_locks(dataset_locks, session=None):
    """
    Update the accessed_at timestamp of the given dataset locks + eol_at.

    :param dataset_locks: the list of dataset locks.
    :param session: The database session in use.
    :returns: True, if successful, False otherwise.
    """
    rse_id_cache = {}
    timestamp = datetime.utcnow()
    for lock in dataset_locks:
        # Resolve the RSE name to an id once per distinct RSE name.
        if 'rse_id' not in lock:
            rse_name = lock['rse']
            if rse_name not in rse_id_cache:
                rse_id_cache[rse_name] = get_rse_id(rse=rse_name, session=session)
            lock['rse_id'] = rse_id_cache[rse_name]
        eol_at = define_eol(lock['scope'], lock['name'], rses=[{'id': lock['rse_id']}], session=session)
        try:
            session.query(models.DatasetLock).\
                filter_by(scope=lock['scope'], name=lock['name'], rse_id=lock['rse_id']).\
                update({'accessed_at': lock.get('accessed_at') or timestamp},
                       synchronize_session=False)
            session.query(models.ReplicationRule).\
                filter_by(scope=lock['scope'], name=lock['name']).\
                update({'eol_at': eol_at}, synchronize_session=False)
        except DatabaseError:
            return False
    return True
def add_distance(src_rse_id, dest_rse_id, ranking=None, agis_distance=None, geoip_distance=None,
                 active=None, submitted=None, finished=None, failed=None, transfer_speed=None,
                 session=None):
    """
    Add a src-dest distance.

    :param src_rse_id: The source RSE ID.
    :param dest_rse_id: The destination RSE ID.
    :param ranking: Ranking as an integer.
    :param agis_distance: AGIS Distance as an integer.
    :param geoip_distance: GEOIP Distance as an integer.
    :param active: Active FTS transfers as an integer.
    :param submitted: Submitted FTS transfers as an integer.
    :param finished: Finished FTS transfers as an integer.
    :param failed: Failed FTS transfers as an integer.
    :param transfer_speed: FTS transfer speed as an integer.
    :param session: The database session to use.
    """
    try:
        distance = Distance(src_rse_id=src_rse_id,
                            dest_rse_id=dest_rse_id,
                            ranking=ranking,
                            agis_distance=agis_distance,
                            geoip_distance=geoip_distance,
                            active=active,
                            submitted=submitted,
                            finished=finished,
                            failed=failed,
                            transfer_speed=transfer_speed)
        distance.save(session=session)
    except IntegrityError:
        # The (src, dest) pair is the primary key.
        raise exception.Duplicate('Distance from %s to %s already exists!' % (src_rse_id, dest_rse_id))
    except DatabaseError as error:
        raise exception.RucioException(error.args)
def read_session(function):
    '''
    decorator that set the session variable to use inside a function.
    With that decorator it's possible to use the session variable like if a global
    variable session is declared.

    session is a sqlalchemy session, and you can get one calling get_session().
    This is useful if only SELECTs and the like are being done; anything involving
    INSERTs, UPDATEs etc should use transactional_session.
    '''
    @retry(retry_on_exception=retry_if_db_connection_error,
           wait_fixed=0.5,
           stop_max_attempt_number=2,
           wrap_exception=False)
    @wraps(function)
    def new_funct(*args, **kwargs):
        # Generators would run past session.remove(); refuse them up front.
        if isgeneratorfunction(function):
            raise RucioException('read_session decorator should not be used with generator. Use stream_session instead.')
        if not kwargs.get('session'):
            session = get_session()
            try:
                kwargs['session'] = session
                return function(*args, **kwargs)
            except TimeoutError as error:
                session.rollback()  # pylint: disable=maybe-no-member
                raise DatabaseException(str(error))
            except DatabaseError as error:
                session.rollback()  # pylint: disable=maybe-no-member
                raise DatabaseException(str(error))
            except:
                session.rollback()  # pylint: disable=maybe-no-member
                raise
            finally:
                session.remove()
        # Caller supplied its own session: run without managing it.
        # (The original wrapped this in a no-op `try/except: raise` — removed.
        # Python 2 `except X, error` syntax replaced with `except X as error`.)
        return function(*args, **kwargs)
    new_funct.__doc__ = function.__doc__
    return new_funct
def transactional_session(function):
    '''
    decorator that set the session variable to use inside a function.
    With that decorator it's possible to use the session variable like if a global
    variable session is declared.

    session is a sqlalchemy session, and you can get one calling get_session().
    '''
    @wraps(function)
    def new_funct(*args, **kwargs):
        if not kwargs.get('session'):
            session = get_session()
            try:
                kwargs['session'] = session
                result = function(*args, **kwargs)
                # Commit only on success; any exception below rolls back.
                session.commit()  # pylint: disable=maybe-no-member
            except TimeoutError as error:
                print(error)
                session.rollback()  # pylint: disable=maybe-no-member
                raise DatabaseException(str(error))
            except DatabaseError as error:
                # (Python 2 `except X, error` and print statements modernized.)
                print(error)
                session.rollback()  # pylint: disable=maybe-no-member
                raise DatabaseException(str(error))
            except:
                session.rollback()  # pylint: disable=maybe-no-member
                raise
            finally:
                session.remove()  # pylint: disable=maybe-no-member
        else:
            # Caller manages its own session/transaction.
            result = function(*args, **kwargs)
        return result
    new_funct.__doc__ = function.__doc__
    return new_funct
def __call__(self, query, env=None):
    """
    Execute the SQL query.
    Automatically creates tables mentioned in the query from dataframes before executing.

    :param query: SQL query string, which can reference pandas dataframes as SQL tables.
    :param env: Variables environment - a dict mapping table names to pandas dataframes.
     If not specified use local and global variables of the caller.
    :return: Pandas dataframe with the result of the SQL query.
    """
    if env is None:
        # Pull the caller's locals/globals so bare dataframe names resolve.
        env = get_outer_frame_variables()
    with self.conn as conn:
        for table_name in extract_table_names(query):
            if table_name not in env:
                # don't raise error because the table may be already in the database
                continue
            if self.persist and table_name in self.loaded_tables:
                # table was loaded before using the same instance, don't do it again
                continue
            self.loaded_tables.add(table_name)
            write_table(env[table_name], table_name, conn)
        try:
            result = read_sql(query, conn)
        except DatabaseError as ex:
            # Surface DB errors under the library's own exception type.
            raise PandaSQLException(ex)
        except ResourceClosedError:
            # query returns nothing
            result = None
    return result
def commit_session(response):
    """
    Try to commit the db session in the case of a successful request
    with status_code under 400.
    """
    if response.status_code < 400:
        try:
            db_session.commit()
        except DatabaseError:
            # Keep the session usable for the next request.
            db_session.rollback()
    return response
def _flush(self):
    """Flush pending changes to the database, rolling back on any database error."""
    try:
        db_session.flush()
    except DatabaseError:
        # Roll back so the session stays usable after a failed flush.
        db_session.rollback()
def cockroach_transaction(f):
    """
    Decorator that retries `f` until it completes without a CockroachDB
    serialization conflict (pgcode 40001).

    :param f: zero-argument callable running the transaction body.
    :returns: a wrapper taking a `caller` label used for monitoring metrics.
    """
    def run_transaction(caller):
        while True:
            with MONITOR_COCKROACHDB.observe_transaction(caller):
                try:
                    return f()
                except DatabaseError as e:
                    # Retry ONLY serialization failures. The original used
                    # `and`, which also retried every other OperationalError
                    # forever; `or` re-raises anything that is not an
                    # OperationalError with SERIALIZATION_FAILURE. The
                    # isinstance check must also come first so .pgcode is
                    # only read on psycopg2 errors.
                    if not isinstance(e.orig, psycopg2.OperationalError) or \
                            e.orig.pgcode != psycopg2.errorcodes.SERIALIZATION_FAILURE:
                        raise
            MONITOR_COCKROACHDB.cockroach_retry_count.labels(caller).inc()
    return run_transaction
def drop(self):
    """
    Remove version control from a database.
    """
    # SQLAlchemy >= 0.7 renamed SQLError to DatabaseError.
    expected_error = sa_exceptions.DatabaseError if SQLA_07 else sa_exceptions.SQLError
    try:
        self.table.drop()
    except expected_error:
        raise exceptions.DatabaseNotControlledError(str(self.table))
def __fill_player(data, api, account_id): LOGGER.info('Find account id: %d', account_id) # Important: Cannot utilize steamids with account_ids, because the orders of returned players was not in the same sequence and no account id in the response. try: players = api.get_player_summaries(steamids=account_id) except (APIError, APITimeoutError) as error: LOGGER.error('Failed to retrieved account id: %d and for now it will be recorded with minimum info, error: %s', account_id, str(error)) # Temporary creates a blank account with consideration that this account will be synch up again in the next fill_database_detail() invocation. players = { 'players': [ { 'steamid': account_id, 'profileurl': 'N/A' } ] } if players is None: LOGGER.info('Not found account id: %d', account_id) return False for player in players['players']: steam_id = player['steamid'] real_name = player.get('realname', None) persona_name = player.get('personaname', None) avatar = player.get('avatarfull', None) profile_url = player.get('profileurl', None) data_player = data.get_player(account_id=account_id) try: if data_player: data_player.steam_id = steam_id data_player.real_name = real_name data_player.persona_name = persona_name data_player.avatar = avatar data_player.profile_url = profile_url data.update_player(player=data_player) LOGGER.info('Updated account id: %d', account_id) else: data.add_player(account_id=account_id, steam_id=steam_id, profile_url=profile_url, real_name=real_name, persona_name=persona_name, avatar=avatar) LOGGER.info('Created account id: %d', account_id) return True except DatabaseError as error: # Temporary ignore the unsupported data, especially the unicode issue. LOGGER.error('Failed to process account id: %d, error: %s', account_id, str(error)) Database.session.rollback() return False
def __bulk_add_replicas(rse_id, files, account, session=None):
    """
    Bulk add new file replicas on an RSE.

    :param rse_id: the RSE id.
    :param files: the list of files.
    :param account: The account owner.
    :param session: The database session in use.
    :returns: (number of new replicas, total bytes of new replicas).
    :raises Duplicate: when the unique constraint on (rse_id, scope, name) fires.
    :raises RucioException: on any other integrity/database error.
    """
    nbfiles, nbbytes = 0, 0  # renamed from `bytes`, which shadowed the builtin
    # Check for the replicas already available
    condition = or_()
    for f in files:
        condition.append(and_(models.RSEFileAssociation.scope == f['scope'],
                              models.RSEFileAssociation.name == f['name'],
                              models.RSEFileAssociation.rse_id == rse_id))
    query = session.query(models.RSEFileAssociation.scope,
                          models.RSEFileAssociation.name,
                          models.RSEFileAssociation.rse_id).\
        with_hint(models.RSEFileAssociation, text="INDEX(REPLICAS REPLICAS_PK)", dialect_name='oracle').\
        filter(condition)
    # Set lookup instead of the original nested-loop scan over the query
    # results (was O(len(files) * len(available))). rse_id is constant here,
    # so (scope, name) identifies a replica.
    existing = {(row.scope, row.name) for row in query}
    new_replicas = []
    for file in files:
        if (file['scope'], file['name']) in existing:
            continue
        nbfiles += 1
        nbbytes += file['bytes']
        new_replicas.append({'rse_id': rse_id, 'scope': file['scope'], 'name': file['name'],
                             'bytes': file['bytes'], 'path': file.get('path'),
                             'state': ReplicaState.from_string(file.get('state', 'A')),
                             'md5': file.get('md5'), 'adler32': file.get('adler32'),
                             'lock_cnt': file.get('lock_cnt', 0),
                             'tombstone': file.get('tombstone')})
    try:
        # (Python 2 `except X, error` syntax replaced with `except X as error`.)
        new_replicas and session.bulk_insert_mappings(models.RSEFileAssociation, new_replicas)
        session.flush()
        return nbfiles, nbbytes
    except IntegrityError as error:
        # Match the backend-specific unique-constraint messages.
        if match('.*IntegrityError.*ORA-00001: unique constraint .*REPLICAS_PK.*violated.*', error.args[0]) \
                or match('.*IntegrityError.*1062.*Duplicate entry.*', error.args[0]) \
                or error.args[0] == '(IntegrityError) columns rse_id, scope, name are not unique' \
                or match('.*IntegrityError.*duplicate key value violates unique constraint.*', error.args[0]):
            raise exception.Duplicate("File replica already exists!")
        raise exception.RucioException(error.args)
    except DatabaseError as error:
        raise exception.RucioException(error.args)