Python sqlalchemy.exc module: DBAPIError() usage examples

The following 18 code examples, extracted from open-source Python projects, illustrate how to use sqlalchemy.exc.DBAPIError().
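
Before the project-specific examples, here is a minimal sketch of the pattern they all share: execute a statement, catch DBAPIError (SQLAlchemy's wrapper around the underlying driver exception), and inspect attributes such as .orig and .connection_invalidated. The in-memory SQLite URL and the table name are placeholders, not taken from any of the projects below.

from sqlalchemy import create_engine, text
from sqlalchemy.exc import DBAPIError

# Placeholder engine; substitute your own database URL.
engine = create_engine('sqlite:///:memory:')

with engine.connect() as connection:
    try:
        connection.execute(text('SELECT * FROM missing_table'))
    except DBAPIError as err:
        # err.orig is the exception raised by the DBAPI driver itself.
        print('driver error:', err.orig)
        # True when SQLAlchemy has decided the connection itself is unusable.
        print('connection invalidated:', err.connection_invalidated)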

Project: pyetje    Author: rorlika
def get_table_names(self, connection, schema=None, **kw):
        if schema is not None:
            qschema = self.identifier_preparer.quote_identifier(schema)
            master = '%s.sqlite_master' % qschema
            s = ("SELECT name FROM %s "
                 "WHERE type='table' ORDER BY name") % (master,)
            rs = connection.execute(s)
        else:
            try:
                s = ("SELECT name FROM "
                     " (SELECT * FROM sqlite_master UNION ALL "
                     "  SELECT * FROM sqlite_temp_master) "
                     "WHERE type='table' ORDER BY name")
                rs = connection.execute(s)
            except exc.DBAPIError:
                s = ("SELECT name FROM sqlite_master "
                     "WHERE type='table' ORDER BY name")
                rs = connection.execute(s)

        return [row[0] for row in rs]
Project: pyetje    Author: rorlika
def get_view_names(self, connection, schema=None, **kw):
        if schema is not None:
            qschema = self.identifier_preparer.quote_identifier(schema)
            master = '%s.sqlite_master' % qschema
            s = ("SELECT name FROM %s "
                 "WHERE type='view' ORDER BY name") % (master,)
            rs = connection.execute(s)
        else:
            try:
                s = ("SELECT name FROM "
                     " (SELECT * FROM sqlite_master UNION ALL "
                     "  SELECT * FROM sqlite_temp_master) "
                     "WHERE type='view' ORDER BY name")
                rs = connection.execute(s)
            except exc.DBAPIError:
                s = ("SELECT name FROM sqlite_master "
                     "WHERE type='view' ORDER BY name")
                rs = connection.execute(s)

        return [row[0] for row in rs]
Project: pyetje    Author: rorlika
def get_view_definition(self, connection, view_name, schema=None, **kw):
        quote = self.identifier_preparer.quote_identifier
        if schema is not None:
            qschema = self.identifier_preparer.quote_identifier(schema)
            master = '%s.sqlite_master' % qschema
            s = ("SELECT sql FROM %s WHERE name = '%s'"
                 "AND type='view'") % (master, view_name)
            rs = connection.execute(s)
        else:
            try:
                s = ("SELECT sql FROM "
                     " (SELECT * FROM sqlite_master UNION ALL "
                     "  SELECT * FROM sqlite_temp_master) "
                     "WHERE name = '%s' "
                     "AND type='view'") % view_name
                rs = connection.execute(s)
            except exc.DBAPIError:
                s = ("SELECT sql FROM sqlite_master WHERE name = '%s' "
                     "AND type='view'") % view_name
                rs = connection.execute(s)

        result = rs.fetchall()
        if result:
            return result[0].sql
Project: pyramid_starter    Author: jmercouris
def my_view(request):
    try:
        one = DBSession.query(MyModel).filter(MyModel.name == 'one').first()
    except DBAPIError:
        return Response(conn_err_msg, content_type='text/plain', status_int=500)
    return {'one': one, 'project': 'source'}
Project: sciz    Author: erk3
def init(self):
        try:
            sg.SqlAlchemyBase.metadata.create_all(self.engine)
        except (exc.SQLAlchemyError, exc.DBAPIError) as e:
            e.sciz_logger_flag = True
            sg.logger.error('Fail to init the DB! (SQLAlchemy error: %s)' % (str(e), ))
            raise

    # Connect to the DB
Project: yui    Author: item4
def get_database_engine(config: AttrDict) -> Engine:
    try:
        return config.DATABASE_ENGINE
    except AttributeError:
        db_url = config.DATABASE_URL
        echo = config.DATABASE_ECHO
        engine = create_engine(db_url, echo=echo)

        @listens_for(engine, 'engine_connect')
        def ping_connection(connection, branch):
            """
            Disconnect handling

            http://docs.sqlalchemy.org/en/latest/core/pooling.html#disconnect-handling-pessimistic

            """

            if branch:
                return

            save_should_close_with_result = connection.should_close_with_result
            connection.should_close_with_result = False

            try:
                connection.scalar(select([1]))
            except DBAPIError as err:
                if err.connection_invalidated:
                    connection.scalar(select([1]))
                else:
                    raise
            finally:
                connection.should_close_with_result = \
                    save_should_close_with_result

        return engine
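
The manual engine_connect ping above predates SQLAlchemy 1.2; as the raw-data-repository example further down also notes, the same pessimistic disconnect handling is now available as an engine option. A minimal sketch, assuming SQLAlchemy 1.2 or newer and a placeholder database URL:

from sqlalchemy import create_engine

# pool_pre_ping issues a lightweight ping on checkout and transparently
# recycles connections the server has dropped, replacing the manual
# DBAPIError / connection_invalidated handling shown above.
engine = create_engine('postgresql://user:password@localhost/dbname',
                       pool_pre_ping=True)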
Project: Anyblok_Pyramid    Author: AnyBlok
def should_retry(self, error):
        if isinstance(error, ConcurrentModificationError):
            return True
        if isinstance(error, DBAPIError):
            orig = error.orig
            for error_type, test in _retryable_errors:
                if isinstance(orig, error_type):
                    if test is None:
                        return True
                    if test(orig):
                        return True
Project: dearhrc.us    Author: JessaWitzel
def my_view(request):
    try:
        query = request.dbsession.query(MyModel)
        one = query.filter(MyModel.name == 'one').first()
    except DBAPIError:
        return Response(db_err_msg, content_type='text/plain', status=500)
    return {'one': one, 'project': 'dearhrc-backend'}
Project: Pysearch2.0    Author: Pysearch
def results_view(request):
    """Append result of each unique keyword of each unique url to be passed to be scored.
    Displays ranked results, their scores, and percent match.
    """
    web_page = request.params["url"]
    results = []
    try:
        unique_urls = []
        for val in request.dbsession.query(Match.page_url).distinct():

            unique_urls.append(val[0])

        print(unique_urls)

        unique_keywords = []
        for val in request.dbsession.query(Match.keyword).distinct():
            unique_keywords.append(val[0])
        print(unique_keywords)

        for url in unique_urls:
            for kw in unique_keywords:
                url_q = request.dbsession.query(Match).filter_by(keyword=kw).filter_by(page_url=url).first()
                if url_q:
                    results.append({'keyword': kw, 'weight': url_q.keyword_weight, 'url': url, 'count': url_q.count})

    except DBAPIError:
        return Response(db_err_msg, content_type='text/plain', status=500)
    results = score_data(results)
    return {"RESULTS": results, "web_page": web_page}
Project: radar    Author: renalreg
def ping_connection(connection, branch):
    if branch:
        # Don't ping sub-connections
        return

    try:
        # Test the connection
        connection.scalar(select([1]))
    except exc.DBAPIError as e:
        if e.connection_invalidated:
            # Establish a new connection
            connection.scalar(select([1]))
        else:
            raise
Project: pyramid-cookiecutter-alchemy    Author: Pylons
def my_view(request):
    try:
        query = request.dbsession.query(MyModel)
        one = query.filter(MyModel.name == 'one').first()
    except DBAPIError:
        return Response(db_err_msg, content_type='text/plain', status=500)
    return {'one': one, 'project': '{{ cookiecutter.project_name }}'}
Project: Albireo    Author: lordfriend
def __scan_non_finished_bangumi(self):
        """
        Scan the bangumi whose status is not finished, and update their status if possible.
        :return:
        """
        session = SessionManager.Session()
        try:
            bangumi_list = session.query(Bangumi). \
                filter(Bangumi.delete_mark == None). \
                filter(Bangumi.status != Bangumi.STATUS_FINISHED). \
                all()
            for bangumi in bangumi_list:
                if bangumi.status == Bangumi.STATUS_PENDING and bangumi.air_date <= datetime.today().date():
                    bangumi.status = Bangumi.STATUS_ON_AIR
                if bangumi.status == Bangumi.STATUS_ON_AIR and self.__check_if_bangumi_finished(session, bangumi):
                    bangumi.status = Bangumi.STATUS_FINISHED
            session.commit()
        except exc.DBAPIError as db_error:
            logger.error(db_error, exc_info=True)
            # if the connection was invalidated, roll back the session
            if db_error.connection_invalidated:
                session.rollback()
        except Exception as error:
            logger.error(error, exc_info=True)
            traceback.print_exc()
        finally:
            SessionManager.Session.remove()
Project: idealoom    Author: conversence
def transient_deadlock_tween_factory(handler, registry):
    """This defines a tween that will retry a request if it failed
    because of a deadlock in the Virtuoso database."""
    def transient_deadlock_tween(request):
        try:
            return handler(request)
        except DBAPIError as e:
            orig = e.orig
            if getattr(orig, 'args', [None])[0] == '40001':
                time.sleep(random.random())
                raise DeadlockError(e.statement, e.params, orig)
            else:
                raise

    return transient_deadlock_tween
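
The tween keys on '40001', the SQLSTATE class several databases use for serialization failures and deadlocks, by reading the wrapped driver exception's positional args. A minimal, hypothetical variant of the same check that also looks at psycopg2's pgcode attribute (illustrative only; the project itself targets Virtuoso):

from sqlalchemy.exc import DBAPIError

SERIALIZATION_FAILURE = '40001'  # standard SQLSTATE for serialization failures

def is_transient_deadlock(error):
    """Best-effort check; drivers expose the SQLSTATE in different places."""
    if not isinstance(error, DBAPIError):
        return False
    orig = error.orig
    code = getattr(orig, 'pgcode', None) or getattr(orig, 'args', [None])[0]
    return code == SERIALIZATION_FAILURE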
Project: sdic    Author: percolate
def launch_queries(directory, server):
    """
    Launch the queries found in the specified folder

    Param directory string Folder containing the SQL files
    Param server dict describing a server

    Returns: Bool value of whether we get query output or not
    """
    query_folder = os.path.join(directory, server['name'])
    files = get_query_files(query_folder)
    produced_output = False

    for filename in files:
        query_filename = os.path.join(directory, server['name'], filename)
        output = None
        with open(query_filename, 'r') as opened_file:
            query = opened_file.read()

            start_time = time.time()
            try:
                output = get_query_output(server, query)
            except DBAPIError:
                print "The following SQL query got interrupted:"
                print query
                print
                continue
            query_time = round(time.time() - start_time, 3)

            syslog.syslog('{} successfully ran in {} sec.'.format(filename,
                                                                  query_time))
        if output:
            produced_output = True

            # Announce that this query has results
            print "-----===== /!\ INCOMING BAD DATA /!\ =====-----"
            print
            print "Server: {}".format(server['name'])
            print "File: {}".format(filename)
            print
            # Display the raw query
            print "SQL Query:"
            print query

            # Display the results of the query
            print output
            print

    return produced_output
Project: EDDB_JsonAPI    Author: FuelRats
def nearest(request):
    try:
        x = request.params['x']
        y = request.params['y']
        z = request.params['z']
        if 'limit' in request.params:
            limit = request.params['limit']
        else:
            limit = 10
        if 'include' in request.params:
            include = True
        else:
            include = False
        if 'cubesize' in request.params:
            # cast to float so the range arithmetic below does not fail on a string parameter
            cubesize = float(request.params['cubesize'])
        else:
            cubesize = 200
        if 'aggressive' in request.params:
            sql = text('SELECT *,(sqrt((systems.X - ' + x + ')^2 + (systems.Y - ' +
                        y + ')^2 + (systems.Z - ' + z + '0)^2)) as DISTANCE from '
                        'systems WHERE x BETWEEN ' + str(float(x)-cubesize) + ' AND ' +
                       str(float(x)+cubesize) + ' AND y BETWEEN ' + str(float(y)-cubesize) +
                       ' AND ' + str(float(y)+cubesize) +' AND z BETWEEN ' +
                        str(float(z)-cubesize) + ' AND '+ str(float(z)+cubesize) +
                       ' ORDER BY DISTANCE LIMIT ' + str(limit) + ';')
        else:
            sql = text('SELECT *,(sqrt((populated_systems.X - ' + x + ')^2 + (populated_systems.Y - ' +
                        y + ')^2 + (populated_systems.Z - ' + z + '0)^2)) as DISTANCE from '
                        'populated_systems ORDER BY DISTANCE LIMIT ' + str(limit) + ';')

        result = DBSession.execute(sql)
        candidates = []
        ids = []
        bodies = []
        stations = []
        for row in result:
            candidates.append({'name': row['name'], 'distance': row['distance'], 'id': row['id']})
            ids.append(row['id'])
        if include:
            query = DBSession.query(Body).filter(Body.system_id.in_(tuple(ids)))
            results = query.all()
            for row in results:
                bodies.append(object_as_dict(row))

            query = DBSession.query(Station).filter(Station.system_id.in_(tuple(ids)))
            results = query.all()
            for row in results:
                stations.append(object_as_dict(row))

    except DBAPIError:
        return Response(db_err_msg, content_type='text/plain', status=500)
    if bodies:
        return {'meta': {'query_x': x, 'query_y': y, 'query_z': z, 'limit': limit, 'cubesize': cubesize,
                         'include': include},
                'candidates': candidates, 'included': {'bodies': bodies, 'stations': stations}}
    else:
        return {'meta': {'query_x': x, 'query_y': y, 'query_z': z, 'limit': limit, 'cubesize':cubesize,
                         'included': include},
                'data': candidates}
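
Building the SQL through string concatenation is fragile (and injection-prone); a malformed parameter surfaces as a DBAPIError at execute time. A minimal sketch of the cube query rewritten with bound parameters, reusing DBSession, x, y, z, cubesize and limit from the example above; the column names are assumed to match the systems table used there:

from sqlalchemy import text

# Bound parameters let the driver handle quoting and type conversion.
sql = text(
    'SELECT *, sqrt((systems.X - :x)^2 + (systems.Y - :y)^2 + (systems.Z - :z)^2) AS distance '
    'FROM systems '
    'WHERE x BETWEEN :x_lo AND :x_hi '
    'AND y BETWEEN :y_lo AND :y_hi '
    'AND z BETWEEN :z_lo AND :z_hi '
    'ORDER BY distance LIMIT :limit')
result = DBSession.execute(sql, {
    'x': float(x), 'y': float(y), 'z': float(z),
    'x_lo': float(x) - cubesize, 'x_hi': float(x) + cubesize,
    'y_lo': float(y) - cubesize, 'y_hi': float(y) + cubesize,
    'z_lo': float(z) - cubesize, 'z_hi': float(z) + cubesize,
    'limit': int(limit)})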
Project: raw-data-repository    Author: all-of-us
def _ping_connection(connection, branch):
  """Makes sure connections are alive before trying to use them.

  Copied from SQLAlchemy 1.1 docs:
  http://docs.sqlalchemy.org/en/rel_1_1/core/pooling.html#disconnect-handling-pessimistic
  TODO(DA-321) Once SQLAlchemy v1.2 is out of development and released, switch to
  create_engine(pool_pre_ping=True).
  """
  if branch:
    # "branch" refers to a sub-connection of a connection,
    # we don't want to bother pinging on these.
    return

  # turn off "close with result".  This flag is only used with
  # "connectionless" execution, otherwise will be False in any case
  save_should_close_with_result = connection.should_close_with_result
  connection.should_close_with_result = False

  try:
    # run a SELECT 1.   use a core select() so that
    # the SELECT of a scalar value without a table is
    # appropriately formatted for the backend
    connection.scalar(select([1]))
  except DBAPIError as err:
    # catch SQLAlchemy's DBAPIError, which is a wrapper
    # for the DBAPI's exception.  It includes a .connection_invalidated
    # attribute which specifies if this connection is a "disconnect"
    # condition, which is based on inspection of the original exception
    # by the dialect in use.
    logging.warning('Database connection ping failed.', exc_info=True)
    if err.connection_invalidated:
      # run the same SELECT again - the connection will re-validate
      # itself and establish a new connection.  The disconnect detection
      # here also causes the whole connection pool to be invalidated
      # so that all stale connections are discarded.
      logging.warning('Database connection invalidated, reconnecting.')
      connection.scalar(select([1]))
    else:
      raise
  finally:
    # restore "close with result"
    connection.should_close_with_result = save_should_close_with_result
Project: Albireo    Author: lordfriend
def __scan_download_status_in_thread(self):
        logger.info('start scan download status')
        session = SessionManager.Session()
        try:
            current_time = datetime.utcnow()
            result = session.query(Episode).\
                options(joinedload(Episode.bangumi).joinedload(Bangumi.maintained_by)).\
                filter(Episode.airdate != None).\
                filter(Episode.status != Episode.STATUS_DOWNLOADED).\
                filter(Episode.airdate < current_time.date()).\
                filter(Bangumi.status != Bangumi.STATUS_FINISHED).\
                all()

            admin_map = {}

            for episode in result:
                if current_time.date() - episode.airdate < timedelta(days=episode.bangumi.alert_timeout):
                    continue
                bangumi_id = str(episode.bangumi_id)
                if episode.bangumi.maintained_by is None:
                    if 'sys' not in admin_map:
                        admin_map['sys'] = {}
                    if bangumi_id not in admin_map['sys']:
                        admin_map['sys'][bangumi_id] = {
                            'bangumi': episode.bangumi,
                            'episodes': []
                        }
                    admin_map['sys'][bangumi_id]['episodes'].append(episode)
                else:
                    maintainer_uid = str(episode.bangumi.maintained_by.id)
                    if maintainer_uid not in admin_map:
                        admin_map[maintainer_uid] = {
                            'user': episode.bangumi.maintained_by,
                            'bangumi_map': {}
                        }
                    if bangumi_id not in admin_map[maintainer_uid]:
                        admin_map[maintainer_uid]['bangumi_map'][bangumi_id] = {
                            'bangumi': episode.bangumi,
                            'episodes': []
                        }
                    admin_map[maintainer_uid]['bangumi_map'][bangumi_id]['episodes'].append(episode)

            msg_list = []
            for uid in admin_map:
                if uid == 'sys':
                    all_admin_list = session.query(User).filter(User.level >= User.LEVEL_ADMIN).all()
                    msg_list = msg_list + self.__send_email_to_all(all_admin_list, admin_map['sys'])
                elif admin_map[uid]['user'].email is None or not admin_map[uid]['user'].email_confirmed:
                    continue
                else:
                    msg_list.append(self.__send_email_to(admin_map[uid]['user'], admin_map[uid]['bangumi_map']))
            self.mail.send(msg_list)

        except exc.DBAPIError as db_error:
            logger.error(db_error, exc_info=True)
            # if the connection was invalidated, roll back the session
            if db_error.connection_invalidated:
                session.rollback()
        finally:
            SessionManager.Session.remove()
Project: Albireo    Author: lordfriend
def __scan_current_on_air_bangumi(self):
        logger.info('start scan info of episode')
        session = SessionManager.Session()
        try:
            result = session.query(Episode, Bangumi). \
                join(Bangumi). \
                filter(Bangumi.delete_mark == None). \
                filter(Bangumi.status != Bangumi.STATUS_FINISHED)

            bgm_episode_dict = {}

            for episode, bangumi in result:
                # check the terminated flag so we can respond to a stop request instantly
                if self.terminated:
                    return

                if not (bangumi.bgm_id in bgm_episode_dict):
                    # if this is not the first call for get_bgm_info,
                    # a delay should be added to avoid accessing the bgm api
                    # too frequently
                    if bgm_episode_dict:
                        time.sleep(20)
                    logger.info('try to get info for bangumi of %s' % str(bangumi.bgm_id))
                    (status_code, bangumi_info) = self.get_bgm_info(bangumi.bgm_id)
                    if status_code < 400:
                        bgm_episode_dict[bangumi.bgm_id] = bangumi_info

                if not (bangumi.bgm_id in bgm_episode_dict):
                    continue

                bangumi_info = bgm_episode_dict[bangumi.bgm_id]

                for eps in bangumi_info['eps']:
                    if eps['id'] == episode.bgm_eps_id:
                        if episode.name == '':
                            episode.name = eps['name']
                        if episode.name_cn == '':
                            episode.name_cn = eps['name_cn']
                        if episode.duration == '':
                            episode.duration = eps['duration']
                        # always update airdate because it can be changed.
                        if is_valid_date(eps['airdate']):
                            episode.airdate = eps['airdate']
                        break

            session.commit()
            logger.info('scan finished, will scan at next day')
        except exc.DBAPIError as db_error:
            logger.error(db_error, exc_info=True)
            # if the connection was invalidated, roll back the session
            if db_error.connection_invalidated:
                session.rollback()
        except Exception as error:
            logger.error(error, exc_info=True)
            traceback.print_exc()
        finally:
            SessionManager.Session.remove()