我们从Python开源项目中,提取了以下27个代码示例,用于说明如何使用redis.RedisError()。
def dump_and_reload(self, restart_process=False):
    """Dump the rdb and reload it, to test for serialization errors.

    :param restart_process: if True, rewrite the AOF and restart the
        managed server process instead of an in-place DEBUG RELOAD.
    :raises redis.RedisError: when the reload fails; ``self.errored``
        is set before re-raising.
    """
    conn = self.client()
    if restart_process:
        if self._is_external:
            # We did not spawn this server, so we cannot restart it.
            warnings.warn('Tied to an external process. Cannot restart')
            return
        # Dropped an unused local `import time` that was dead code here.
        conn.bgrewriteaof()
        self._wait_for_child()
        self.stop(for_restart=True)
        self.start()
    else:
        conn.save()
        try:
            conn.execute_command('DEBUG', 'RELOAD')
        except redis.RedisError:
            self.errored = True
            # Bare raise re-raises the active exception with its traceback.
            raise
def _get_cached_url(self, url):
    """Return the cached, JSON-decoded metadata for *url*, or None on a
    cache miss. Raises MetadataClientException on redis or decode errors."""
    key = self._get_cache_key(url)
    try:
        raw = self.redis_client.get(key)
    except redis.RedisError:
        raise self.MetadataClientException('Unable to read from redis.')
    if raw is None:
        # Cache miss: record it and fall through to an empty result.
        statsd_client.incr('redis_cache_miss')
        return None
    statsd_client.incr('redis_cache_hit')
    try:
        return json.loads(raw)
    except ValueError:
        raise self.MetadataClientException(
            ('Unable to load JSON data '
             'from cache for key: {key}').format(key=key))
def execute_command(host, port, command):
    """Run *command* against the redis server at host:port and return the
    outcome wrapped in a JSON response."""
    client = redis.StrictRedis(host, port, socket_timeout=1)
    try:
        outcome = client.execute_command(command)
    except redis.RedisError as exc:
        # Surface the redis error as the result payload instead of a 500.
        outcome = "Err: {}".format(exc)
    return jsonify(result=outcome, status=0)
def _persistent_connect(self, db_name): """ Keep reconnecting to Database 'db_name' until success """ while True: try: self._onetime_connect(db_name) return except RedisError: t_wait = self.CONNECT_RETRY_WAIT_TIME logger.warning("Connecting to DB '{}' failed, will retry in {}s".format(db_name, t_wait)) self.close(db_name) time.sleep(t_wait)
def hello():
    """Render the greeting page, incrementing the Redis visit counter;
    fall back to a notice when Redis is unreachable."""
    try:
        counter = redis.incr("counter")
    except RedisError:
        counter = "<i>cannot connect to Redis, counter disabled</i>"
    template = ("<h3>Hello {name}!</h3>"
                "<b>Hostname:</b> {hostname}<br/>"
                "<b>Visits:</b> {visits}")
    return template.format(
        name=os.getenv("NAME", "world"),
        hostname=socket.gethostname(),
        visits=counter,
    )
def test_pocket_client_raises_exception_if_redis_fails_to_get(self):
    """A redis read failure must surface as PocketException."""
    failing_get = self.mock_redis.get
    failing_get.side_effect = redis.RedisError
    with self.assertRaises(PocketClient.PocketException):
        self.pocket_client.fetch_recommended_urls()
def test_pocket_client_raises_exception_if_redis_fails_to_set(self):
    """A redis write failure must surface as PocketException."""
    payload = json.dumps(self.sample_pocket_data)
    self.mock_requests_get.return_value = self.get_mock_response(
        content=payload)
    self.mock_redis.setex.side_effect = redis.RedisError
    with self.assertRaises(self.pocket_client.PocketException):
        self.pocket_client.fetch_recommended_urls()
def test_pocket_client_raises_exception_if_redis_fails(self):
    """A failing redis read is translated into PocketException."""
    mock_get = self.mock_redis.get
    mock_get.side_effect = redis.RedisError
    with self.assertRaises(PocketClient.PocketException):
        self.pocket_client.get_recommended_urls()
def test_pocket_client_raises_exception_if_unable_to_write_to_redis(self):
    """A failing redis write is translated into PocketException."""
    mock_setex = self.mock_redis.setex
    mock_setex.side_effect = redis.RedisError
    with self.assertRaises(PocketClient.PocketException):
        self.pocket_client.get_recommended_urls()
def test_urlextractorexception_returns_error(self):
    """A redis failure during metadata lookup yields an HTTP 500."""
    self.mock_redis.get.side_effect = redis.RedisError()
    mock_request = self.get_mock_request(urls=self.sample_urls)
    with self.assertRaises(HTTPException) as cm:
        get_metadata(self.metadata_client, self.get_config(), mock_request)
    status = cm.exception.response.status_code
    self.assertEqual(status, 500)
def test_redis_get_error_raises_exception(self):
    """A redis read failure surfaces as MetadataClientException."""
    self.mock_redis.get.side_effect = redis.RedisError
    expected = MetadataClient.MetadataClientException
    with self.assertRaises(expected):
        self.metadata_client.get_cached_urls(self.sample_urls)
def test_redis_get_error_raises_exception(self):
    """A redis write failure during get_remote_urls raises, after exactly
    one setex attempt.

    NOTE(review): the name says "get" but this exercises setex (write);
    kept unchanged since test names are part of the suite's interface.
    """
    self.mock_redis.setex.side_effect = redis.RedisError
    remote_data = self.get_mock_urls_data(self.sample_urls)
    mock_response = self.get_mock_response(content=json.dumps(remote_data))
    self.metadata_client._make_remote_request.return_value = mock_response
    with self.assertRaises(MetadataClient.MetadataClientException):
        self.metadata_client.get_remote_urls(self.sample_urls)
    self.assertEqual(self.mock_redis.setex.call_count, 1)
def _set_cached_url(self, url, data, timeout):
    """Serialize *data* and cache it under the key derived from *url*
    with the given expiry. Raises MetadataClientException on failure."""
    key = self._get_cache_key(url)
    payload = json.dumps(data)
    try:
        self.redis_client.setex(key, timeout, payload)
        statsd_client.incr('redis_cache_write')
    except redis.RedisError:
        raise self.MetadataClientException('Unable to write to redis.')
def getValues(self, identifiers):
    """Get a value from the cache for the given identifiers.

    @param identifiers: A C{list} of identifier to make the keys.
    @return A C{list} with all the values for the given identifiers.
    """
    if not identifiers:
        return []
    # Key construction cannot raise RedisError; keep the try body minimal.
    keys = [self._getKey(identifier) for identifier in identifiers]
    try:
        return self._client.mget(keys)
    except RedisError as error:
        logging.error('Redis error: %s', error)
        # The original fell off the end here, returning None and breaking
        # the documented "returns a list" contract; return [] instead.
        return []
def setValues(self, values):
    """Set values in the cache for the given identifiers.

    @param values: A C{dict} mapping identifiers to values.
    """
    pipe = self._client.pipeline()
    # dict.items() works on Python 2 and 3; iteritems() was removed in 3
    # and made this method crash with AttributeError there.
    for identifier, value in values.items():
        # NOTE(review): argument order (key, value, timeout) matches
        # legacy redis-py setex; modern clients expect (key, time, value)
        # -- verify against the pinned client version.
        pipe.setex(self._getKey(identifier), value, self.expireTimeout)
    try:
        results = pipe.execute()
        # Pipelines return per-command errors in the result list instead
        # of raising; re-raise the first one so it is logged below.
        for item in results:
            if isinstance(item, RedisError):
                raise item
    except RedisError as error:
        logging.error('Redis error: %s', error)
def deleteValues(self, identifiers):
    """Delete values from the cache for the given identifiers.

    @param identifiers: A C{list} of identifier to make the keys.
    """
    if not identifiers:
        return
    try:
        # Map identifiers to cache keys and delete them in one call.
        self._client.delete(*map(self._getKey, identifiers))
    except RedisError as error:
        logging.error('Redis error: %s', error)
def feed(self, queue_name, req):
    """Push *req* onto the sorted-set queue; return 0 on success, 1 on error."""
    # The error class depends on which redis client this instance uses.
    if self.custom:
        from custom_redis.client.errors import RedisError
    else:
        from redis import RedisError
    try:
        # Higher priority sorts first, hence the negated score.
        score = -self.priority
        self.redis_conn.zadd(queue_name, req, score)
    except RedisError:
        traceback.print_exc()
        return 1
    return 0
def redisConnect(self, redis_db, use_pool = True):
    """Open a Redis connection described by the *redis_db* dict.

    :param redis_db: dict with optional 'host', 'port', 'db' and 'auth'
        keys; missing keys fall back to localhost:6379, db 0, no auth.
    :param use_pool: when True, connect through a ConnectionPool.
    """
    # dict.get with defaults replaces the original repeated if/else ladder.
    _host = redis_db.get('host', '127.0.0.1')
    self._host_redis = _host
    _db = redis_db.get('db', 0)
    self._db_redis = _db
    _port = redis_db.get('port', 6379)
    self._port_redis = _port
    _auth = redis_db.get('auth', None)
    self._auth_redis = _auth
    try:
        if use_pool:
            redis_pool = redis.ConnectionPool(host = _host, port = _port, db = _db, password = _auth)
            self.conn_redis = redis.Redis(connection_pool = redis_pool)
        else:
            self.conn_redis = redis.Redis(host = _host, port = _port, db = _db, password = _auth)
        # print() works on both Python 2 and 3; the original bare print
        # statement is a SyntaxError under Python 3.
        print('Redis connected %s %s' % (_host, _port))
    except redis.RedisError as e:
        print('Redis Error: %s' % e)
def setUp(self):
    """Prepare a patcher that makes every connection checkout fail."""
    super(RedisStorageTest, self).setUp()
    pool = self.storage._client.connection_pool
    error = redis.RedisError('connection error')
    self.client_error_patcher = mock.patch.object(
        pool, 'get_connection', side_effect=error)
def test_backend_error_is_raised_anywhere(self):
    """Re-run the shared backend-error suite with pipeline() failing."""
    broken_pipeline = mock.patch.object(
        self.storage._client, 'pipeline', side_effect=redis.RedisError)
    with broken_pipeline:
        StorageTest.test_backend_error_is_raised_anywhere(self)
def setUp(self):
    """Prepare patchers making both command entry points raise RedisError."""
    super(RedisPermissionTest, self).setUp()
    client = self.permission._client
    self.client_error_patcher = [
        mock.patch.object(client, method, side_effect=redis.RedisError)
        for method in ('execute_command', 'pipeline')
    ]
def setUp(self):
    """Prepare a patcher that makes every cache command raise RedisError."""
    super(RedisCacheTest, self).setUp()
    cache_client = self.cache._client
    self.client_error_patcher = mock.patch.object(
        cache_client, 'execute_command', side_effect=redis.RedisError)
def wrap_redis_error(func):
    """Decorator that translates redis.RedisError raised by *func* into
    exceptions.BackendError (logging the original first)."""
    @wraps(func)
    def _guarded(*args, **kwargs):
        try:
            result = func(*args, **kwargs)
        except redis.RedisError as e:
            logger.exception(e)
            raise exceptions.BackendError(original=e)
        return result
    return _guarded
def deleteBanchoSessions(self):
    """
    Remove all `peppy:sessions:*` redis keys.
    Call at bancho startup to delete old cached sessions

    :return:
    """
    # Lua script: DEL every key matched by the pattern passed in ARGV[1].
    # TODO: Make function or some redis meme
    script = "return redis.call('del', unpack(redis.call('keys', ARGV[1])))"
    try:
        glob.redis.eval(script, 0, "peppy:sessions:*")
    except redis.RedisError:
        # Best-effort cleanup at startup; ignore redis failures.
        pass
def init_redis(self, host, port, password, db):
    """Connect to redis via a connection pool and probe it with SETNX.

    On failure the connection is dropped, ``self._is_init`` is set False
    and ``self._r`` is reset to None.
    """
    pool = redis.ConnectionPool(host=host, port=port, password=password, db=db)
    self._r = redis.Redis(connection_pool=pool)
    try:
        # Cheap write probe to confirm the server is actually reachable.
        self._r.setnx('test', 'test')
        self._is_init = True
    except redis.RedisError as e:
        # 'except X, e' is Python-2-only syntax (SyntaxError on Python 3);
        # 'as e' is portable to 2.6+ and 3.
        print("Redis init failed: %s" % str(e))
        self._is_init = False
        self._r = None
def blockable(f):
    """
    "blocking" decorator for Redis accessor methods. Wrapped functions that specify kwarg
    'blocking' will wait for the specified accessor to return with data.::

        class SonicV1Connector:
            @blockable
            def keys(self, db_name):
                # ...

        # call with:
        db = SonicV1Connector()
        # ...
        db.keys('DATABASE', blocking=True)
    """
    @wraps(f)
    def wrapped(inst, db_name, *args, **kwargs):
        # 'blocking' is consumed here and never forwarded to the accessor.
        blocking = kwargs.pop('blocking', False)
        # Counts consecutive connection-level failures for log throttling below.
        attempts = 0
        while True:
            try:
                ret_data = f(inst, db_name, *args, **kwargs)
                inst._unsubscribe_keyspace_notification(db_name)
                return ret_data
            except UnavailableDataError as e:
                # Data not there yet: either wait on keyspace notifications
                # (blocking mode) or give up immediately with None.
                if blocking:
                    if db_name in inst.keyspace_notification_channels:
                        result = inst._unavailable_data_handler(db_name, e.data)
                        if result:
                            continue # received updates, try to read data again
                        else:
                            inst._unsubscribe_keyspace_notification(db_name)
                            raise # No updates was received. Raise exception
                    else:
                        # Subscribe to updates and try it again (avoiding race condition)
                        inst._subscribe_keyspace_notification(db_name)
                else:
                    return None
            except redis.exceptions.ResponseError:
                """
                A response error indicates that something is fundamentally wrong with the request itself.
                Retrying the request won't pass unless the schema itself changes. In this case, the error
                should be attributed to the application itself. Re-raise the error.
                """
                logger.exception("Bad DB request [{}:{}]{{ {} }}".format(db_name, f.__name__, str(args)))
                raise
            except (redis.exceptions.RedisError, OSError):
                # Connection-level failure: invoke the reconnect handler and
                # loop to retry the accessor indefinitely.
                attempts += 1
                inst._connection_error_handler(db_name)
                msg = "DB access failure by [{}:{}]{{ {} }}".format(db_name, f.__name__, str(args))
                if BLOCKING_ATTEMPT_ERROR_THRESHOLD < attempts < BLOCKING_ATTEMPT_SUPPRESSION:
                    # Repeated access failures implies the database itself is unhealthy.
                    logger.exception(msg=msg)
                else:
                    # Below the threshold (or after suppression kicks in) only warn.
                    logger.warning(msg=msg)
    return wrapped
def fetch_recommended_urls(self):
    """Fetch pocket's recommended URLs, cache them in redis, and return
    the normalized list.

    :return: list of dicts with 'url', 'pocket_url' and 'timestamp' (ms).
    :raises PocketException: on network failure, non-200 response,
        unparseable JSON, or a redis write failure.
    """
    # NOTE(review): the timer is assumed to cover only the HTTP request;
    # confirm against the original (pre-mangling) layout.
    with statsd_client.timer('pocket_request_timer'):
        try:
            response = requests.get(self.pocket_url)
        except requests.RequestException as e:
            # 'except X, e' is Python-2-only syntax; 'as e' is portable.
            raise self.PocketException(
                ('Unable to communicate '
                 'with pocket: {error}').format(error=e))
    if response.status_code != 200:
        statsd_client.incr('pocket_request_failure')
        raise self.PocketException(
            ('Error status returned from '
             'pocket: {error_code} {error_message}').format(
                error_code=response.status_code,
                error_message=response.content,
            ))
    statsd_client.incr('pocket_request_success')
    pocket_data = []
    if response is not None:
        try:
            pocket_data = json.loads(response.content)
        except (TypeError, ValueError) as e:
            statsd_client.incr('pocket_parse_failure')
            raise self.PocketException(
                ('Unable to parse the JSON '
                 'response from pocket: {error}').format(error=e))
    # Normalize pocket's payload into our own schema; timestamps are
    # converted from seconds to milliseconds.
    recommended_urls = []
    for recommended_url in pocket_data['list']:
        recommended_urls.append({
            'url': recommended_url['dedupe_url'],
            'pocket_url': recommended_url['url'],
            'timestamp': int(recommended_url['published_timestamp']) * 1000,
        })
    try:
        self.redis_client.setex(
            self.redis_key,
            self.redis_data_timeout,
            json.dumps(recommended_urls),
        )
    except redis.RedisError:
        raise self.PocketException('Unable to write to redis.')
    return recommended_urls