The following 50 code examples, extracted from open-source Python projects, illustrate how to use google.appengine.api.memcache.add().
def xsrf_secret_key():
    """Return the secret key for use for XSRF protection.

    If the Site entity does not have a secret key, this method will also
    create one and persist it.

    Returns:
        The secret key.
    """
    cached = memcache.get(XSRF_MEMCACHE_ID, namespace=OAUTH2CLIENT_NAMESPACE)
    if cached:
        return str(cached)
    # Cache miss: load the one and only instance of SiteXsrfSecretKey.
    entity = SiteXsrfSecretKey.get_or_insert(key_name='site')
    if not entity.secret:
        entity.secret = _generate_new_xsrf_secret_key()
        entity.put()
    memcache.add(XSRF_MEMCACHE_ID, entity.secret,
                 namespace=OAUTH2CLIENT_NAMESPACE)
    return str(entity.secret)
def open_resource(self, name):
    """Opens a resource from the zoneinfo subdir for reading."""
    parts = name.lstrip('/').split('/')
    if os.path.pardir in parts:
        raise ValueError('Bad path segment: %r' % os.path.pardir)
    cache_key = 'pytz.zoneinfo.%s.%s' % (pytz.OLSON_VERSION, name)
    zonedata = memcache.get(cache_key)
    if zonedata is not None:
        logging.info('Loaded timezone from memcache: %s' % cache_key)
    else:
        # Cache miss: read the data out of the bundled zip and cache it.
        zonedata = get_zoneinfo().read('zoneinfo/' + '/'.join(parts))
        memcache.add(cache_key, zonedata)
        logging.info('Added timezone to memcache: %s' % cache_key)
    return StringIO(zonedata)
def memcacher(f=None, cachekey=None, expiresec=600):
    """Memoizing decorator backed by memcache; usable with or without args."""
    # Called as @memcacher(cachekey=..., expiresec=...): return a decorator.
    if not f:
        return functools.partial(memcacher, cachekey=cachekey, expiresec=expiresec)

    def getvalue(*args, **kwargs):
        lcachekey = cachekey if cachekey else make_flash(f, *args, **kwargs)
        retval = memcache.get(lcachekey)  # @UndefinedVariable
        if retval is not None:
            logdebug("HIT: %s" % lcachekey)
            return retval
        logdebug("MISS: %s" % lcachekey)
        retval = f(*args, **kwargs)
        memcache.add(key=lcachekey, value=retval, time=expiresec)  # @UndefinedVariable
        return retval

    return getvalue
def GetPolygonTimeSeries(polygon_id, mypath, ref_start, ref_end, series_start, series_end):
    """Returns details about the polygon with the passed-in ID."""
    # NOTE: a memcache get/add layer was present here but is commented out
    # upstream; results are recomputed on every call.
    details = {}
    try:
        details['timeSeries'] = ComputePolygonTimeSeries(
            polygon_id, mypath, ref_start, ref_end, series_start, series_end)
    except ee.EEException as e:
        # Handle exceptions from the EE client library.
        details['error'] = str(e)
    # Send the results to the browser.
    return json.dumps(details)
def _request(self):
    """Sends the request

    Returns:
        list
    """
    # memcache.add acts as a 3-second throttle lock: if another scrape
    # registered recently, back off before hitting the site again.
    if not memcache.add(type(self).__name__, True, 3):
        time.sleep(3)
    logging.info('Scraping %s' % (type(self).__name__))
    url = "https://www.usatoday.com/sports/mlb/sagarin/2017/team/"
    response = urlfetch.fetch(url)
    try:
        return self._scrape(response.content)
    except (AttributeError, IndexError) as e:
        logging.exception(e)
        return []
def get_greetings(self, guestbook_name):
    """ get_greetings()
    Checks the cache to see if there are cached greetings.
    If not, call render_greetings and set the cache

    Args:
        guestbook_name: Guestbook entity group key (string).

    Returns:
        A string of HTML containing greetings.
    """
    cache_key = '{}:greetings'.format(guestbook_name)
    greetings = memcache.get(cache_key)
    if greetings is None:
        greetings = self.render_greetings(guestbook_name)
        # add() returns False when the write fails (e.g. value too large).
        if not memcache.add(cache_key, greetings, 10):
            logging.error('Memcache set failed.')
    return greetings
# [END check_memcache]

# [START query_datastore]
def add_values():
    # [START add_values]
    # Add a value if it doesn't exist in the cache
    # with a cache expiration of 1 hour.
    memcache.add(key="weather_USA_98105", value="raining", time=3600)

    # Set several values, overwriting any existing values for these keys.
    memcache.set_multi(
        {"USA_98115": "cloudy", "USA_94105": "foggy", "USA_94043": "sunny"},
        key_prefix="weather_",
        time=3600,
    )

    # Atomically increment an integer value.
    memcache.set(key="counter", value=0)
    for _ in range(3):
        memcache.incr("counter")
    # [END add_values]
def start_recording(env=None):
    """Start recording RPC traces.

    This creates a Recorder instance and sets it for the current request in
    the global RequestLocalRecorderProxy 'recorder_proxy'.

    Args:
        env: Optional WSGI environment; defaults to os.environ.
    """
    recorder_proxy.clear_for_current_request()
    env = os.environ if env is None else env
    if not config.should_record(env):
        return
    # memcache.add doubles as a lock: only the request that wins the add
    # installs a recorder.
    acquired = memcache.add(lock_key(), 0, time=config.LOCK_TIMEOUT,
                            namespace=config.KEY_NAMESPACE)
    if acquired:
        recorder_proxy.set_for_current_request(Recorder(env))
        if config.DEBUG:
            logging.debug('Set recorder')
def get_count(name):
    """Retrieve the value for a given sharded counter.

    Args:
        name: The name of the counter.

    Returns:
        Integer; the cumulative count of all sharded counters for the given
        counter name.
    """
    cached_total = memcache.get(name)
    if cached_total is not None:
        return cached_total
    # Cache miss: sum every shard for this counter and cache for 60 seconds.
    shards = ndb.get_multi(GeneralCounterShardConfig.all_keys(name))
    total = sum(shard.count for shard in shards if shard is not None)
    memcache.add(name, total, 60)
    return total
def open_resource(self, name):
    """Opens a resource from the zoneinfo subdir for reading."""
    # Import nested here so we can run setup.py without GAE.
    from google.appengine.api import memcache
    from pytz import OLSON_VERSION
    parts = name.lstrip('/').split('/')
    if os.path.pardir in parts:
        raise ValueError('Bad path segment: %r' % os.path.pardir)
    cache_key = 'pytz.zoneinfo.%s.%s' % (OLSON_VERSION, name)
    zonedata = memcache.get(cache_key)
    if zonedata is not None:
        log.info('Loaded timezone from memcache: %s' % cache_key)
    else:
        # Cache miss: read from the bundled zoneinfo zip and cache it.
        zonedata = get_zoneinfo().read('zoneinfo/' + '/'.join(parts))
        memcache.add(cache_key, zonedata)
        log.info('Added timezone to memcache: %s' % cache_key)
    return StringIO(zonedata)
def process_post(self, algorithm, useAt, data=None):
    """
    handle POST to run the specified algorithm

    :param algorithm: algorithm to run ("user/algorithm/version")
    :param useAt: should '@' be prefixed to 'uid' (in POST body JSON)
    :param data: additional params to send to the algorithm (dict or None)
    :return: algorithm result, or {'error': ...} on failure
    """
    # BUG FIX: the default was ``data={}.copy()``, which is evaluated exactly
    # once at definition time — the SAME dict was shared by every call, and
    # the "query"/"auth" keys written below leaked across requests. Use the
    # standard None-sentinel idiom instead.
    if data is None:
        data = {}
    urlfetch.set_default_fetch_deadline(120)
    uid = extract_uid(self.request.body)
    if not uid:
        return {'error': 'Invalid Twitter Username'}
    try:
        data["query"] = '@' + uid if useAt else uid
        # Sort the params so the cache key is stable across dict orderings.
        cache_key = algorithm + ' ' + json.dumps(
            collections.OrderedDict(sorted(data.items())))
        result = memcache.get(cache_key)
        if result is None:
            # Auth is added only for real calls, never into the cache key.
            data["auth"] = TWITTER_AUTH_DICT
            result = call_algorithmia(algorithm, data).result
            memcache.add(cache_key, result, settings.AG_CACHE_SECONDS)
        return result
    except Exception as x:
        return {'error': str(x)}
def add(*args, **kwargs):
    """No-op stand-in for memcache.add: accepts anything, returns None."""
    return None
def download(key, filename):
    """Serve a cached download; honor conditional requests with a 304."""
    if request.if_modified_since:
        return "HTTP_304_NOT_MODIFIED", 304
    cache_id = 'download_{}'.format(key)
    cached = memcache.get(cache_id)
    if cached is not None:
        return cached
    # Cache miss: fetch and cache for 30 days.
    data = get_download(key)
    memcache.add(cache_id, data, 86400 * 30)
    return data
def thumbnail(key, filename):
    """Serve a cached thumbnail; honor conditional requests with a 304."""
    if request.if_modified_since:
        return "HTTP_304_NOT_MODIFIED", 304
    cache_id = 'thumbnail_{}'.format(key)
    cached = memcache.get(cache_id)
    if cached is not None:
        return cached
    # Cache miss: generate and cache for 30 days.
    data = get_thumbnail(key)
    memcache.add(cache_id, data, 86400 * 30)
    return data
def get(self):
    """Handle a draw-polygon request: build an EE polygon from the request
    and return its computed time series as JSON."""
    # SECURITY NOTE(review): ``eval`` below executes arbitrary Python on a
    # request parameter (untrusted input). It should be replaced with
    # json.loads or ast.literal_eval — flagged here, not changed.
    polygon = unicode(self.request.get('polygon'))
    refLow = self.request.get('refLow')
    refHigh = self.request.get('refHigh')
    studyLow = self.request.get('studyLow')
    studyHigh = self.request.get('studyHigh')
    # Expand the year parameters into full-year date ranges.
    ref_start = refLow + '-01-01'
    ref_end = refHigh + '-12-31'
    series_start = studyLow + '-01-01'
    series_end = studyHigh + '-12-31'
    coords = []
    # Swap each [lat, lng] pair to [lng, lat] as EE geometry expects.
    for items in eval(polygon):
        coords.append([items[1], items[0]])
    mypoly = ee.FeatureCollection(ee.Geometry.Polygon(coords))
    #details = memcache.get(str(counter))
    details = ComputePolygonDrawTimeSeries(mypoly, ref_start, ref_end, series_start, series_end)
    #memcache.add(str(counter), json.dumps(details), MEMCACHE_EXPIRATION)
    content = json.dumps(details)
    # ComputePolygonDrawTimeSeries(mypoly,ref_start,ref_end,series_start,series_end)
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(content)
def _request(self):
    """Makes requests to vegasinsider odds pages to get game odds

    Returns:
        dict: values are self._scrape()
    """
    # memcache.add acts as a 3-second throttle lock between scrapes.
    if not memcache.add(type(self).__name__, True, 3):
        time.sleep(3)
    logging.info('Scraping VegasInsider for %s' % (self.league))
    url = "http://www.vegasinsider.com/%s/odds/las-vegas/" % (self.league)
    response = urlfetch.fetch(url)
    # (An "offshore" odds scrape existed here but is disabled upstream.)
    try:
        vegas_odds = self._scrape(response.content, 1)
    except IndexError as e:
        logging.exception(e)
        vegas_odds = {}
    return {
        'vegas': vegas_odds,
    }
def test_get_data_present(query_fn, testbed):
    # Pre-populate memcache so get_data() never reaches the query path.
    memcache.add('key', 'data', 9000)
    result = snippets.get_data()
    query_fn.assert_not_called()
    assert result == 'data'
    memcache.delete('key')
def get_data():
    """Return cached data, querying and caching it on a miss."""
    cached = memcache.get('key')
    if cached is None:
        cached = query_for_data()
        memcache.add('key', cached, 60)
    return cached
# [END get_data]
def get_or_add_person(name, userid):
    """Fetch a Person from memcache by name, creating and caching on a miss."""
    person = memcache.get(name)
    if person is not None:
        logging.info('Found in cache: ' + name + ', userid: ' + person.userid)
        return person
    person = Person(name=name, userid=userid)
    memcache.add(name, person)
    return person
# [END best-practice-2]
def get(self):
    """Demonstrate the read-through-cache pattern for a single key."""
    key = "some-key"
    # [START memcache-read]
    value = memcache.get(key)
    if value is None:
        value = read_from_persistent_store()
        memcache.add(key, value)
    # [END memcache-read]
    self.response.content_type = 'text/html'
    self.response.write(str(value))
def emit(self, record):
    """Log an error to the datastore, if applicable.

    Args:
        The logging.LogRecord object.
        See http://docs.python.org/library/logging.html#logging.LogRecord
    """
    try:
        # Only records that carry exception info are persisted.
        if not record.exc_info:
            return
        signature = self.__GetSignature(record.exc_info)
        # Work is done in the empty namespace; remember the current one so
        # it can be restored no matter how the block exits.
        old_namespace = namespace_manager.get_namespace()
        try:
            namespace_manager.set_namespace('')
            # memcache.add succeeds only once per log_interval per signature,
            # acting as a rate limiter; losing the add means skip this write.
            if not memcache.add(signature, None, self.log_interval):
                return
            # Single-retry transaction writes the error via __EmitTx.
            db.run_in_transaction_custom_retries(1, self.__EmitTx, signature, record.exc_info)
        finally:
            namespace_manager.set_namespace(old_namespace)
    except Exception:
        # Never let logging itself raise; defer to the handler's error hook.
        self.handleError(record)
def gettroopmeetings(troop_key):
    """Return a troop's Meeting entities, newest first, caching the key list."""
    troopmeeting_keys = memcache.get(Meeting.__getMemcacheKeyString(troop_key))
    if troopmeeting_keys is None:
        # Cache miss: run a keys-only query and cache the resulting key list.
        troopmeeting_keys = Meeting.query(Meeting.troop == troop_key).fetch(keys_only=True)
        memcache.add(Meeting.__getMemcacheKeyString(troop_key), troopmeeting_keys)
    # Resolve each key, dropping entities that have since been deleted.
    troopmeetings = [m for m in (k.get() for k in troopmeeting_keys) if m is not None]
    troopmeetings.sort(key=lambda m: m.datetime, reverse=True)
    return troopmeetings
def getTroopPersonsForTroop(troop_key):
    """Return a troop's TroopPerson entities, leaders first, caching keys."""
    troopperson_keys = memcache.get(TroopPerson.__getMemcacheKeyString(troop_key))
    if troopperson_keys is None:
        # Cache miss: run a keys-only query and cache the resulting key list.
        troopperson_keys = TroopPerson.query(TroopPerson.troop == troop_key).fetch(keys_only=True)
        memcache.add(TroopPerson.__getMemcacheKeyString(troop_key), troopperson_keys)
    # Resolve each key, dropping entities that have since been deleted.
    trooppersons = [tp for tp in (k.get() for k in troopperson_keys) if tp is not None]
    trooppersons.sort(key=lambda tp: (-tp.leader, tp.sortname))
    return trooppersons
def updateMemcache(self):
    """Store this object in memcache under its userid, adding or replacing."""
    added = memcache.add(self.userid, self)
    if not added:
        # Key already present: overwrite the stale entry.
        memcache.replace(self.userid, self)
def process_exception(self, suppressed_exceptions=None):
    """Log the current exception and email admins, throttled per exception type.

    Args:
        suppressed_exceptions: optional collection of exception names that
            should be logged but never emailed.

    Returns:
        Tuple of (exception_name, exception_details, exception_traceback).
    """
    # FIX: sys.exc_info() was called four times; capture it once up front.
    exc_type, exc_value, exc_tb = sys.exc_info()
    exception_name = exc_type.__name__
    exception_details = str(exc_value)
    exception_traceback = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
    logging.error(exception_traceback)
    send_email = not (suppressed_exceptions and exception_name in suppressed_exceptions)
    if send_email:
        exception_expiration = 3600  # seconds (max 1 mail per hour for a particular exception)
        mail_admin = SENDER_EMAIL  # must be admin for the application
        sitename = SITENAME
        ver = os.environ['CURRENT_VERSION_ID']
        dev = any(keyword in ver for keyword in TEST_VERSIONS) or tools.on_dev_server()
        sitename += ":DEV" if dev else ":PROD"
        session = self.session
        ename = "Unknown Org"
        # FIX: dict.has_key() is deprecated (and removed in Python 3);
        # the `in` operator is the equivalent membership test.
        if session and 'enterprise' in session:
            ename = session['enterprise'].name
        # memcache entry throttles mail: only the first occurrence within
        # exception_expiration seconds sends an email (never on dev).
        throttle_name = 'exception-' + exception_name
        throttle = memcache.get(throttle_name)
        if throttle is None and not dev:
            memcache.add(throttle_name, 1, exception_expiration)
            subject = '[%s] exception for %s [%s: %s]' % (sitename, ename, exception_name, exception_details)
            body = exception_traceback + "\n\n" + self.request.uri
            mail.send_mail(to=ERROR_EMAIL, sender=mail_admin, subject=subject, body=body)
    return exception_name, exception_details, exception_traceback