我们从Python开源项目中,提取了以下13个代码示例,用于说明如何使用google.appengine.api.urlfetch.set_default_fetch_deadline()。
def iterExport(module, target, importKey, cursor=None):
    """Export one batch of 100 entries for *module*, then queue the next batch.

    Walks the datastore query cursor forward recursively until it stops
    advancing (i.e. no further entries remain).
    """
    urlfetch.set_default_fetch_deadline(20)
    skelCls = skeletonByKind(module)
    if not skelCls:
        logging.error("TaskExportKind: Invalid module")
        return
    query = skelCls().all().cursor(cursor)
    startCursor = cursor
    query.run(100, keysOnly=True)
    endCursor = query.getCursor().urlsafe()
    exportItems(module, target, importKey, startCursor, endCursor)
    # First pass (startCursor is None) always recurses; afterwards stop
    # once the cursor no longer moves.
    if startCursor is None or startCursor != endCursor:
        iterExport(module, target, importKey, endCursor)
def process_post(self, algorithm, useAt, data=None):
    """
    Handle POST to run the specified algorithm.

    :param algorithm: algorithm to run ("user/algorithm/version")
    :param useAt: should '@' be prefixed to 'uid' (in POST body JSON)
    :param data: additional params to send to the algorithm
    :return: the algorithm result (possibly cached), or an {'error': ...} dict
    """
    # BUG FIX: the original signature used data={}.copy(), which is evaluated
    # ONCE at definition time — the same dict was then mutated below
    # ("query"/"auth" keys), leaking state between requests. Use a None
    # sentinel instead.
    if data is None:
        data = {}
    urlfetch.set_default_fetch_deadline(120)
    uid = extract_uid(self.request.body)
    if not uid:
        return {'error': 'Invalid Twitter Username'}
    try:
        data["query"] = '@' + uid if useAt else uid
        # Cache key is deterministic: params serialized in sorted key order.
        cache_key = algorithm + ' ' + json.dumps(
            collections.OrderedDict(sorted(data.items())))
        result = memcache.get(cache_key)
        if result is None:
            # Auth is added only for the real call, so it never taints the key.
            data["auth"] = TWITTER_AUTH_DICT
            result = call_algorithmia(algorithm, data).result
            memcache.add(cache_key, result, settings.AG_CACHE_SECONDS)
        return result
    except Exception as x:
        return {'error': str(x)}
def get(self):
    """Call the Telegram getMe endpoint and echo its JSON reply."""
    urlfetch.set_default_fetch_deadline(60)
    reply = json.load(urllib2.urlopen(BASE_URL + 'getMe'))
    self.response.write(json.dumps(reply))
def get(self):
    """Call the Telegram getUpdates endpoint and echo its JSON reply."""
    urlfetch.set_default_fetch_deadline(60)
    reply = json.load(urllib2.urlopen(BASE_URL + 'getUpdates'))
    self.response.write(json.dumps(reply))
def get(self):
    """Register the webhook address given via the 'url' query parameter."""
    urlfetch.set_default_fetch_deadline(60)
    url = self.request.get('url')
    if url:
        payload = urllib.urlencode({'url': url})
        reply = json.load(urllib2.urlopen(BASE_URL + 'setWebhook', payload))
        self.response.write(json.dumps(reply))
def post(self):
    """Webhook endpoint: echo the incoming update and process its message."""
    urlfetch.set_default_fetch_deadline(60)
    body = json.loads(self.request.body)
    self.response.write(json.dumps(body))
    process_cmds(body['message'])
def fixAppengine():
    """When running on App Engine (dev server or production), monkeypatch
    `requests` to use urlfetch and raise the default fetch deadline."""
    software = os.environ.get('SERVER_SOFTWARE')
    if not isinstance(software, str):
        return
    if software.startswith(('Development', 'Google App Engine')):
        from requests_toolbelt.adapters import appengine
        from google.appengine.api import urlfetch
        appengine.monkeypatch()
        urlfetch.set_default_fetch_deadline(600)
def get(self):
    """Fetch the bot's getMe info and write it back as JSON."""
    urlfetch.set_default_fetch_deadline(60)
    data = json.load(urllib2.urlopen(BASE_URL + 'getMe'))
    self.response.write(json.dumps(data))
def get(self):
    """Fetch pending updates via getUpdates and write them back as JSON."""
    urlfetch.set_default_fetch_deadline(60)
    data = json.load(urllib2.urlopen(BASE_URL + 'getUpdates'))
    self.response.write(json.dumps(data))
def get(self):
    """Set the bot webhook to the URL supplied in the 'url' query parameter."""
    urlfetch.set_default_fetch_deadline(60)
    target = self.request.get('url')
    if target:
        body = urllib.urlencode({'url': target})
        data = json.load(urllib2.urlopen(BASE_URL + 'setWebhook', body))
        self.response.write(json.dumps(data))
def setTimeout(numSec=60):
    """Set the default urlfetch deadline, in seconds, for subsequent requests."""
    urlfetch.set_default_fetch_deadline(numSec)

# Deserialise object and serialise it to JSON formatted string
def get(self):
    """Register the webhook URL given in the 'url' query parameter."""
    urlfetch.set_default_fetch_deadline(60)
    hook_url = self.request.get('url')
    if hook_url:
        encoded = urlencode({'url': hook_url})
        answer = json.load(urlopen(BASE_URL + 'setWebhook', encoded))
        self.response.write(json.dumps(answer))
def scrape_intelli():
    """Scrape Intelligentsia's coffee catalog and upsert each coffee.

    Fetches the AJAX product listing, builds one coffee_data dict per item
    (including the downloaded product image bytes), hands each to
    add_or_update_coffee(), and logs new/updated/error tallies at the end.
    """
    urlfetch.set_default_fetch_deadline(10)
    roaster = 'Intelligentsia'
    intelli = 'https://www.intelligentsiacoffee.com/catalog/ajax/products/?filter%5Bcat%5D=5'
    r = requests.get(intelli)
    # Cleanup: removed the unused BeautifulSoup parse of r.content and the
    # dead `[""] * 7` / `int()` initializations that were always overwritten.
    x = r.json()
    total_coffees = len(x['data'])
    coffees_entered = 0
    coffees_updated = 0
    error_coffees = []
    for item in x['data']:
        product_url = item['productUrl']
        logging.info("Getting url: {}".format(product_url))
        # Not every product exposes a flavor profile; fall back to a blank note.
        try:
            notes = item['flavor_profile_text'].split(',')
        except KeyError:
            notes = [""]
        image_url = ('https://www.intelligentsiacoffee.com/media/catalog/product'
                     + item['small_image'])
        image_blob = requests.get(image_url).content
        coffee_data = {
            'name': item['original_name'],
            'roaster': roaster,
            'description': item['description'],
            'price': float(item['price']),
            'notes': notes,
            'region': item['country'],
            'active': True,
            'product_page': product_url,
            'size': '12oz',  # listing carries no per-item size; hard-coded upstream
            'image': image_blob,
        }
        coffees_updated, coffees_entered, error_coffees = add_or_update_coffee(
            coffee_data, coffees_updated, coffees_entered, error_coffees)
    logging.info('Intelligentsia New Results:{} / {}'.format(coffees_entered, total_coffees))
    logging.info('Intelligentsia Updated Results:{} / {}'.format(
        coffees_updated, total_coffees))
    if error_coffees:
        # Typo fix: the original warning said 'Intelligensia'.
        logging.warning('Intelligentsia Error coffees are: {}'.format(error_coffees))