我们从 Python 开源项目中，提取了以下 50 个代码示例，用于说明如何使用 aiohttp.web.HTTPNotFound()。
async def http_handler(self, request):
    """Dispatch monitor HTTP requests by path suffix.

    Routes the websocket endpoint, the monitor HTML page, GitHub OAuth
    login/callback and logout; any unmatched path yields 404.

    :param request: aiohttp request object.
    :returns: an aiohttp response (HTTPFound / HTTPNotFound / page body).
    """
    # `async` is required: the body awaits, and `await` inside a plain
    # `def` is a SyntaxError.
    if request.path.endswith("api.sock"):
        return await self.ws_handler(request)
    if request.path.endswith("/monitor/"):
        data = pkgutil.get_data("rci.services.monitor", "monitor.html").decode("utf8")
        return web.Response(text=data, content_type="text/html")
    if request.path.endswith("/login/github"):
        if request.method == "POST":
            url = self.oauth.generate_request_url(("read:org", ))
            return web.HTTPFound(url)
    if request.path.endswith("/oauth2/github"):
        return (await self._oauth2_handler(request))
    if request.path.endswith("logout"):
        if request.method == "POST":
            sid = request.cookies.get(self.config["cookie_name"])
            # pop() instead of `del`: a missing/expired session cookie must
            # not raise KeyError and turn logout into a 500.
            self.sessions.pop(sid, None)
            return web.HTTPFound("/monitor/")
    return web.HTTPNotFound()
def get_flag(base_url, cc):
    """Fetch the flag GIF for country code *cc* (pre-3.5 coroutine style).

    Returns the image bytes on HTTP 200, raises web.HTTPNotFound on 404,
    and aiohttp.HttpProcessingError for any other status.
    """
    url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
    resp = yield from aiohttp.request('GET', url)
    with contextlib.closing(resp):
        if resp.status == 404:
            raise web.HTTPNotFound()
        if resp.status != 200:
            raise aiohttp.HttpProcessingError(
                code=resp.status, message=resp.reason,
                headers=resp.headers)
        image = yield from resp.read()
        return image

# BEGIN FLAGS2_ASYNCIO_EXECUTOR
def download_one(cc, base_url, semaphore, verbose):
    """Download one flag under *semaphore*, saving it via the default
    executor; return a Result(status, cc)."""
    try:
        with (yield from semaphore):
            image = yield from get_flag(base_url, cc)
    except web.HTTPNotFound:
        status, msg = HTTPStatus.not_found, 'not found'
    except Exception as exc:
        raise FetchError(cc) from exc
    else:
        loop = asyncio.get_event_loop()  # <1>
        # Blocking disk write runs in the default thread pool.  # <2> <3>
        loop.run_in_executor(None, save_flag, image, cc.lower() + '.gif')
        status, msg = HTTPStatus.ok, 'OK'
    if verbose and msg:
        print(cc, msg)
    return Result(status, cc)

# END FLAGS2_ASYNCIO_EXECUTOR
def http_get(url):
    """GET *url*; return parsed JSON when the content type (or URL suffix)
    says JSON, raw bytes otherwise.  404 -> web.HTTPNotFound, anything
    else non-200 -> aiohttp.errors.HttpProcessingError."""
    res = yield from aiohttp.request('GET', url)
    if res.status == 200:
        content_type = res.headers.get('Content-type', '').lower()
        if 'json' in content_type or url.endswith('json'):
            data = yield from res.json()  # <1>
        else:
            data = yield from res.read()  # <2>
        return data
    if res.status == 404:
        raise web.HTTPNotFound()
    raise aiohttp.errors.HttpProcessingError(
        code=res.status, message=res.reason, headers=res.headers)
def download_one(cc, base_url, semaphore, verbose):
    """Fetch a flag image plus its country name and save it as
    '<Country>-<cc>.gif' via the default executor."""
    try:
        with (yield from semaphore):  # <5>
            image = yield from get_flag(base_url, cc)
        with (yield from semaphore):
            country = yield from get_country(base_url, cc)
    except web.HTTPNotFound:
        status, msg = HTTPStatus.not_found, 'not found'
    except Exception as exc:
        raise FetchError(cc) from exc
    else:
        country = country.replace(' ', '_')
        filename = '{}-{}.gif'.format(country, cc)
        loop = asyncio.get_event_loop()
        # Blocking disk write happens off the event loop.
        loop.run_in_executor(None, save_flag, image, filename)
        status, msg = HTTPStatus.ok, 'OK'
    if verbose and msg:
        print(cc, msg)
    return Result(status, cc)

# END FLAGS3_ASYNCIO
async def download_one(cc, base_url, semaphore, verbose):  # <3>
    """Download a single flag image, bounded by *semaphore*.

    :returns: Result(status, cc).
    :raises FetchError: on any unexpected failure (chains the cause).
    """
    # `async` is required: the body awaits, and `await` inside a plain
    # `def` is a SyntaxError.
    try:
        with (await semaphore):  # <4>
            image = await get_flag(base_url, cc)  # <5>
    except web.HTTPNotFound:  # <6>
        status = HTTPStatus.not_found
        msg = 'not found'
    except Exception as exc:
        raise FetchError(cc) from exc  # <7>
    else:
        save_flag(image, cc.lower() + '.gif')  # <8>
        status = HTTPStatus.ok
        msg = 'OK'
    if verbose and msg:
        print(cc, msg)
    return Result(status, cc)

# END FLAGS2_ASYNCIO_TOP

# BEGIN FLAGS2_ASYNCIO_DOWNLOAD_MANY
def register_in_memory_block_store_api(app, prefix='/blockstore'):
    """Mount a trivial in-memory block store on *app*.

    GET  <prefix>/{id} -> raw bytes or 404.
    POST <prefix>/{id} -> store body; 409 on duplicate id.
    """
    blocks = {}  # id -> raw bytes; lives for the app's lifetime

    async def api_block_get(request):
        block_id = request.match_info['id']
        try:
            payload = blocks[block_id]
        except KeyError:
            raise web.HTTPNotFound()
        return web.Response(body=payload, content_type='application/octet-stream')

    async def api_block_post(request):
        block_id = request.match_info['id']
        if block_id in blocks:
            raise web.HTTPConflict()
        blocks[block_id] = await request.read()
        return web.Response()

    app.router.add_get(prefix + '/{id}', api_block_get)
    app.router.add_post(prefix + '/{id}', api_block_post)
def delete_file(request: web.Request):
    """Delete a stored file and schedule replication of the deletion.

    Responds 404 when the file is unknown; deletions that did not come
    from another cockatiel node are still propagated so peers converge.

    :param request: aiohttp request; match-info 'name' is the filename.
    :returns: empty 200 response on successful deletion.
    """
    filename = request.match_info.get('name').strip()
    filepath = os.path.join(config.args.storage, filename)

    if filename in replication.dellog:  # We know this already
        raise web.HTTPNotFound()

    if not os.path.exists(filepath):
        # .get(): a client that sends no User-Agent header must not
        # trigger a KeyError (HTTP 500) here.
        if not request.headers.get('User-Agent', '').startswith('cockatiel/'):
            logger.debug('File {} does not exist, but we will still propagate the deletion.'.format(filename))
            replication.dellog.put(filename)
            replication.queue_operation('DELETE', filename)
        raise web.HTTPNotFound()

    os.remove(filepath)
    # TODO: Clean up now-empty dictionaries
    logger.debug('Deleted file {}, scheduling replication.'.format(filename))
    replication.dellog.put(filename)
    replication.queue_operation('DELETE', filename)
    return web.Response()
async def restart(request):
    """Kick off a CI run for ``/{prefix}/{ref}`` and redirect to its logs.

    :returns: HTTPFound to the log URL, or HTTPNotFound when *prefix*
        names no known scope.
    """
    # `async` is required: the body awaits asyncio.sleep().
    prefix = request.match_info['prefix']
    try:
        repo = getattr(scopes, prefix).name
    except AttributeError:
        return web.HTTPNotFound()
    ref = request.match_info['ref']
    ref = Ref(repo, ref, '<sha>')
    targets = request.GET.get('t', '').split(',')
    # Renamed from `all` so the builtin is not shadowed.
    run_all = request.GET.get('all')
    request.app.loop.create_task(ci(ref, targets, run_all))
    # Give the task a moment to create the log directory before redirecting.
    await asyncio.sleep(2)
    log_url = '%slatest/%s/' % (conf['log_url'], ref.uid)
    return web.HTTPFound(log_url)
async def call(self, request):
    """Build the template context for a single stored message.

    :raises HTTPNotFound: when the message id matches no document.
    :returns: dict of template variables for the message detail page.
    """
    # `async` is required: the body awaits self.query()/insert_events().
    # NOTE(review): the body reads self.request rather than the `request`
    # argument — presumably they are the same object; confirm against caller.
    msg_id = self.request.match_info['id']
    data = await self.query(message_id=msg_id)
    await self.insert_events(data)
    if len(data['hits']['hits']) == 0:
        raise HTTPNotFound(text='message not found')
    data = data['hits']['hits'][0]
    preview_path = self.app.router['user-preview'].url_for(**self.request.match_info)
    return dict(
        base_template='user/base-{}.jinja'.format('raw' if self.request.query.get('raw') else 'page'),
        title='{_type} - {_id}'.format(**data),
        id=data['_id'],
        method=data['_type'],
        details=self._details(data),
        events=list(self._events(data)),
        preview_url=self.full_url(f'{preview_path}?{self.request.query_string}'),
        attachments=list(self._attachments(data)),
    )
async def get_category(request):
    """
    Args:
         request: category_name is required

    Returns:
            the configuration items in the given category.

    :Example:
            curl -X GET http://localhost:8081/category/PURGE_READ
    """
    # `async` is required: the body awaits the configuration manager.
    category_name = request.match_info.get('category_name', None)
    if not category_name:
        raise web.HTTPBadRequest(reason="Category Name is required")
    # TODO: make it optimized and elegant
    cf_mgr = ConfigurationManager(connect.get_storage())
    category = await cf_mgr.get_category_all_items(category_name)
    if category is None:
        raise web.HTTPNotFound(reason="No such Category Found for {}".format(category_name))
    return web.json_response(category)
async def get_category_item(request):
    """
    Args:
         request: category_name & config_item are required

    Returns:
            the configuration item in the given category.

    :Example:
            curl -X GET http://localhost:8081/foglamp/category/PURGE_READ/age
    """
    # `async` is required: the body awaits the configuration manager.
    category_name = request.match_info.get('category_name', None)
    config_item = request.match_info.get('config_item', None)
    if not category_name or not config_item:
        raise web.HTTPBadRequest(reason="Both Category Name and Config items are required")
    # TODO: make it optimized and elegant
    cf_mgr = ConfigurationManager(connect.get_storage())
    category_item = await cf_mgr.get_category_item(category_name, config_item)
    if category_item is None:
        raise web.HTTPNotFound(reason="No Category Item Found")
    return web.json_response(category_item)
def get_backups(request):
    """ Returns a list of all backups

    :Example: curl -X GET http://localhost:8082/foglamp/backup
    :Example: curl -X GET http://localhost:8082/foglamp/backup?limit=2&skip=1&status=complete
    """
    try:
        raw_limit = request.query.get('limit')
        limit = int(raw_limit) if raw_limit is not None else None
        raw_skip = request.query.get('skip')
        skip = int(raw_skip) if raw_skip is not None else None
        status = request.query.get('status')
        # TODO : Fix after actual implementation
        Backup.get_backup_list.return_value = [
            {'id': 28, 'date': '2017-08-30 04:05:10.382', 'status': 'running'},
            {'id': 27, 'date': '2017-08-29 04:05:13.392', 'status': 'failed'},
            {'id': 26, 'date': '2017-08-28 04:05:08.201', 'status': 'complete'},
        ]
        backup_json = Backup.get_backup_list(limit=limit, skip=skip, status=status)
    except Backup.DoesNotExist:
        raise web.HTTPNotFound(reason='No backups found for queried parameters')
    return web.json_response({"backups": backup_json})
def delete_backup(request):
    """ Delete a backup

    :Example: curl -X DELETE http://localhost:8082/foglamp/backup/1
    """
    # Guard clauses instead of if/else nesting: bail out early on bad input.
    backup_id = request.match_info.get('backup_id', None)
    if not backup_id:
        raise web.HTTPBadRequest(reason='Backup id is required')
    try:
        backup_id = int(backup_id)
    except ValueError:
        raise web.HTTPBadRequest(reason='Invalid backup id')
    try:
        # TODO : Fix after actual implementation
        Backup.delete_backup.return_value = "Backup deleted successfully"
    except Backup.DoesNotExist:
        raise web.HTTPNotFound(reason='Backup with {} does not exist'.format(backup_id))
    _resp = Backup.delete_backup(id=backup_id)
    return web.json_response({'message': _resp})
def restore_backup(request):
    """ Restore from a backup

    :Example: curl -X PUT http://localhost:8082/foglamp/backup/1/restore
    """
    # Guard clauses instead of if/else nesting: bail out early on bad input.
    backup_id = request.match_info.get('backup_id', None)
    if not backup_id:
        raise web.HTTPBadRequest(reason='Backup id is required')
    try:
        backup_id = int(backup_id)
    except ValueError:
        raise web.HTTPBadRequest(reason='Invalid backup id')
    try:
        # TODO : Fix after actual implementation
        Backup.restore_backup.return_value = 1
    except Backup.DoesNotExist:
        raise web.HTTPNotFound(reason='Backup with {} does not exist'.format(backup_id))
    try:
        Backup.restore_backup(id=backup_id)
        return web.json_response({'message': 'Restore backup with id {} started successfully'.format(backup_id)})
    except Backup.RestoreFailed as ex:
        return web.json_response({'error': 'Restore backup with id {} failed, reason {}'.format(backup_id, ex)})
def get_scheduled_process(request):
    """ Returns a list of all the defined scheduled_processes from scheduled_processes table """
    scheduled_process_name = request.match_info.get('scheduled_process_name', None)
    if not scheduled_process_name:
        raise web.HTTPBadRequest(reason='No Scheduled Process Name given')
    payload = PayloadBuilder().SELECT(("name")).WHERE(["name", "=", scheduled_process_name]).payload()
    _storage = connect.get_storage()
    scheduled_process = _storage.query_tbl_with_payload('scheduled_processes', payload)
    # Empty result set means the process name is unknown.
    if not scheduled_process['rows']:
        raise web.HTTPNotFound(reason='No such Scheduled Process: {}.'.format(scheduled_process_name))
    return web.json_response(scheduled_process['rows'][0].get("name"))

#################################
# Schedules
#################################
async def delete_schedule(request):
    """ Delete a schedule from schedules table

    :Example: curl -X DELETE http://localhost:8082/foglamp/schedule/dc9bfc01-066a-4cc0-b068-9c35486db87f
    """
    # `async` is required: the body awaits the scheduler.
    try:
        schedule_id = request.match_info.get('schedule_id', None)
        if not schedule_id:
            raise web.HTTPBadRequest(reason='Schedule ID is required.')
        try:
            # Plain call instead of `assert uuid.UUID(...)`: asserts are
            # stripped under python -O and must not be used for validation.
            # uuid.UUID raises ValueError on a malformed id.
            uuid.UUID(schedule_id)
        except ValueError:
            raise web.HTTPNotFound(reason="Invalid Schedule ID {}".format(schedule_id))
        await server.Server.scheduler.delete_schedule(uuid.UUID(schedule_id))
        return web.json_response({'message': 'Schedule deleted successfully', 'id': schedule_id})
    except (ValueError, ScheduleNotFoundError) as ex:
        raise web.HTTPNotFound(reason=str(ex))
def unregister(request):
    """ Deregister a service

    :Example: curl -X DELETE http://localhost:8082/foglamp/service/dc9bfc01-066a-4cc0-b068-9c35486db87f
    """
    try:
        service_id = request.match_info.get('service_id', None)
        if not service_id:
            raise web.HTTPBadRequest(reason='Service id is required')
        try:
            Service.Instances.get(idx=service_id)
        except Service.DoesNotExist:
            raise web.HTTPBadRequest(reason='Service with {} does not exist'.format(service_id))
        Service.Instances.unregister(service_id)
        return web.json_response({'id': str(service_id), 'message': 'Service unregistered'})
    except ValueError as ex:
        raise web.HTTPNotFound(reason=str(ex))
def error_middleware(app, handler):
    """aiohttp middleware factory: convert handler errors (and plain 404
    responses) into uniform JSON API error payloads.

    :param app: the aiohttp application (unused directly, middleware API).
    :param handler: the next handler in the chain.
    :returns: the wrapping coroutine handler.
    """
    async def middleware_handler(request):
        if_trace = request.query.get('trace') if 'trace' in request.query and request.query.get('trace') == '1' else None
        try:
            response = await handler(request)
            if response.status == 404:
                # Bug fix: the original referenced `ex` on this path, but no
                # exception exists here (NameError). Report the class name
                # of the equivalent HTTP error instead.
                return handle_api_exception(
                    {"code": response.status, "message": response.message},
                    web.HTTPNotFound.__name__, if_trace)
            return response
        except (web.HTTPNotFound, web.HTTPBadRequest) as ex:
            return handle_api_exception({"code": ex.status_code, "message": ex.reason}, ex.__class__.__name__, if_trace)
        except web.HTTPException:
            raise
        # Below Exception must come last as it is the super class of all exceptions
        except Exception as ex:
            return handle_api_exception(ex, ex.__class__.__name__, if_trace)
    return middleware_handler
async def http_handler(self, request):
    """Dispatch to a ``_http_<segment>`` method named after the second
    path segment; 404 when no such method exists.

    :param request: aiohttp request object.
    """
    # `async` is required: the body awaits the dispatched handler.
    path = request.path.split("/")[2]
    logging.info("%s %s", request, path)
    handler = getattr(self, "_http_%s" % path, None)
    if handler:
        return await handler(request)
    return web.HTTPNotFound()
async def handle_wsdl_request(self, req):
    """Serve the (lazily built, cached) WSDL 1.1 interface document.

    The document is built once under ``self._mtx_build_interface_document``
    and cached on ``self._wsdl``; build failures fire 'wsdl_exception' and
    become a 500.  404 when no WSDL 1.1 document is configured.

    :param req: aiohttp request object.
    :returns: streaming text/xml response with the WSDL.
    """
    # `async` is required: the function awaits make_streaming_response().
    ctx = AioMethodContext(self, req, 'text/xml; charset=utf-8',
                           aiohttp_app=self._aiohttp_app)
    if self.doc.wsdl11 is None:
        raise web.HTTPNotFound(headers=ctx.transport.resp_headers)
    if self._wsdl is None:
        self._wsdl = self.doc.wsdl11.get_interface_document()
    ctx.transport.wsdl = self._wsdl
    if ctx.transport.wsdl is None:
        # Double-checked under the lock: only one request builds the document.
        with self._mtx_build_interface_document:
            try:
                ctx.transport.wsdl = self._wsdl
                if ctx.transport.wsdl is None:
                    actual_url = urlunparse([req.scheme, req.host, req.path, '', '', ''])
                    self.doc.wsdl11.build_interface_document(actual_url)
                    ctx.transport.wsdl = self._wsdl = self.doc.wsdl11.get_interface_document()
            except Exception as e:
                logger.exception(e)
                ctx.transport.wsdl_error = e
                self.event_manager.fire_event('wsdl_exception', ctx)
                raise web.HTTPInternalServerError(headers=ctx.transport.resp_headers)
    self.event_manager.fire_event('wsdl', ctx)
    ctx.transport.resp_headers['Content-Length'] = str(len(ctx.transport.wsdl))
    ctx.close()
    return await self.make_streaming_response(
        req=req, code=200,
        headers=ctx.transport.resp_headers,
        content=[ctx.transport.wsdl])
async def download_file(self, request):
    """Proxy-stream a Telegram file identified by match-info ``file_id``.

    404 when no track record exists for the id; forwards the client's
    Range header upstream and copies range/caching headers back.
    """
    # `async` is required: the body awaits db/bot calls.
    file_id = request.match_info['file_id']
    record = await db.tracks.find_one({"file_id": file_id})
    if not record:
        return web.HTTPNotFound()
    file = await self.bot.get_file(file_id)
    file_path = file["file_path"]
    # Renamed from `range` so the builtin is not shadowed.
    range_header = request.headers.get("range")
    copy_headers = ["content-length", "content-range", "etag", "last-modified"]
    async with self.bot.download_file(file_path, range_header) as r:
        # Prepare headers
        resp = web.StreamResponse(status=r.status)
        resp.content_type = record["mime_type"]
        for h in copy_headers:
            val = r.headers.get(h)
            if val:
                resp.headers[h] = val
        await resp.prepare(request)
        # Send content
        while True:
            chunk = await r.content.read(chunk_size)
            if not chunk:
                break
            # NOTE(review): modern aiohttp requires `await resp.write(...)`;
            # the un-awaited call only works on old versions — confirm.
            resp.write(chunk)
    return resp
def get_flag(base_url, cc):  # <2>
    """Fetch the flag GIF for *cc* (pre-3.5 coroutine style): bytes on 200,
    HTTPNotFound on 404, HttpProcessingError otherwise."""
    url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
    resp = yield from aiohttp.request('GET', url)
    with contextlib.closing(resp):
        if resp.status == 404:
            raise web.HTTPNotFound()
        if resp.status != 200:
            raise aiohttp.HttpProcessingError(
                code=resp.status, message=resp.reason,
                headers=resp.headers)
        image = yield from resp.read()
        return image
async def get_flag(base_url, cc):  # <2>
    """Fetch the flag GIF for *cc*.

    :returns: image bytes on HTTP 200.
    :raises web.HTTPNotFound: on 404.
    :raises aiohttp.HttpProcessingError: on any other status.
    """
    # `async` is required: the body awaits, and `await` inside a plain
    # `def` is a SyntaxError.
    url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
    with closing(await aiohttp.request('GET', url)) as resp:
        if resp.status == 200:
            image = await resp.read()
            return image
        elif resp.status == 404:
            raise web.HTTPNotFound()
        else:
            raise aiohttp.HttpProcessingError(
                code=resp.status, message=resp.reason,
                headers=resp.headers)
def register_start_api(app, dispatcher, route='/start'):
    """Register the bootstrap endpoints (cipherkey get/post, pubkey post)
    under *route* on *app*."""

    async def api_cipherkey_get(request):
        effect = Effect(EPrivKeyGet(request.match_info['hash']))
        try:
            key = await asyncio_perform(dispatcher, effect)
        except PrivKeyNotFound:
            raise web.HTTPNotFound(text='Unknown hash')
        return web.Response(body=key, content_type='application/octet-stream')

    async def api_cipherkey_post(request):
        cipherkey = await request.read()
        key_hash = request.match_info['hash']  # renamed: don't shadow builtin hash()
        effect = Effect(EPrivKeyAdd(key_hash, cipherkey))
        try:
            await asyncio_perform(dispatcher, effect)
        except PrivKeyHashCollision:
            raise web.HTTPConflict(text='This hash already exists...')
        logger.info('New cipherkey `%s` registered' % key_hash)
        return web.Response()

    async def api_pubkey_post(request):
        pubkey = await request.read()
        # TODO: should provide a token to avoid impersonation
        identity = request.match_info['identity']
        effect = Effect(EPubKeyAdd(identity, pubkey))
        try:
            await asyncio_perform(dispatcher, effect)
        except ParsecError as exc:
            return web.HTTPConflict(text=exc.label)
        logger.info('New identity `%s` registered' % identity)
        return web.Response()

    app.router.add_get(route + '/cipherkey/{hash}', api_cipherkey_get)
    app.router.add_post(route + '/cipherkey/{hash}', api_cipherkey_post)
    app.router.add_post(route + '/pubkey/{identity}', api_pubkey_post)
def version(request):
    """Serve the contents of VERSION_FILE as JSON; 404 when unreadable."""
    # Use the version.json file in the current dir; any IOError falls
    # through to the 404 below.
    with suppress(IOError), open(VERSION_FILE) as fd:
        return web.json_response(json.load(fd))
    return web.HTTPNotFound()
def cli(loop, test_client):
    """Build the app with three error-producing routes and return a test client."""

    async def error403(request):
        raise web.HTTPForbidden()

    async def error404(request):
        return web.HTTPNotFound()

    async def error(request):
        raise ValueError()

    app = get_app(loop=loop)
    for path, handler in (('/error', error),
                          ('/error-403', error403),
                          ('/error-404', error404)):
        app.router.add_get(path, handler)
    return loop.run_until_complete(test_client(app))
def ajax(f):
    """
    Enforces the use of AJAX requests to access a resource.
    (Raises HTTPNotFound otherwise)
    """
    @wraps(f)
    async def wrapped(request):
        # Happy path first; non-AJAX callers get a 404 so the endpoint
        # is indistinguishable from a missing route.
        if is_ajax(request):
            return await f(request)
        raise HTTPNotFound()
    return wrapped
def browsers(self, request):
    """Return the stored JSON payload for *version*, or a descriptive 404."""
    version = request.match_info['version']
    try:
        payload = self.files[version]
    except KeyError:
        raise web.HTTPNotFound(
            text='No data was found for version {version}'.format(
                version=version,
            ),
        )
    return web.json_response(body=payload)
def get_file(request: web.Request):
    """Serve a stored file (pre-3.5 coroutine style), honoring HEAD
    requests and conditional ETag revalidation."""
    filename = request.match_info.get('name').strip()
    filepath = os.path.join(config.args.storage, filename)
    _, extension = os.path.splitext(filepath)
    etag = hashlib.sha1(filename.encode('utf-8')).hexdigest()

    if not os.path.exists(filepath):
        raise web.HTTPNotFound()
    # The ETag depends only on the (immutable) name, so any conditional
    # request for this URL is by definition still fresh.
    if 'If-None-Match' in request.headers:
        raise web.HTTPNotModified(headers={'ETag': etag})

    file_stat = os.stat(filepath)
    resp = web.Response() if request.method == 'HEAD' else web.StreamResponse()
    resp.headers['Content-Type'] = mimetypes.types_map.get(extension, 'application/octet-stream')
    resp.headers['ETag'] = etag
    resp.headers['Cache-Control'] = 'max-age=31536000'
    resp.headers['X-Content-SHA1'] = get_hash_from_name(filename)
    resp.content_length = file_stat.st_size
    resp.last_modified = file_stat.st_mtime
    if request.method == 'HEAD':
        return resp

    yield from resp.prepare(request)
    with open(filepath, 'rb') as f:
        for chunk in chunks(f):
            resp.write(chunk)
            yield from resp.drain()
    yield from resp.write_eof()
    resp.force_close()
    return resp
async def poll(self, request):
    """Template context for one poll question; 404 when the id is unknown.

    :returns: dict with 'question' and 'choices' for the template.
    """
    # `async` is required: the body awaits the database.
    question_id = request.match_info['question_id']
    try:
        question, choices = await db.get_question(self.postgres, question_id)
    except db.RecordNotFound as e:
        raise web.HTTPNotFound(text=str(e))
    return {
        'question': question,
        'choices': choices
    }
async def results(self, request):
    """Template context for a question's results page; 404 on unknown id.

    :returns: dict with 'question' and 'choices' for the template.
    """
    # `async` is required: the body awaits the database.
    question_id = request.match_info['question_id']
    try:
        question, choices = await db.get_question(self.postgres, question_id)
    except db.RecordNotFound as e:
        raise web.HTTPNotFound(text=str(e))
    return {
        'question': question,
        'choices': choices
    }
async def vote(self, request):
    """Record a vote for a choice and redirect to the results page.

    :raises web.HTTPBadRequest: when the form has no valid 'choice'.
    :raises web.HTTPNotFound: when question/choice do not exist.
    """
    # `async` is required: the body awaits the form data and the database.
    question_id = int(request.match_info['question_id'])
    data = await request.post()
    try:
        choice_id = int(data['choice'])
    except (KeyError, TypeError, ValueError) as e:
        raise web.HTTPBadRequest(
            text='You have not specified choice value') from e
    try:
        await db.vote(self.postgres, question_id, choice_id)
    except db.RecordNotFound as e:
        raise web.HTTPNotFound(text=str(e))
    router = request.app.router
    url = router['results'].url(parts={'question_id': question_id})
    return web.HTTPFound(location=url)
async def user_timeline(self, request):
    """Template context for a user's timeline.

    404 when the username is unknown; when a viewer is logged in, also
    computes whether they follow the profile user.
    """
    # `async` is required: the body awaits mongo and session calls.
    username = request.match_info['username']
    profile_user = await self.mongo.user.find_one({'username': username})
    if profile_user is None:
        raise web.HTTPNotFound()
    followed = False
    session = await get_session(request)
    user_id = session.get('user_id')
    user = None
    if user_id:
        user = await self.mongo.user.find_one({'_id': ObjectId(user_id)})
        followed = await self.mongo.follower.find_one(
            {'who_id': ObjectId(session['user_id']),
             'whom_id': {'$in': [ObjectId(profile_user['_id'])]}})
        followed = followed is not None
    messages = await (self.mongo.message
                      .find({'author_id': ObjectId(profile_user['_id'])})
                      .sort('pub_date', -1)
                      .to_list(30))
    # ObjectId is not JSON/template friendly; expose it as a string.
    profile_user['_id'] = str(profile_user['_id'])
    return {"messages": messages,
            "followed": followed,
            "profile_user": profile_user,
            "user": user,
            "endpoint": request.match_info.route.name}
def get_pet(request):
    """Stub pet endpoint: id '5' is the designated missing pet (404);
    any other id returns a canned payload."""
    pet_id = request.match_info['petId']
    if pet_id == '5':
        return web.HTTPNotFound()
    payload = {
        'id': int(pet_id),
        'name': 'Lili',
        'photoUrls': [],
    }
    return web.json_response(payload)
async def update_pet_formdata(request):
    """Stub update endpoint: accepts exactly one hard-coded form update
    (pet 12, name Vivi, status sold, userId 42); everything else is 404.
    """
    # `async` is required: the body awaits the form data.
    post_data = await request.post()
    matches = (
        request.match_info['petId'] == '12'
        and post_data.get('name') == 'Vivi'
        and post_data.get('status') == 'sold'
        and request.headers.get('userId') == '42'
    )
    if not matches:
        return web.HTTPNotFound()
    return web.json_response({})
async def static_request_handler(cls: Any, obj: Any, context: Dict, func: Any, path: str, base_url: str) -> Any:
    """Register a static-file GET route whose pattern is derived from
    *base_url* (adding a ``filename`` group when absent) and start the server.

    Fixes: the readability probe no longer leaks an open file handle,
    and the unused ``as e`` binding is removed.
    """
    # `async` is required: the function awaits start_server's result.
    if '?P<filename>' not in base_url:
        pattern = r'^{}(?P<filename>.+?)$'.format(re.sub(r'\$$', '', re.sub(r'^\^?(.*)$', r'\1', base_url)))
    else:
        pattern = r'^{}$'.format(re.sub(r'\$$', '', re.sub(r'^\^?(.*)$', r'\1', base_url)))
    compiled_pattern = re.compile(pattern)
    # Resolve the directory to serve: absolute paths are used as-is,
    # relative paths are anchored at the service file's directory.
    if path.startswith('/'):
        path = os.path.dirname(path)
    else:
        path = '{}/{}'.format(os.path.dirname(context.get('context', {}).get('_service_file_path')), path)
    if not path.endswith('/'):
        path = '{}/'.format(path)

    async def handler(request: web.Request) -> web.Response:
        result = compiled_pattern.match(request.path)
        filename = result.groupdict()['filename']
        filepath = '{}{}'.format(path, filename)
        try:
            if os.path.isdir(filepath) or not os.path.exists(filepath):
                raise web.HTTPNotFound()
            # Probe readability without leaking the handle (the original
            # opened the file and never closed it).
            with pathlib.Path(filepath).open('r'):
                pass
            return FileResponse(filepath)
        except PermissionError:
            raise web.HTTPForbidden()

    context['_http_routes'] = context.get('_http_routes', [])
    context['_http_routes'].append(('GET', pattern, handler))
    start_func = cls.start_server(obj, context)
    return (await start_func) if start_func else None
async def repo(request):
    """Render the CI overview page (pull requests + branches) for a scope.

    404 when *prefix* names no known scope.
    """
    # `async` is required: the body awaits the GitHub API.
    prefix = request.match_info['prefix']
    try:
        repo_name = getattr(scopes, prefix).repo
    except AttributeError:
        return web.HTTPNotFound()

    def info(name, pr=False):
        # Build the template context for one ref (branch name or PR number).
        ref = '%s/%s' % ('pull' if pr else 'heads', name)
        if pr:
            lxc = '%spr-%s' % (prefix, name)
            gh_url = 'https://github.com/%s/pull/%s' % (repo_name, name)
        else:
            name_cleaned = re.sub('[^a-z0-9]', '', name.lower())
            lxc = '%s-%s' % (prefix, name_cleaned)
            gh_url = 'https://github.com/%s/commits/%s' % (repo_name, ref)
        return {
            'protected_db': lxc in conf['protected_dbs'],
            'name': name,
            'lxc': lxc,
            'gh_url': gh_url,
            'url': 'http://%s.%s' % (lxc, conf['domain']),
            'restart_url': get_restart_url(prefix, ref),
            'logs_url': '%slatest/%s/' % (conf['log_url'], lxc),
        }

    resp, body = await gh_api('repos/%s/pulls?per_page=100' % repo_name)
    pulls = [info(i['number'], True) for i in body]
    resp, body = await gh_api('repos/%s/branches?per_page=100' % repo_name)
    branches = [info(i['name']) for i in body]
    refs = [
        {'title': 'Pull requests', 'items': pulls},
        {'title': 'Branches', 'items': branches}
    ]
    return render_tpl(repo_tpl, {'refs': refs})
def simple(request):
    """Return the zen line selected by query parameter ``q``:
    400 without q, 404 for an unknown letter."""
    letter = request.url.query.get('q')
    if letter is None:
        raise web.HTTPBadRequest()
    if letter not in zenlines:
        raise web.HTTPNotFound()
    return web.Response(text=zenlines[letter])
def thumbs(request, crop=True):
    """
    Return an image/jpeg image that's a thumbnail of the encoded request.
    """
    encoded = request.match_info['encoded']
    cached_thumb = os.path.join(args.cache, encoded)
    if not os.path.isfile(cached_thumb):
        # Cache miss: decode the request, generate and cache the thumbnail.
        try:
            __, w_x_h, path = decode(CIPHER_KEY, encoded).split(':', 3)
        except (binascii.Error, UnicodeDecodeError, ValueError):
            return web.HTTPNotFound()
        # WISHLIST add as extra context to the aiohttp.access logger
        logger.info('Decoded as %s %s', path, w_x_h)
        abspath = args.STORAGE_DIR + path
        try:
            im = Image.open(abspath)
        except (FileNotFoundError, IsADirectoryError):
            return web.HTTPNotFound()
        thumb_dimension = [int(x) for x in w_x_h.split('x')]
        with open(cached_thumb, 'wb') as fh:
            source = crop_1(im) if crop else im
            source.thumbnail(thumb_dimension)
            source.save(fh, 'jpeg')
    with open(cached_thumb, 'rb') as fh:
        return web.Response(
            status=200,
            body=fh.read(),
            content_type='image/jpeg',
            headers={
                'Cache-Control': 'max-age=86400',
            })
async def call(self, request):
    """Fetch one message by id from Elasticsearch (scoped to the session
    company) and shape it for preview.

    SMS messages are rendered into a readable dict; anything else returns
    the raw body.
    :raises HTTPNotFound: when exactly one matching document is not found.
    """
    # `async` is required: the body awaits the Elasticsearch client.
    es_query = {
        'bool': {
            'filter': [
                {'match_all': {}}
                if self.session.company == '__all__' else
                {'term': {'company': self.session.company}},
            ] + [
                {'term': {'_id': request.match_info['id']}}
            ]
        }
    }
    method = request.match_info['method']
    r = await self.app['es'].get(
        f'messages/{method}/_search?filter_path=hits', query=es_query
    )
    data = await r.json()
    if data['hits']['total'] != 1:
        raise HTTPNotFound(text='message not found')
    source = data['hits']['hits'][0]['_source']
    body = source['body']
    if method.startswith('sms'):
        # need to render the sms so it makes sense to users
        return {
            'from': source['from_name'],
            'to': source['to_last_name'] or source['to_address'],
            'status': source['status'],
            'message': body,
            'extra': source.get('extra') or {},
        }
    else:
        return {'raw': body}
async def get_flag(base_url, cc):  # <2>
    """Fetch the flag GIF for *cc* via ClientSession with a 10s timeout.

    :returns: image bytes on HTTP 200.
    :raises web.HTTPNotFound: on 404.
    :raises aiohttp.HttpProcessingError: on any other status.
    """
    # `async` is required: the body awaits, and `await` inside a plain
    # `def` is a SyntaxError.
    url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
    async with aiohttp.ClientSession() as session:
        with async_timeout.timeout(10):
            async with session.get(url) as resp:
                if resp.status == 200:
                    image = await resp.read()  # <5>
                    return image
                elif resp.status == 404:
                    raise web.HTTPNotFound()
                else:
                    raise aiohttp.HttpProcessingError(
                        code=resp.status, message=resp.reason,
                        headers=resp.headers)
async def get(self):
    """Render a single article page from the Redis-backed cache.

    404 for non-numeric or unknown article ids.  Fixes: a missing ``tag``
    field no longer raises AttributeError (now mirrors the ``citation``
    handling); mojibake comments translated to English.
    """
    # `async` is required: the body awaits the Redis cache.
    id = self.match['id']
    if id.isdigit() is False:
        raise web.HTTPNotFound()
    data = await self.redis.get('Article', id)
    if data is None:
        raise web.HTTPNotFound()
    # Human-readable dates derived from the stored timestamps.
    data['created_date'] = todate(data['created_time'], '%Y-%m-%d %H:%M:%S')
    data['updated_date'] = todate(data['updated_time'], '%Y-%m-%d %H:%M:%S')
    # citation/tag are '|'-separated strings that may be absent (None).
    try:
        data['citations'] = [render(item)[3:-5] for item in data.get('citation').split('|')]
    except AttributeError:
        data['citations'] = []
    try:
        data['tags'] = [item for item in data.get('tag').split('|')]
    except AttributeError:
        data['tags'] = []
    # Enable math rendering only when $...$ / $$...$$ delimiters appear.
    if len(re.findall('[$]{1,2}', data['text'])) > 0:
        math = True
    else:
        math = False
    return geass({
        'article': data,
        'math': math,
        'PAGE_IDENTIFIER': self.request.app.router['article'].url(
            parts={'id': id}
        ),
        'dev': not config.dev,
        'comment': True
    }, self.request, 'public/article.html')
async def set_configuration_item(request):
    """
    Args:
         request: category_name, config_item, {"value" : <some value>} are required

    Returns:
            set the configuration item value in the given category.

    :Example:
        curl -X PUT -H "Content-Type: application/json" -d '{"value": <some value> }' http://localhost:8081/foglamp/category/{category_name}/{config_item}

        For {category_name}=>PURGE update value for {config_item}=>age
        curl -X PUT -H "Content-Type: application/json" -d '{"value": 24}' http://localhost:8081/foglamp/category/PURGE/age
    """
    # `async` is required: the body awaits the request JSON and the
    # configuration manager.
    category_name = request.match_info.get('category_name', None)
    config_item = request.match_info.get('config_item', None)
    data = await request.json()
    # TODO: make it optimized and elegant
    cf_mgr = ConfigurationManager(connect.get_storage())
    try:
        value = data['value']
        await cf_mgr.set_category_item_value_entry(category_name, config_item, value)
        result = await cf_mgr.get_category_item(category_name, config_item)
        if result is None:
            raise web.HTTPNotFound(reason="No detail found for the category_name: {} and config_item: {}".format(category_name, config_item))
    except KeyError:
        raise web.HTTPBadRequest(reason='Missing required value for {}'.format(config_item))
    return web.json_response(result)