The following 50 code examples, extracted from Python open-source projects, illustrate how to use falcon.HTTPNotFound().
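Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: look something up, and raise falcon.HTTPNotFound() when it is missing. The STORE dict, ThingResource class, and route below are illustrative assumptions, not taken from any of the projects that follow.

import falcon

# Hypothetical in-memory data used only for this sketch.
STORE = {"42": {"name": "example"}}

class ThingResource:
    def on_get(self, req, resp, thing_id):
        thing = STORE.get(thing_id)
        if thing is None:
            # Falcon converts the raised error into a 404 response;
            # the optional description becomes part of the error body.
            raise falcon.HTTPNotFound(
                description="Thing %s does not exist." % thing_id)
        resp.media = thing
        resp.status = falcon.HTTP_200

app = falcon.App()  # falcon.API() on Falcon 1.x/2.x
app.add_route('/things/{thing_id}', ThingResource())

Raising the exception short-circuits the responder; no further response code runs, and Falcon serializes the error for the client.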
def on_post(self, req, resp, revision_id, tag=None):
    """Creates a revision tag."""
    body = req.stream.read(req.content_length or 0)
    try:
        tag_data = yaml.safe_load(body)
    except yaml.YAMLError as e:
        error_msg = ("Could not parse the request body into YAML data. "
                     "Details: %s." % e)
        LOG.error(error_msg)
        raise falcon.HTTPBadRequest(description=e)

    try:
        resp_tag = db_api.revision_tag_create(revision_id, tag, tag_data)
    except errors.RevisionNotFound as e:
        raise falcon.HTTPNotFound(description=e.format_message())
    except errors.RevisionTagBadFormat as e:
        raise falcon.HTTPBadRequest(description=e.format_message())

    resp_body = revision_tag_view.ViewBuilder().show(resp_tag)
    resp.status = falcon.HTTP_201
    resp.body = resp_body
def _show_validation_entry(self, req, resp, revision_id, validation_name,
                           entry_id):
    try:
        entry_id = int(entry_id)
    except ValueError:
        raise falcon.HTTPBadRequest(
            description='The {entry_id} parameter must be an integer.')

    try:
        entry = db_api.validation_get_entry(
            revision_id, validation_name, entry_id)
    except (errors.RevisionNotFound, errors.ValidationNotFound) as e:
        raise falcon.HTTPNotFound(description=e.format_message())

    resp_body = self.view_builder.show_entry(entry)
    return resp_body
def on_delete(self, req, resp, dataset_id, **kwargs):
    """Delete a dataset from the service

    This method will delete the entry from the database and will also
    delete the entire datasets generated by them on filesystem.

    :param integer dataset_id: Unique ID of dataset
    :returns: Nothing if operation was successful
    :rtype: 204 NO CONTENT
    """
    try:
        delete_task = async_tasks.delete_dataset_by_id(dataset_id)
    except LookupError:
        raise falcon.HTTPNotFound(description="Couldn't locate dataset")
    except OSError as err:
        raise falcon.HTTPInternalServerError(description=str(err))
    else:
        resp.status = falcon.HTTP_204
def on_get(self, req, resp):
    """Return all datasets available on the service

    :query boolean use_cache: False if cache must be reloaded, True if
                              values returned can be those cached.
    :returns: A list with all datasets
    """
    cache = req.get_param_as_bool("use_cache", blank_as_true=True)
    dao = data_access.DatasetDAO()

    listdts, err = dao.get_all_datasets(use_cache=cache)
    if listdts is None:
        raise falcon.HTTPNotFound(description=str(err))

    response = [{"dataset": dtst.to_dict()} for dtst in listdts]
    resp.body = json.dumps(response)
    resp.content_type = 'application/json'
    resp.status = falcon.HTTP_200
def guarded_session():
    '''Context manager that will automatically close session on exceptions'''
    try:
        session = Session()
        yield session
    except IrisValidationException as e:
        session.close()
        raise HTTPBadRequest('Validation error', str(e))
    except (HTTPForbidden, HTTPUnauthorized, HTTPNotFound, HTTPBadRequest):
        session.close()
        raise
    except Exception:
        session.close()
        logger.exception('SERVER ERROR')
        raise
def _second_hook(req, resp, resource, params):
    headers = req.headers
    methods = headers.get('URL-METHODS', '').split(',')
    if req.method not in methods:
        raise falcon.HTTPNotFound()
def get_object(self, req, resp, path_params, for_update=False):
    try:
        obj = self.objects_class.get(*path_params, using=self.connection)
    except NotFoundError:
        raise HTTPNotFound()
    return obj
def get_object(self, req, resp, path_params, for_update=False, db_session=None):
    """
    :param req: Falcon request
    :type req: falcon.request.Request

    :param resp: Falcon response
    :type resp: falcon.response.Response

    :param path_params: path params extracted from URL path
    :type path_params: dict

    :param for_update: if the object is going to be updated or deleted
    :type for_update: bool

    :param db_session: SQLAlchemy session
    :type db_session: sqlalchemy.orm.session.Session
    """
    query = db_session.query(self.objects_class)
    if for_update:
        query = query.with_for_update()
    for key, value in path_params.items():
        attr = getattr(self.objects_class, key, None)
        query = query.filter(attr == value)
    conditions = dict(req.params)
    if self.PARAM_RELATIONS in conditions:
        conditions.pop(self.PARAM_RELATIONS)
    query = self.filter_by(query, conditions)
    try:
        obj = query.one()
    except NoResultFound:
        raise HTTPNotFound()
    except MultipleResultsFound:
        raise HTTPBadRequest('Multiple results',
                             'Query params match multiple records')
    return obj
def get_object(self, req, resp, path_params, for_update=False):
    pk = req.context['doc'].get('pk')
    if not pk:
        raise HTTPNotFound()
    obj = self.objects_class.get(pk)
    if obj is None:
        raise HTTPNotFound()
    return obj
def _show_tag(self, req, resp, revision_id, tag):
    """Retrieve details for a specified tag."""
    try:
        resp_tag = db_api.revision_tag_get(revision_id, tag)
    except (errors.RevisionNotFound, errors.RevisionTagNotFound) as e:
        raise falcon.HTTPNotFound(description=e.format_message())

    resp_body = revision_tag_view.ViewBuilder().show(resp_tag)
    resp.status = falcon.HTTP_200
    resp.body = resp_body
def _list_all_tags(self, req, resp, revision_id):
    """List all tags for a revision."""
    try:
        resp_tags = db_api.revision_tag_get_all(revision_id)
    except errors.RevisionNotFound as e:
        raise falcon.HTTPNotFound(e.format_message())

    resp_body = revision_tag_view.ViewBuilder().list(resp_tags)
    resp.status = falcon.HTTP_200
    resp.body = resp_body
def _delete_tag(self, req, resp, revision_id, tag):
    """Delete a specified tag."""
    try:
        db_api.revision_tag_delete(revision_id, tag)
    except (errors.RevisionNotFound, errors.RevisionTagNotFound) as e:
        raise falcon.HTTPNotFound(description=e.format_message())
    resp.status = falcon.HTTP_204
def _delete_all_tags(self, req, resp, revision_id):
    """Delete all tags for a revision."""
    try:
        db_api.revision_tag_delete_all(revision_id)
    except errors.RevisionNotFound as e:
        raise falcon.HTTPNotFound(description=e.format_message())
    resp.status = falcon.HTTP_204
def _show_revision(self, req, resp, revision_id):
    """Returns detailed description of a particular revision.

    The status of each ValidationPolicy belonging to the revision is also
    included.
    """
    try:
        revision = db_api.revision_get(revision_id)
    except errors.RevisionNotFound as e:
        raise falcon.HTTPNotFound(description=e.format_message())

    revision_resp = self.view_builder.show(revision)
    resp.status = falcon.HTTP_200
    resp.body = revision_resp
def on_post(self, req, resp, revision_id, validation_name):
    validation_data = req.stream.read(req.content_length or 0)
    try:
        validation_data = yaml.safe_load(validation_data)
    except yaml.YAMLError as e:
        error_msg = ("Could not parse the validation into YAML data. "
                     "Details: %s." % e)
        LOG.error(error_msg)
        raise falcon.HTTPBadRequest(description=six.text_type(e))

    if not validation_data:
        error_msg = 'Validation payload must be provided.'
        LOG.error(error_msg)
        raise falcon.HTTPBadRequest(description=error_msg)

    if not all([validation_data.get(x) for x in ('status', 'validator')]):
        error_msg = 'Validation payload must contain keys: %s.' % (
            ', '.join(['"status"', '"validator"']))
        LOG.error(error_msg)
        raise falcon.HTTPBadRequest(description=error_msg)

    try:
        resp_body = db_api.validation_create(
            revision_id, validation_name, validation_data)
    except errors.RevisionNotFound as e:
        raise falcon.HTTPNotFound(description=e.format_message())

    resp.status = falcon.HTTP_201
    resp.append_header('Content-Type', 'application/x-yaml')
    resp.body = self.view_builder.show(resp_body)
def _list_validation_entries(self, req, resp, revision_id, validation_name):
    try:
        entries = db_api.validation_get_all_entries(revision_id,
                                                    validation_name)
    except errors.RevisionNotFound as e:
        raise falcon.HTTPNotFound(description=e.format_message())

    resp_body = self.view_builder.list_entries(entries)
    return resp_body
def _list_all_validations(self, req, resp, revision_id):
    try:
        validations = db_api.validation_get_all(revision_id)
    except errors.RevisionNotFound as e:
        raise falcon.HTTPNotFound(description=e.format_message())

    resp_body = self.view_builder.list(validations)
    return resp_body
def on_get(self, req, resp, sanitized_params, revision_id):
    """Returns all documents for a `revision_id`.

    Returns a multi-document YAML response containing all the documents
    matching the filters specified via query string parameters. Returned
    documents will be as originally posted with no substitutions or
    layering applied.
    """
    include_encrypted = policy.conditional_authorize(
        'deckhand:list_encrypted_documents', req.context, do_raise=False)

    order_by = sort_by = None
    if 'order' in sanitized_params:
        order_by = sanitized_params.pop('order')
    if 'sort' in sanitized_params:
        sort_by = sanitized_params.pop('sort')

    filters = sanitized_params.copy()
    filters['metadata.storagePolicy'] = ['cleartext']
    if include_encrypted:
        filters['metadata.storagePolicy'].append('encrypted')
    filters['deleted'] = False  # Never return deleted documents to user.

    try:
        documents = db_api.revision_get_documents(
            revision_id, **filters)
    except errors.RevisionNotFound as e:
        LOG.exception(six.text_type(e))
        raise falcon.HTTPNotFound(description=e.format_message())

    sorted_documents = utils.multisort(documents, sort_by, order_by)

    resp.status = falcon.HTTP_200
    resp.body = self.view_builder.list(sorted_documents)
def _retrieve_documents_for_rendering(self, revision_id, **filters):
    try:
        documents = db_api.revision_get_documents(
            revision_id, **filters)
    except errors.RevisionNotFound as e:
        LOG.exception(six.text_type(e))
        raise falcon.HTTPNotFound(description=e.format_message())
    else:
        return documents
def on_get(self, req, resp):
    raise falcon.HTTPNotFound()
def on_get(self, req, resp):
    raise falcon.HTTPNotFound(description='Not Found')
def on_get(self, req, resp, **kwargs):
    # simulate that the endpoint is hit but raise a 404 because
    # the object isn't found in the database
    raise falcon.HTTPNotFound()
def on_get(self, request, response, vertex_id):
    try:
        results = self.parent.graph.query_vertices({"id": vertex_id})
        response.body = json.dumps(list(results)[0])
        response.status = falcon.HTTP_200
    except:
        raise falcon.HTTPNotFound()
def on_get(self, request, response):
    query = dict()
    request.get_param_as_int('transaction', store=query)
    if "transaction" in query:
        task = self.client.get_task(query['transaction'])
        if task is None:
            raise falcon.HTTPNotFound()
        else:
            response.body = task
            response.status = falcon.HTTP_200
    else:
        response.body = json.dumps({}, encoding='utf-8')
        response.status = falcon.HTTP_200
def on_delete(self, req, resp, job_id=None):
    job = self.eventloop.job_by_id(job_id)
    if not job:
        raise falcon.HTTPNotFound()
    job.set_status(eva.job.DELETED)
    self.set_response_message(
        resp, "The job '%s' has been marked for deletion." % job_id)
def on_get(self, req, resp, filename):
    suffix = os.path.splitext(req.path)[1]
    resp.content_type = mimes.get(suffix, 'application/octet-stream')

    filepath = os.path.join(ui_root, self.path, secure_filename(filename))
    try:
        resp.stream = open(filepath, 'rb')
        resp.stream_len = os.path.getsize(filepath)
    except IOError:
        raise HTTPNotFound()
def on_post(self, req, resp, dataset_id, dataset_dto, entities_pair):
    """This method returns the true distance between two entities

    {"distance":
        ["http://www.wikidata.org/entity/Q1492",
         "http://www.wikidata.org/entity/Q2807"]
    }

    :param int dataset_id: The dataset identifier on database
    :param DTO dataset_dto: The Dataset DTO from dataset_id (from hook)
    :param tuple entities_pair: A pair of entities (from hook)
    :returns: A distance attribute, float number
    :rtype: dict
    """
    dataset_dao = data_access.DatasetDAO()
    dataset = dataset_dao.build_dataset_object(dataset_dto)  # TODO: design

    # Get server to do 'queries'
    search_index, err = dataset_dao.get_search_index(dataset_dto)
    if search_index is None:
        msg_title = "Dataset not ready perform search operation"
        raise falcon.HTTPConflict(title=msg_title, description=str(err))
    # TODO: Maybe extract server management anywhere to simplify this
    search_server = server.Server(search_index)

    entity_x, entity_y = entities_pair
    id_x = dataset.get_entity_id(entity_x)
    id_y = dataset.get_entity_id(entity_y)
    if id_x is None or id_y is None:
        raise falcon.HTTPNotFound(
            description=("The {} id from entity {} or the {} id from {} "
                         "entity can't be found on the dataset")
            .format(id_x, entity_x, id_y, entity_y))

    dist = search_server.distance_between_entities(id_x, id_y)

    resp.body = json.dumps({"distance": dist})
    resp.content_type = 'application/json'
    resp.status = falcon.HTTP_200
def on_post(self, req, resp, dataset_id, dataset_dto, entities):
    """Get the embedding given an entity or a list of entities (URI)

    {"entities": ["Q1492", "Q2807", "Q1"]}

    :param integer dataset_id: Unique ID of dataset
    :param integer dataset_dto: Dataset DTO (from hook)
    :param list entities: List of entities to get embeddings (from hook)
    :returns: A list of list with entities and its embeddings
    :rtype: list
    """
    istrained = dataset_dto.is_trained()
    if istrained is None or not istrained:
        raise falcon.HTTPConflict(
            title="Dataset has not a valid state",
            description="Dataset {} has a {} state".format(
                dataset_id, dataset_dto.status))

    try:
        result = async_tasks.find_embeddings_on_model(dataset_id, entities)
    except OSError as err:
        filerr = err.filename
        raise falcon.HTTPNotFound(
            title="The file on database couldn't be located",
            description=("A file ({}) has been found on database, but it "
                         "does not exist on filesystem").format(filerr))

    textbody = {"embeddings": result}
    resp.body = json.dumps(textbody)
    resp.status = falcon.HTTP_200
def on_get(self, req, resp, algorithm_id):
    """Shows the representation of the selected algorithm

    :param int algorithm_id: The id of the algorithm
    """
    algorithm_dao = data_access.AlgorithmDAO()

    algorithm, err = algorithm_dao.get_algorithm_by_id(algorithm_id)
    if algorithm is None:
        raise falcon.HTTPNotFound(message=str(err))

    resp.body = json.dumps(algorithm)
    resp.content_type = 'application/json'
    resp.status = falcon.HTTP_200
def on_get(self, req, resp):
    """Shows the representation of all algorithms available"""
    algorithm_dao = data_access.AlgorithmDAO()

    algorithms, err = algorithm_dao.get_all_algorithms()
    if algorithms is None:
        raise falcon.HTTPNotFound(message=str(err))

    resp.body = json.dumps(algorithms)
    resp.content_type = 'application/json'
    resp.status = falcon.HTTP_200
def on_post(self, req, resp, dataset_id, dataset_dto):
    """Generates a search index to perform data lookup operations.

    This task may take a long time to complete, so it uses tasks.

    :query int n_trees: The number of trees generated
    :param id dataset_id: The dataset to insert triples into
    :param DTO dataset_dto: The Dataset DTO from dataset_id (from hook)
    """
    # Dig for the param on Query Params
    n_trees = req.get_param_as_int('n_trees')

    # Call to the task
    task = async_tasks.build_search_index.delay(dataset_id, n_trees)

    # Create the new task
    task_dao = data_access.TaskDAO()
    task_obj, err = task_dao.add_task_by_uuid(task.id)
    if task_obj is None:
        raise falcon.HTTPNotFound(description=str(err))
    task_obj["next"] = "/datasets/" + dataset_id
    task_dao.update_task(task_obj)

    msg = "Task {} created successfully".format(task_obj['id'])
    textbody = {"status": 202, "message": msg}
    resp.location = "/tasks/" + str(task_obj['id'])
    resp.body = json.dumps(textbody)
    resp.content_type = 'application/json'
    resp.status = falcon.HTTP_202
def on_post(self, req, resp, dataset_id, dataset_dto):
    """Generates an autocomplete index with the desired languages

    This request may take a long time to complete, so it uses tasks.

    :query list langs: A list with languages to be requested
    :param id dataset_id: The dataset to insert triples into
    :param DTO dataset_dto: The Dataset DTO from dataset_id (from hook)
    """
    try:
        body = common_hooks.read_body_as_json(req)
        languages = body['langs']
        if not isinstance(languages, list):
            raise falcon.HTTPInvalidParam(
                "A list with languages in ISO 639-1 code was expected",
                "langs")
    except KeyError:
        raise falcon.HTTPMissingParam("langs")

    entity_dao = data_access.EntityDAO(dataset_dto.dataset_type, dataset_id)

    # Call to the task
    task = async_tasks.build_autocomplete_index.delay(dataset_id,
                                                      langs=languages)

    # Create the new task
    task_dao = data_access.TaskDAO()
    task_obj, err = task_dao.add_task_by_uuid(task.id)
    if task_obj is None:
        raise falcon.HTTPNotFound(description=str(err))
    task_obj["next"] = "/datasets/" + dataset_id
    task_dao.update_task(task_obj)

    msg = "Task {} created successfully".format(task_obj['id'])
    textbody = {"status": 202, "message": msg}
    resp.location = "/tasks/" + str(task_obj['id'])
    resp.body = json.dumps(textbody)
    resp.content_type = 'application/json'
    resp.status = falcon.HTTP_202
def on_get(self, req: Request, res: Response, item_id):
    with self.make_session() as session:
        item = self.get_item(item_id, session)
        if item is None:
            raise falcon.HTTPNotFound()
        put_json_to_context(res, item.to_dict())
def on_patch(self, req: Request, res: Response, item_id):
    with self.make_session() as session:
        try:
            ok = self.update_item(item_id, req.context["doc"], session)
            if not ok:
                raise falcon.HTTPNotFound()
        except IntegrityError as e:
            raise falcon.HTTPConflict("Conflict", str(e))
def on_delete(self, req: Request, res: Response, item_id):
    with self.make_session() as session:
        try:
            ok = self.delete_item(item_id, session)
            if not ok:
                raise falcon.HTTPNotFound()
        except IntegrityError as e:
            raise falcon.HTTPConflict("Conflict", str(e))
def __call__(self, req, resp, filepath):
    resp.content_type = mimetypes.guess_type(filepath)[0]
    curr_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = os.path.join(
        os.path.join(curr_dir, self.static_path), filepath
    )
    if not os.path.exists(file_path):
        raise falcon.HTTPNotFound()
    else:
        resp.stream = open(file_path, 'rb')
        resp.stream_len = os.path.getsize(file_path)
def process(self, req, resp):
    if req.method == 'OPTIONS':
        if self.cors_origin is not False:
            self.process_preflight_request(req, resp)
        response_body = '\n'
        response_body += 'nothing here\n\n'
        resp.body = response_body
        resp.status = falcon.HTTP_200
        return

    try:
        if self.cors_origin is not False:
            self.process_preflight_request(req, resp)
        self.dispatch(req, resp)
    except Exception as e:
        self.log.error_trace('process failed')
        error_type = type(e)
        error_map = {
            falcon.errors.HTTPNotFound: http_falcon_handler,
            falcon.errors.HTTPMissingParam: http_falcon_handler,
            falcon.errors.HTTPInvalidParam: http_falcon_handler,
            falcon.errors.HTTPInternalServerError: http_falcon_handler,
        }
        if self.custom_error_map:
            error_map.update(self.custom_error_map)

        error_func = error_map.get(error_type)
        if error_func:
            error_func(req, resp, e)
        else:
            default_error_handler(req, resp, e)
def __call__(self, req, resp, filename):
    file_ending = filename[filename.rfind('.'):]
    try:
        resp.content_type = env.mime_types[file_ending]
        with open('project/static/' + filename) as f:
            resp.body = f.read()
    except Exception:
        raise falcon.HTTPNotFound(description="404 Not Found")
def on_get(self, req, resp, plan_id):
    if plan_id.isdigit():
        where = 'WHERE `plan`.`id` = %s'
    else:
        where = 'WHERE `plan`.`name` = %s AND `plan_active`.`plan_id` IS NOT NULL'
    query = single_plan_query + where

    connection = db.engine.raw_connection()
    cursor = connection.cursor(db.dict_cursor)
    cursor.execute(query, plan_id)
    plan = cursor.fetchone()

    if plan:
        step = 0
        steps = []
        cursor.execute(single_plan_query_steps, plan['id'])
        for notification in cursor:
            s = notification['step']
            if s != step:
                l = [notification]
                steps.append(l)
                step = s
            else:
                l.append(notification)
        plan['steps'] = steps
        if plan['tracking_template']:
            plan['tracking_template'] = ujson.loads(plan['tracking_template'])
        resp.body = ujson.dumps(plan)
        connection.close()
    else:
        connection.close()
        raise HTTPNotFound()
def on_get(self, req, resp, template_id):
    if template_id.isdigit():
        where = 'WHERE `template`.`id` = %s'
    else:
        where = 'WHERE `template`.`name` = %s AND `template_active`.`template_id` IS NOT NULL'
    query = single_template_query + where

    connection = db.engine.raw_connection()
    cursor = connection.cursor()
    cursor.execute(query, template_id)
    results = cursor.fetchall()

    if results:
        r = results[0]
        t = {
            'id': r[0],
            'name': r[1],
            'active': r[2],
            'creator': r[3],
            'created': r[4]
        }
        content = {}
        for r in results:
            content.setdefault(r[5], {})[r[6]] = {'subject': r[7], 'body': r[8]}
        t['content'] = content
        cursor = connection.cursor(db.dict_cursor)
        cursor.execute(single_template_query_plans, t['name'])
        t['plans'] = cursor.fetchall()
        connection.close()
        payload = ujson.dumps(t)
    else:
        raise HTTPNotFound()

    resp.status = HTTP_200
    resp.body = payload
def on_delete(self, req, resp, username, src_mode_name):
    '''
    Delete a reprioritization mode for a user's mode setting

    **Example request**:

    .. sourcecode:: http

       DELETE /v0/users/reprioritization/{username}/{src_mode_name} HTTP/1.1

    **Example response**:

    .. sourcecode:: http

       HTTP/1.1 200 OK
       Content-Type: application/json

       []
    '''
    with db.guarded_session() as session:
        affected_rows = session.execute(delete_reprioritization_settings_query, {
            'target_name': username,
            'mode_name': src_mode_name,
        }).rowcount
        if affected_rows == 0:
            raise HTTPNotFound()
        session.commit()
        session.close()

    resp.status = HTTP_200
    resp.body = '[]'
def on_get(self, req, resp):
    '''
    Healthcheck endpoint. Returns contents of healthcheck file.

    **Example request**:

    .. sourcecode:: http

       GET /v0/healthcheck HTTP/1.1

    **Example response**:

    .. sourcecode:: http

       HTTP/1.1 200 OK
       Content-Type: text/plain

       GOOD
    '''
    try:
        with open(self.healthcheck_path) as f:
            health = f.readline().strip()
    except:
        raise HTTPNotFound()
    resp.status = HTTP_200
    resp.content_type = 'text/plain'
    resp.body = health
def not_found(message="The requested resource does not exist"):
    raise falcon.HTTPNotFound(description=message, code=falcon.HTTP_404)
def on_get(self, req, resp):
    if not self.healthcheck_path:
        logger.error('Healthcheck path not set')
        raise falcon.HTTPNotFound()
    try:
        with open(self.healthcheck_path) as f:
            health = f.readline().strip()
    except IOError:
        raise falcon.HTTPNotFound()
    resp.status = falcon.HTTP_200
    resp.content_type = 'text/plain'
    resp.body = health
def _get_sink_responder(self, path):
    params = {}
    for pattern, sink in self._sinks:
        m = pattern.match(path)
        if m:
            params = m.groupdict()
            return sink, params, None, None
    else:
        raise HTTPNotFound()
def do_get(self, req, resp, hostname, asset_type):
    """Render ``unit`` type boot action assets for hostname.

    Get the boot action context for ``hostname`` from the database
    and render all ``unit`` type assets for the host. Validate host
    is providing the correct identity key in the ``X-Bootaction-Key``
    header.

    :param req: falcon request object
    :param resp: falcon response object
    :param hostname: URL path parameter indicating the calling host
    :param asset_type: Asset type to include in the response - ``unit``,
                       ``file``, ``pkg_list``, ``all``
    """
    try:
        ba_ctx = self.state_manager.get_boot_action_context(hostname)
    except Exception as ex:
        self.logger.error(
            "Error locating boot action for %s" % hostname, exc_info=ex)
        raise falcon.HTTPNotFound()

    if ba_ctx is None:
        raise falcon.HTTPNotFound(
            description="Error locating boot action for %s" % hostname)

    BootactionUtils.check_auth(ba_ctx, req)

    asset_type_filter = None if asset_type == 'all' else asset_type

    try:
        task = self.state_manager.get_task(ba_ctx['task_id'])

        design_status, site_design = self.orchestrator.get_effective_site(
            task.design_ref)

        assets = list()
        ba_status_list = self.state_manager.get_boot_actions_for_node(
            hostname)

        for ba in site_design.bootactions:
            if hostname in ba.target_nodes:
                ba_status = ba_status_list.get(ba.name, None)
                action_id = ba_status.get('action_id')
                assets.extend(
                    ba.render_assets(
                        hostname,
                        site_design,
                        action_id,
                        type_filter=asset_type_filter))

        tarball = BootactionUtils.tarbuilder(asset_list=assets)
        resp.set_header('Content-Type', 'application/gzip')
        resp.set_header(
            'Content-Disposition',
            "attachment; filename=\"%s-%s.tar.gz\"" % (hostname, asset_type))

        resp.data = tarball
        resp.status = falcon.HTTP_200
        return
    except Exception as ex:
        self.logger.debug("Exception in boot action API.", exc_info=ex)
        raise falcon.HTTPInternalServerError(str(ex))
def on_get(self, req, resp, task_id):
    """Return one task"""
    tdao = data_access.TaskDAO()

    task_obj, err = tdao.get_task_by_id(task_id)
    if task_obj is None:
        raise falcon.HTTPNotFound(description=str(err))

    t_uuid = celery_server.app.AsyncResult(task_obj['celery_uuid'])
    task = {}
    task["state"] = t_uuid.state
    # task["is_ready"] = t_uuid.ready()
    task["id"] = task_obj["id"]
    try:
        if req.get_param_as_bool('get_debug_info'):
            task["debug"] = task_obj
    except Exception:
        pass

    if t_uuid.state == "SUCCESS":
        # Look if exists some next
        if "next" in task_obj and task_obj["next"] is not None:
            print("This task has next {}".format(task_obj["next"]))
            try:
                if req.get_param_as_bool('no_redirect'):
                    resp.status = falcon.HTTP_200
                else:
                    resp.status = falcon.HTTP_303
            except Exception:
                resp.status = falcon.HTTP_303
            resp.location = task_obj["next"]
    elif t_uuid.state == "STARTED":
        # Get task progress and show to the user
        celery_uuid = "celery-task-progress-" + task_obj['celery_uuid']
        redis = data_access.RedisBackend()
        task_progress = redis.get(celery_uuid)
        try:
            if "progress" in task_progress:
                task["progress"] = task_progress["progress"]
        except TypeError:
            pass
        resp.status = falcon.HTTP_200
    elif t_uuid.state == "FAILURE":
        task["error"] = {"exception": str(t_uuid.result),
                         "traceback": t_uuid.traceback}

    response = {"task": task}
    resp.body = json.dumps(response)
    resp.content_type = 'application/json'
def on_post(self, req, resp, dataset_id, dataset_dto, gen_triples_param):
    """Generates a task to insert triples on dataset. Async petition.

    Reads from body the parameters such as SPARQL queries

    {"generate_triples":
        {
            "graph_pattern": "<SPARQL Query (Where part)>",
            "levels": 2,
            "batch_size": 30000   # Optional
        }
    }

    :param id dataset_id: The dataset to insert triples into
    :param DTO dataset_dto: The Dataset DTO from dataset_id (from hook)
    :param dict gen_triples_param: Params to call generate_triples function
                                   (from hook)
    """
    try:
        batch_size = gen_triples_param.pop("batch_size")
    except KeyError:
        batch_size = None

    # Launch async task
    task = async_tasks.generate_dataset_from_sparql.delay(
        dataset_id, gen_triples_param.pop("graph_pattern"),
        int(gen_triples_param.pop("levels")), batch_size=batch_size)

    # Create a new task
    task_dao = data_access.TaskDAO()
    task_obj, err = task_dao.add_task_by_uuid(task.id)
    if task_obj is None:
        raise falcon.HTTPNotFound(description=str(err))
    task_obj["next"] = "/datasets/" + dataset_id
    task_dao.update_task(task_obj)

    # Store the task into DatasetDTO
    dataset_dao = data_access.DatasetDAO()
    dataset_dao.set_task(dataset_id, task_obj['id'])

    msg = "Task {} created successfully".format(task_obj['id'])
    textbody = {"status": 202, "message": msg, "task": task_dao.task}
    resp.location = "/tasks/" + str(task_obj['id'])
    resp.body = json.dumps(textbody)
    resp.content_type = 'application/json'
    resp.status = falcon.HTTP_202
def dispatch(self, req, resp):
    base_before, base_after, base_excp, base_final = self.op_loader.load_base(self.specs)
    for uri_regex, spec in self.specs.items():
        # try:
        route_signature = '/' + req.method.lower() + req.relative_uri
        if route_signature.find('?') > 0:
            route_signature = route_signature[:route_signature.find('?')]
        if type(uri_regex) == str:
            continue
        spec['route_signature'] = route_signature
        req.spec = copy.deepcopy(spec)

        match = uri_regex.match(route_signature)
        if match:
            handler, params, before, after, excp, final, mode = self.op_loader.load(
                req=req, spec=spec, matched_uri=match)
            handler_return = None
            try:
                if base_before:
                    base_before(req=req, resp=resp, **params)
                if before:
                    before(req=req, resp=resp, **params)
                if mode == 'raw':
                    handler_return = handler(req=req, resp=resp)
                else:
                    if mode == 'more':
                        handler_return = handler(req=req, resp=resp, **params)
                    else:
                        handler_return = handler(**params)

                content_type = self.produces(spec.get('produces'),
                                             self.specs.get('produces'))
                self.process_response(req, resp, handler_return, content_type)
                if after:
                    after(req=req, resp=resp, response=handler_return, **params)
                if base_after:
                    base_after(req=req, resp=resp, **params)
            except Exception as e:
                throw_out = True
                if base_excp is not None:
                    throw_out = base_excp(req=req, resp=resp, error=e)
                if excp is not None:
                    throw_out = excp(req=req, resp=resp, error=e)
                if throw_out:
                    raise e
            finally:
                if final:
                    final(req=req, resp=resp, response=handler_return, **params)
                if base_final:
                    base_final(req=req, resp=resp, **params)
            return
        # except falcon.HTTPInvalidParam as e:
        #     self.log.error_trace("http invalid param: {}".format(e))
        #     raise e
        # except Exception as e:
        #     self.log.error_trace("process error: {}".format(e))
        #     raise falcon.HTTPInternalServerError(title=str(type(e)),
        #                                          description=str(e))

    self.log.info("url does not match any route signature or match error: {}".format(route_signature))
    raise falcon.HTTPNotFound()