The following code examples, extracted from open-source Python projects, illustrate how to use dateutil.parser.parse().
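As a quick orientation before the project examples, here is a minimal standalone sketch (not taken from any of the projects below) of what parse() does: it accepts ISO 8601 and most free-form date strings, fills fields missing from the input via the default argument, and raises ValueError on unparseable input, which is the error most of the examples below catch.

from datetime import datetime
from dateutil.parser import parse

# ISO 8601 strings parse directly, including timezone offsets.
print(parse("2003-09-25T10:49:41-03:00"))   # 2003-09-25 10:49:41-03:00

# Free-form formats are also recognised.
print(parse("Sep 25 2003 10:49AM"))          # 2003-09-25 10:49:00

# dayfirst resolves ambiguous numeric dates as DD/MM/YYYY.
print(parse("10/09/2003", dayfirst=True))    # 2003-09-10 00:00:00

# Fields missing from the string are taken from `default`.
print(parse("10:49", default=datetime(2003, 9, 25)))  # 2003-09-25 10:49:00

# Unparseable input raises ValueError.
try:
    parse("not a date")
except ValueError as exc:
    print("failed:", exc)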
def elapsed_time(start, end):
    """Calculate the elapsed time for a service activity.

    Arguments:
      start (:py:class:`str`): The activity start time.
      end (:py:class:`str`): The activity end time.

    Returns:
      :py:class:`tuple`: The start and end times and humanized elapsed time.

    """
    start_time = safe_parse(start)
    end_time = safe_parse(end)
    if start_time is None or end_time is None:
        logger.warning('failed to generate elapsed time')
        text = 'elapsed time not available'
    else:
        text = 'took {}'.format(naturaldelta(parse(end) - parse(start)))
    return to_utc_timestamp(start_time), to_utc_timestamp(end_time), text
def safe_parse(time):
    """Parse a string without throwing an error.

    Arguments:
      time (:py:class:`str`): The string to parse.

    Returns:
      :py:class:`datetime.datetime`: The parsed datetime.

    """
    if time is None:
        return
    try:
        return parse(time)
    except (OverflowError, ValueError):
        pass
def occurred(at_):
    """Calculate when a service event occurred.

    Arguments:
      at_ (:py:class:`str`): When the event occurred.

    Returns:
      :py:class:`str`: The humanized occurrence time.

    """
    try:
        occurred_at = parse(at_)
    except (TypeError, ValueError):
        logger.warning('failed to parse occurrence time %r', at_)
        return 'time not available'
    utc_now = datetime.now(tz=timezone.utc)
    try:
        return naturaltime((utc_now - occurred_at).total_seconds())
    except TypeError:  # at_ is a naive datetime
        return naturaltime((datetime.now() - occurred_at).total_seconds())
def calculate_timeout(http_date):
    """Extract request timeout from e.g. ``Retry-After`` header.

    Note:
      Per :rfc:`2616#section-14.37`, the ``Retry-After`` header can
      be either an integer number of seconds or an HTTP date. This
      function can handle either.

    Arguments:
      http_date (:py:class:`str`): The date to parse.

    Returns:
      :py:class:`int`: The timeout, in seconds.

    """
    try:
        return int(http_date)
    except ValueError:
        date_after = parse(http_date)
        utc_now = datetime.now(tz=timezone.utc)
        return int((date_after - utc_now).total_seconds())
def index():
    icao_identifier = request.args.get('airport')
    try:
        date = dateparser.parse(request.args.get('date', datetime.date.today().isoformat())).date()
    except ValueError:
        date = datetime.date.today()
    if icao_identifier:
        try:
            result = do_lookup(icao_identifier, date)
        except Exception as e:
            result = {'airport': icao_identifier, 'date': date.isoformat(), 'error': str(e)}
            return render_template('index.html', dev_mode=dev_mode, result=result), 400
    else:
        result = None
    return render_template('index.html', dev_mode=dev_mode, result=result)
def lookup():
    icao_identifier = request.form['airport']
    if request.form['date']:
        try:
            date = dateparser.parse(request.form['date']).date()
        except ValueError:
            return "Unable to understand date %s" % request.form['date'], 400
    else:
        date = datetime.date.today()
    try:
        result = do_lookup(icao_identifier, date)
    except Exception as e:
        return str(e), 400
    except:
        flask.abort(500)
    return json.dumps(result)
def get_aggregated_feed(pages):
    """
    Aggregates feeds given a list of pages and their ids.
    Input: A list of tuples
    Output: Combined list of posts sorted by timestamp
    """
    data = list()
    for page_name, _id in pages:
        page_data = get_feed(_id)
        for data_dict in page_data:
            data_dict['source'] = page_name
        data.extend(page_data)
    data.sort(key=lambda x: parse(x['created_time']), reverse=True)
    return data
def deserialize_datetime(string):
    """
    Deserializes string to datetime.

    The string should be in iso8601 datetime format.

    :param string: str.
    :type string: str
    :return: datetime.
    :rtype: datetime
    """
    try:
        from dateutil.parser import parse
        return parse(string)
    except ImportError:
        return string
def add_emails(self, topic_ids):
    ''' add all emails and email_blobs '''
    # loop over emails
    for email in range(0, self.num_emails):
        em = self.metadata[email]
        dtime_orig = dateparse(em['Date'])
        dtime_utc = dtime_orig.astimezone(pytz.utc)
        values = nparray([em['Subject'], em['From'], em['To'], em['Cc'],
                          em['Bcc'], dtime_orig, dtime_utc])
        values = nparray([value.replace("'", " ")
                          if (value and isinstance(value, str)) else value
                          for value in values])
        rows = nparray(['subject', 'sender', 'receiver', 'cc', 'bcc',
                        'send_time', 'send_time_utc'])
        bool = nparray([True if a else False for a in values])
        self.add_email('email', rows[bool], values[bool])
        # loop over topics
        for idx2, t_id in enumerate(topic_ids):
            rows = nparray(['topic_id', 'topic_probability'])
            values = nparray([t_id, self.email_prob[idx2, email]])
            self.add_blob('email_blob', rows, values)
def calculate_end_of_period(self):
    # The next payment date is not reliably set.
    # When a subscription is cancelled, we do not have access to it anymore...
    # So instead, keep the end_of_period attribute up to date.
    last_payment_date = self.last_payment_date
    if not last_payment_date:
        return parse("1970-01-01T00:00:00Z")
    rpd = next(filter(
        lambda pd: pd["type"] == enums.PaymentDefinitionType.REGULAR,
        self.plan["payment_definitions"]
    ))
    delta = get_frequency_delta(rpd["frequency"], int(rpd["frequency_interval"]))
    return last_payment_date + delta
def _date_filter_response(self, field):
    query_dict = self._get_queryset_all().aggregate(Min(field), Max(field))
    min_date = query_dict[field + '__min']
    max_date = query_dict[field + '__max']
    if isinstance(min_date, datetime.datetime):
        min_date = min_date.date()
    if isinstance(max_date, datetime.datetime):
        max_date = max_date.date()
    selected_dates = self.json_cfg.get('selected_filter_values', None)
    if selected_dates and not self.json_cfg.get('ignore_selected_values', False):
        selected_min_date = parse(selected_dates['min_date']).date()
        selected_max_date = parse(selected_dates['max_date']).date()
        reset_button = True
    else:
        selected_min_date = min_date
        selected_max_date = max_date
        reset_button = False
    return render_to_response('ajaxviews/_select_date_filter.html', {
        'min_date': min_date,
        'max_date': max_date,
        'selected_min_date': selected_min_date,
        'selected_max_date': selected_max_date,
        'reset_button': reset_button,
    })
def ajax_filter(self, opts, *args, **kwargs):
    # args = set()
    # args.update((Q(field__isnull=True) | Q(field__name='none'),))
    filter_field = opts.get('filter_field', None)
    if isinstance(filter_field, tuple) and len(filter_field) == 2 and \
            (filter_field[1] == 'exclude' or filter_field[1] == 'exclude_filter'):
        return self.filter(*args, **kwargs)
    if opts.get('selected_filter_index', -1) >= 0 and opts.get('selected_filter_values', None):
        filter_field = opts['filter_field']
        if isinstance(filter_field, str):
            kwargs[filter_field + '__in'] = opts['selected_filter_values']
        elif isinstance(filter_field, tuple) and len(filter_field) == 3:
            kwargs[filter_field[0] + '__in'] = opts['selected_filter_values']
        elif isinstance(filter_field, tuple) and len(filter_field) == 2 and filter_field[1] == 'date':
            if opts['selected_filter_values'].get('min_date', None):
                kwargs[filter_field[0] + '__gte'] = parse(opts['selected_filter_values']['min_date']).date()
            if opts['selected_filter_values'].get('max_date', None):
                kwargs[filter_field[0] + '__lte'] = parse(opts['selected_filter_values']['max_date']).date()
        else:
            raise ImproperlyConfigured('filter field attribute needs to be a string or tuple.')
    if self.distinct_qs:
        return self.filter(*args, **kwargs).distinct()
    return self.filter(*args, **kwargs)
def modified_time(self, name):
    """
    Returns the last modified time (as datetime object) of the file
    specified by name.
    """
    file_data = self._check_file_exists(name)
    if file_data is None:
        return None
    else:
        return parse(file_data["modifiedDate"])

# def deconstruct(self):
#     """
#     Handle field serialization to support migration
#     """
#     name, path, args, kwargs = \
#         super(GoogleDriveStorage, self).deconstruct()
#     if self._service_email is not None:
#         kwargs["service_email"] = self._service_email
#     if self._json_keyfile_path is not None:
#         kwargs["json_keyfile_path"] = self._json_keyfile_path
def main():
    import json
    import sys

    def cvt(t):
        return parse(t)

    thre = cvt('2015-08-14T11:57:00-05:00')
    with open(sys.argv[1]) as reader:
        for line in reader:
            data = json.loads(line)
            url = clean_url(data['data']['url1'])
            if not data['data']['_golden']:
                for judgment in data['results']['judgments']:
                    if 'input' in judgment['data'] and cvt(judgment['created_at']) > thre:
                        print '\t\t'.join([url, judgment['data']['input'],
                                           judgment['created_at'], str(judgment['id'])])
def sort_commits(entity):
    """
    Sort the retrieved commits according to their commit date from old to new.
    See entity configuration: gh_repo_path_codeblock___commits

    :param entity: An entity having "commits" as output parameter.
    :return: None
    """
    if entity.output_parameters["commits"]:
        # parse commit date strings (ISO 8601) into a python datetime object
        # (see http://stackoverflow.com/a/3908349)
        for commit in entity.output_parameters["commits"]:
            commit["commit_date"] = parser.parse(commit["commit_date"])
        # sort commits (oldest commits first)
        entity.output_parameters["commits"] = sorted(entity.output_parameters["commits"],
                                                     key=lambda c: c["commit_date"])
        # convert commit dates back to string representation
        for commit in entity.output_parameters["commits"]:
            commit["commit_date"] = str(commit["commit_date"])
def _add_metadata(self, df_columns, item):
    metadata__timestamp = item["metadata__timestamp"]
    metadata__updated_on = item["metadata__updated_on"]
    metadata__enriched_on = dt.utcnow().isoformat()
    df_columns[Events.META_TIMESTAMP].append(metadata__timestamp)
    df_columns[Events.META_UPDATED_ON].append(metadata__updated_on)
    df_columns[Events.META_ENRICHED_ON].append(metadata__enriched_on)
    # If called after '__add_sh_info', item will already contain
    # 'grimoire_creation_date'
    if Events.GRIMOIRE_CREATION_DATE in item:
        creation_date = item[Events.GRIMOIRE_CREATION_DATE]
    else:
        creation_date = parser.parse(item['data']['AuthorDate'])
    df_columns[Events.GRIMOIRE_CREATION_DATE].append(creation_date)
    # Perceval fields
    df_columns[Events.PERCEVAL_UUID].append(item['uuid'])
    # TODO add other common fields as 'perceval version', 'tag', 'origin'...
def __add_commit_info(self, df_columns, item):
    commit_data = item["data"]
    repository = item["origin"]
    creation_date = parser.parse(commit_data['AuthorDate'])
    df_columns[Git.COMMIT_HASH].append(commit_data['commit'])
    df_columns[Git.COMMIT_ID].append(commit_data['commit'])
    df_columns[Git.COMMIT_EVENT].append(Git.EVENT_COMMIT)
    df_columns[Git.COMMIT_DATE].append(creation_date)
    df_columns[Git.COMMIT_OWNER].append(commit_data['Author'])
    df_columns[Git.COMMIT_COMMITTER].append(commit_data['Commit'])
    df_columns[Git.COMMIT_COMMITTER_DATE].append(parser.parse(commit_data['CommitDate']))
    df_columns[Git.COMMIT_REPOSITORY].append(repository)
    if 'message' in commit_data.keys():
        df_columns[Git.COMMIT_MESSAGE].append(commit_data['message'])
    else:
        df_columns[Git.COMMIT_MESSAGE].append('')
def _deserialize(self, value, attr, obj):
    """
    Deserialize value as a Unix timestamp (in float seconds).

    Handle both numeric and UTC isoformat strings.
    """
    if value is None:
        return None
    try:
        return float(value)
    except ValueError:
        parsed = parser.parse(value)
        if parsed.tzinfo:
            if parsed.utcoffset().total_seconds():
                raise ValidationError("Timestamps must be defined in UTC")
            parsed = parsed.replace(tzinfo=None)
        return (parsed - TimestampField.EPOCH).total_seconds()
def __deserialize_date(self, string):
    """
    Deserializes string to date.

    :param string: str.
    :return: date.
    """
    try:
        from dateutil.parser import parse
        return parse(string).date()
    except ImportError:
        return string
    except ValueError:
        raise ApiException(
            status=0,
            reason="Failed to parse `{0}` into a date object".format(string)
        )
def __deserialize_datatime(self, string):
    """
    Deserializes string to datetime.

    The string should be in iso8601 datetime format.

    :param string: str.
    :return: datetime.
    """
    try:
        from dateutil.parser import parse
        return parse(string)
    except ImportError:
        return string
    except ValueError:
        raise ApiException(
            status=0,
            reason="Failed to parse `{0}` into a datetime object".format(string)
        )
def get(self, request):
    try:
        last_query_time = parse(request.query_params.get('last_query_time'))
    except (TypeError, ValueError):
        last_query_time = cache.get('isDirty_query_time', (timezone.now() - timedelta(days=7)))
    url = settings.POND_URL + '/pond/pond/blocks/new/?since={}&using=default'.format(
        last_query_time.strftime('%Y-%m-%dT%H:%M:%S'))
    now = timezone.now()
    try:
        response = requests.get(url)
        response.raise_for_status()
    except Exception as e:
        return HttpResponseServerError({'error': repr(e)})
    pond_blocks = response.json()
    is_dirty = update_request_states_from_pond_blocks(pond_blocks)
    cache.set('isDirty_query_time', now, None)
    # also factor in if a change in requests (added, updated, cancelled) has
    # occurred since we last checked
    last_update_time = max(Request.objects.latest('modified').modified,
                           UserRequest.objects.latest('modified').modified)
    is_dirty |= last_update_time >= last_query_time
    return Response({'isDirty': is_dirty})
def get_status(self, obj):
    status = 'NOT_ATTEMPTED'
    if self.get_completed(obj):
        return 'COMPLETED'
    if self.get_percent_completed(obj) > 0:
        return 'PARTIALLY-COMPLETED'
    if obj['aborted']:
        return 'ABORTED'
    if self.get_failed(obj):
        return 'FAILED'
    if obj['canceled']:
        return 'CANCELED'
    if not obj['canceled'] and not self.get_failed(obj):
        if timezone.make_aware(parse(obj['end'])) > timezone.now():
            status = 'SCHEDULED'
            if timezone.make_aware(parse(obj['start'])) < timezone.now():
                status = 'IN_PROGRESS'
    return status
def _row(self, item):
    row = [date_parse(item["created_at"]),
           item["mid"],
           item['user']['screen_name'],
           item['user']['followers_count'],
           item['user']['friends_count'],
           item['reposts_count'],
           ', '.join(topic[1:-1] for topic in self._regex_topic(item['text'])),
           item['in_reply_to_screen_name'] or '',
           'http://m.weibo.cn/{}/{}'.format(item["user"]["idstr"], item["mid"]),
           item['text'],
           ]
    text_url = self._regex_links(item['text'])[:2]
    row.extend(text_url + [''] * (2 - len(text_url)))
    # adding two sample urls in retweeted_status text
    if 'retweeted_status' in item:
        row.extend([item['retweeted_status']['text']])
        row.extend(self._regex_links(item['retweeted_status']['text'])[:2])
    return row
def _convert_timestamp(timestamp, precision=None):
    if isinstance(timestamp, Integral):
        return timestamp  # assume precision is correct if timestamp is int
    if isinstance(_get_unicode(timestamp), text_type):
        timestamp = parse(timestamp)
    if isinstance(timestamp, datetime):
        if not timestamp.tzinfo:
            timestamp = UTC.localize(timestamp)
        ns = (timestamp - EPOCH).total_seconds() * 1e9
        if precision is None or precision == 'n':
            return ns
        elif precision == 'u':
            return ns / 1e3
        elif precision == 'ms':
            return ns / 1e6
        elif precision == 's':
            return ns / 1e9
        elif precision == 'm':
            return ns / 1e9 / 60
        elif precision == 'h':
            return ns / 1e9 / 3600
    raise ValueError(timestamp)
def pack_trip(self, trip):
    """Finalize trip"""
    start_date = dateparse(trip['stoptimes'][0]['departure_date'])
    for stoptime in trip['stoptimes']:
        # gtfs needs stoptimes overlapping into the following day to have
        # times like 25:35:00
        arrdelta = (dateparse(stoptime['arrival_date']) - start_date).days
        depdelta = (dateparse(stoptime['departure_date']) - start_date).days
        if arrdelta > 0:
            stoptime['arrival_date'] = start_date
            stoptime['arrival_time'] = str(int(stoptime['arrival_time'].split(':')[0])
                                           + 24 * arrdelta) + ":" + stoptime['arrival_time'].split(':')[1]
        if depdelta > 0:
            stoptime['departure_date'] = start_date
            stoptime['departure_time'] = str(int(stoptime['departure_time'].split(':')[0])
                                             + 24 * depdelta) + ":" + stoptime['departure_time'].split(':')[1]
    trip['headsign'] = self.stops[trip['stoptimes'][-1]['stop_id']]['stop_name']
    trip['service_date'] = start_date
def is_valid_date(self, ignore_none=True):
    def check_is_valid_date(func_name, value):
        if ignore_none and value is None:
            return None
        if isinstance(value, (datetime.date, pd.Timestamp)):
            return
        if isinstance(value, six.string_types):
            try:
                v = parse_date(value)
                return  # string parsed successfully, so it is a valid date
            except ValueError:
                raise RQInvalidArgument(
                    _('function {}: invalid {} argument, expect a valid date, got {} (type: {})').format(
                        func_name, self._arg_name, value, type(value)
                    ))
        raise RQInvalidArgument(
            _('function {}: invalid {} argument, expect a valid date, got {} (type: {})').format(
                func_name, self._arg_name, value, type(value)
            ))

    self._rules.append(check_is_valid_date)
    return self
def is_valid_date(self, ignore_none=True):
    def check_is_valid_date(func_name, value):
        if ignore_none and value is None:
            return None
        if isinstance(value, (datetime.date, pd.Timestamp)):
            return
        if isinstance(value, six.string_types):
            try:
                v = parse_date(value)
                return
            except ValueError:
                raise RQInvalidArgument(
                    _(u"function {}: invalid {} argument, expect a valid date, got {} (type: {})").format(
                        func_name, self._arg_name, value, type(value)
                    ))
        raise RQInvalidArgument(
            _(u"function {}: invalid {} argument, expect a valid date, got {} (type: {})").format(
                func_name, self._arg_name, value, type(value)
            ))

    self._rules.append(check_is_valid_date)
    return self
def __init__(self, msg):
    self.original_msg = msg
    if 'message' in msg:
        self.isError = True
        self.message = msg['message']
    else:
        self.isError = False
        self.id = msg['id']
        self.price = float(msg['price'])
        self.size = float(msg['size'])
        self.product_id = msg['product_id']
        self.side = msg['side']
        self.stp = msg.get('stp')
        self.type = msg['type']
        self.time_in_force = msg.get('time_in_force')
        self.post_only = bool(msg.get('post_only'))
        self.created_at = parser.parse(msg['created_at'])
        self.fill_fees = float(msg['fill_fees'])
        self.filled_size = float(msg['filled_size'])
        self.executed_value = float(msg['executed_value'])
        self.status = msg['status']
        self.settled = bool(msg['settled'])
def start(self):
    global TIME_TO_FREEZE
    global TZ_OFFSET

    self.previous_time_to_freeze = TIME_TO_FREEZE
    self.previous_tz_offset = TZ_OFFSET

    if isinstance(self.time_to_freeze, original_date):
        TIME_TO_FREEZE = self.time_to_freeze
        # Convert to a naive UTC datetime if necessary
        if TIME_TO_FREEZE.tzinfo:
            TIME_TO_FREEZE = TIME_TO_FREEZE.astimezone(utc).replace(tzinfo=None)
    else:
        TIME_TO_FREEZE = parser.parse(self.time_to_freeze)

    TZ_OFFSET = self.tz_offset
    return self.time_to_freeze
def _parse_arguments(self, parsed_args):
    params = dict()
    if parsed_args.start:
        params['start_date'] = parser.parse(parsed_args.start)
    if parsed_args.end:
        params['end_date'] = parser.parse(parsed_args.end)
    if parsed_args.flavor:
        params['flavor'] = parsed_args.flavor
    if parsed_args.name:
        params['name'] = parsed_args.name
    if len(params) == 0:
        raise RuntimeError('At least one argument must be provided: start, end, flavor or name')
    return params
def make_analyzed_tickets(AnalyzedAgileTicket, datetime, tzutc):
    """Make ticket from a list of dicts with key data."""
    from dateutil.parser import parse
    default = datetime(1979, 8, 15, 0, 0, 0, tzinfo=tzutc)

    def _make_analyzed_tickets(ticket_datas):
        tickets = []
        for data in ticket_datas:
            t = AnalyzedAgileTicket(
                key=data['key'],
                committed=dict(state="Committed", entered_at=parse(data['committed'], default=default)),
                started=dict(state="Started", entered_at=parse(data['started'], default=default)),
                ended=dict(state="Ended", entered_at=parse(data['ended'], default=default))
            )
            tickets.append(t)
        return tickets

    return _make_analyzed_tickets
def weeks_of_tickets(datetime, tzutc, AnalyzedAgileTicket):
    """A bunch of tickets."""
    from dateutil.parser import parse
    parsed = []
    default = datetime(1979, 8, 15, 0, 0, 0, tzinfo=tzutc)
    current_path = path.dirname(path.abspath(__file__))
    csv_file = path.join(current_path, 'data', 'weeks_of_tickets.csv')
    count = 1
    for row in csv.DictReader(open(csv_file, 'r')):
        t = AnalyzedAgileTicket(
            key="FOO-{}".format(count),
            committed=dict(state="committed", entered_at=parse(row['committed'], default=default)),
            started=dict(state="started", entered_at=parse(row['started'], default=default)),
            ended=dict(state="ended", entered_at=parse(row['ended'], default=default))
        )
        parsed.append(t)
        count += 1
    return parsed
def on_open(self) -> None:
    self.log(LogType.info, "-- Process Started! --")
    for product_id in self.product_manager.get_product_ids():
        order_book = self.order_book_manager.get_order_book(product_id)
        orders = publicClient.getProductOrderBook(product=product_id, level=3)
        sequence_id = orders['sequence']
        for side in ['bids', 'asks']:
            for raw_order in orders[side]:
                price = raw_order[0]
                qty = raw_order[1]
                order_id = raw_order[2]
                order = Order(product_id, sequence_id, OrderSide[side[:-1]], qty, price,
                              order_id=order_id)
                order_book + order
        historical_orders = publicClient.getProductTrades(product=product_id)
        for historical_order in historical_orders:
            price = historical_order['price']
            qty = historical_order['size']
            side = self.map_trade_side_to_order_side(historical_order['side'])
            created_at = parser.parse(historical_order['time'])
            order = Order(product_id, sequence_id, side, qty, price, historical=True,
                          order_type=OrderType.match, created_at=created_at)
            order_book + order
def load(self):
    """
    Search for credentials in explicit environment variables.
    """
    if self._mapping['access_key'] in self.environ:
        logger.info('Found credentials in environment variables.')
        fetcher = self._create_credentials_fetcher()
        credentials = fetcher(require_expiry=False)
        expiry_time = credentials['expiry_time']
        if expiry_time is not None:
            expiry_time = parse(expiry_time)
            return RefreshableCredentials(
                credentials['access_key'], credentials['secret_key'],
                credentials['token'], expiry_time,
                refresh_using=fetcher, method=self.METHOD
            )
        return Credentials(
            credentials['access_key'], credentials['secret_key'],
            credentials['token'], method=self.METHOD
        )
    else:
        return None
def process_formdata(self, valuelist):
    if valuelist:
        date_str = ' '.join(valuelist)
        if not date_str:
            self.data = None
            raise ValidationError(self.gettext('Please input a date/time value'))
        parse_kwargs = self.parse_kwargs.copy()
        if 'default' not in parse_kwargs:
            try:
                parse_kwargs['default'] = self.default()
            except TypeError:
                parse_kwargs['default'] = self.default
        try:
            self.data = parser.parse(date_str, **parse_kwargs)
        except ValueError:
            self.data = None
            raise ValidationError(self.gettext('Invalid date/time input'))
def __init__(self, broker, filename='assets/bitstampUSD.csv.gz', start=None, stop=None):
    super(DataSourceBitStampFile, self).__init__(broker)

    # Are we fetching the lot or just a window out of it?
    self._ts_start = dt2epock(date_parse(start)) if start is not None else None
    self._ts_stop = dt2epock(date_parse(stop)) if stop is not None else None

    self._csv = self._openCsv(filename)
    self._fake_trade_id = 0

    # Add an always-incrementing number of msec on to each integer ts_exec to
    # make a ts_update. This means no trades have identical ts_updates, which
    # raises merry hell in the algorithms.
    self._last_exec_ts = None
    self._fake_msec = None

    # Peek ahead at the first entry to get the start timestamp
    if self._ts_start is None:
        row = next(self._csv)
        self._ts_start = self._makeTradeFromRow(row).ts_update
        self._fp.seek(0)
def __deserialize_date(self, string):
    """Deserializes string to date.

    :param string: str.
    :return: date.
    """
    try:
        from dateutil.parser import parse
        return parse(string).date()
    except ImportError:
        return string
    except ValueError:
        raise rest.ApiException(
            status=0,
            reason="Failed to parse `{0}` as date object".format(string)
        )
def __deserialize_datatime(self, string):
    """Deserializes string to datetime.

    The string should be in iso8601 datetime format.

    :param string: str.
    :return: datetime.
    """
    try:
        from dateutil.parser import parse
        return parse(string)
    except ImportError:
        return string
    except ValueError:
        raise rest.ApiException(
            status=0,
            reason="Failed to parse `{0}` as datetime object".format(string)
        )
def __init__(self, pod):
    self.original = pod

    metadata = pod.obj['metadata']
    self.name = metadata['name']
    self.namespace = metadata['namespace']
    self.node_name = pod.obj['spec'].get('nodeName')
    self.status = pod.obj['status']['phase']
    self.uid = metadata['uid']
    self.selectors = pod.obj['spec'].get('nodeSelector', {})

    self.labels = metadata.get('labels', {})
    self.annotations = metadata.get('annotations', {})
    self.owner = self.labels.get('owner', None)
    self.creation_time = dateutil_parse(metadata['creationTimestamp'])
    self.start_time = dateutil_parse(pod.obj['status']['startTime']) if 'startTime' in pod.obj['status'] else None

    # TODO: refactor
    requests = list(map(lambda c: c.get('resources', {}).get('requests', {}),
                        pod.obj['spec']['containers']))
    resource_requests = {}
    for d in requests:
        for k, v in d.items():
            unitless_v = utils.parse_SI(v)
            resource_requests[k] = resource_requests.get(k, 0.0) + unitless_v
    self.resources = KubeResource(pods=1, **resource_requests)