The following code examples, extracted from open-source Python projects, illustrate how to use django.db.models.Min().
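Before the project examples, here is a minimal sketch of the two usual ways to apply Min: as a terminal aggregate(), which collapses the queryset into a dict keyed '<field>__min', and as a per-row annotate(). The Book and Publisher models and their fields below are hypothetical, used only for illustration.

from django.db.models import Min

# Terminal aggregation: returns a single dict; the default key is
# '<field>__min' unless a keyword alias is given.
cheapest = Book.objects.aggregate(Min('price'))['price__min']

# Per-row annotation: each Publisher gains a .lowest_book_price attribute
# computed over the reverse relation to its books.
publishers = Publisher.objects.annotate(lowest_book_price=Min('book__price'))
for publisher in publishers:
    print(publisher.name, publisher.lowest_book_price)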
def getRegistrationTypesAveragesByYear():
    srs = EventRegistration.objects.all()
    eligible_years = [x['event__year'] for x in srs.values('event__year').annotate(Count('event__year'))]
    eligible_years.sort()
    year_averages = []

    for year in eligible_years:
        this_year_results = srs.filter(event__year=year).annotate(
            student=Case(When(registration__student=True, then=100), default=0, output_field=IntegerField()),
            door=Case(When(registration__payAtDoor=False, then=100), default=0, output_field=IntegerField()),
            droppedIn=Case(When(dropIn=True, then=100), default=0, output_field=IntegerField()),
            cancellation=Case(When(cancelled=True, then=100), default=0, output_field=IntegerField()),
        ).aggregate(
            Student=Avg('student'), Door=Avg('door'), DropIn=Avg('droppedIn'),
            Cancelled=Avg('cancellation'), year=Min('event__year')
        )
        year_averages.append(this_year_results)

    return year_averages
def get_queryset(self):
    """Get queryset."""
    return self.model.objects.all() \
        .only('id', 'salutation', 'name', 'email') \
        .annotate(
            number_of_books=Count('books'),
            first_book_published_on=Min('books__publication_date'),
            last_book_published_on=Max('books__publication_date'),
            lowest_book_price=Min('books__price'),
            highest_book_price=Max('books__price'),
            average_book_price=Avg('books__price'),
            average_number_of_pages_per_book=Avg('books__pages'),
            number_of_books_sold=Count('books__order_lines'),
            total_amount_earned=Sum('books__order_lines__book__price')
        )
def is_admin(request):
    """Determine whether the user is an administrator and store the result in the session."""
    current_user = request.user
    results = user_groups.objects.filter(username_id=current_user.id).aggregate(
        Min('user_group_permission'))
    min_permission = results['user_group_permission__min']

    # ADMIN
    if min_permission == 1:
        request.session['IS_ADMIN'] = 'TRUE'
    else:
        request.session['IS_ADMIN'] = 'FALSE'

    # Group Admin
    if min_permission == 2:
        request.session['IS_GROUP_ADMIN'] = 'TRUE'
    else:
        request.session['IS_GROUP_ADMIN'] = 'FALSE'

    return
def _date_filter_response(self, field):
    query_dict = self._get_queryset_all().aggregate(Min(field), Max(field))
    min_date = query_dict[field + '__min']
    max_date = query_dict[field + '__max']
    if isinstance(min_date, datetime.datetime):
        min_date = min_date.date()
    if isinstance(max_date, datetime.datetime):
        max_date = max_date.date()

    selected_dates = self.json_cfg.get('selected_filter_values', None)
    if selected_dates and not self.json_cfg.get('ignore_selected_values', False):
        selected_min_date = parse(selected_dates['min_date']).date()
        selected_max_date = parse(selected_dates['max_date']).date()
        reset_button = True
    else:
        selected_min_date = min_date
        selected_max_date = max_date
        reset_button = False

    return render_to_response('ajaxviews/_select_date_filter.html', {
        'min_date': min_date,
        'max_date': max_date,
        'selected_min_date': selected_min_date,
        'selected_max_date': selected_max_date,
        'reset_button': reset_button,
    })
def test_changed_languages_since_revision(capfd, project0_disk, tp0):
    """Changed languages since a given revision"""
    # Everything
    for store in tp0.stores.all():
        store.sync()
    rev = tp0.stores.aggregate(
        rev=Min('last_sync_revision'))['rev'] - 1
    call_command('changed_languages', '--after-revision=%s' % rev)
    out, err = capfd.readouterr()
    assert out == u'language0,language1\n'

    # End revisions
    revision = Revision.get()
    unit = tp0.stores.first().units.first()
    unit.target = "NEW TARGET"
    unit.save()
    call_command('changed_languages', '--after-revision=%s' % revision)
    out, err = capfd.readouterr()
    assert out == u'language0\n'
def public_stats(request: HttpRequest) -> HttpResponse:
    """Display public galleries and archives stats."""
    if not crawler_settings.urls.enable_public_stats:
        if not request.user.is_staff:
            raise Http404("Page not found")
        else:
            return render_error(request, "Page disabled by settings (urls: enable_public_stats).")

    stats_dict = {
        "n_archives": Archive.objects.filter(public=True).count(),
        "archive": Archive.objects.filter(public=True).filter(filesize__gt=0).aggregate(
            Avg('filesize'), Max('filesize'), Min('filesize'), Sum('filesize')),
        "n_tags": Tag.objects.filter(gallery_tags__public=True).distinct().count(),
        "top_10_tags": Tag.objects.filter(gallery_tags__public=True).distinct().annotate(
            num_archive=Count('gallery_tags')).order_by('-num_archive')[:10]
    }

    d = {'stats': stats_dict}

    return render(request, "viewer/public_stats.html", d)
def get_dated_items(self):
    items, qs, info = super().get_dated_items()

    if 'year' in info and info['year']:
        # Get the earliest date we have an Event for:
        date_min = Event.objects.aggregate(Min('date'))['date__min']
        # Make it a 'yyyy-01-01' date:
        min_year_date = date_min.replace(month=1, day=1)

        if info['year'] < min_year_date:
            # The year we're viewing is before our minimum date, so 404.
            raise Http404(_("No %(verbose_name_plural)s available") % {
                'verbose_name_plural': force_text(qs.model._meta.verbose_name_plural)
            })
        elif info['year'] == min_year_date:
            # This is the earliest year we have events for, so
            # there is no previous year.
            info['previous_year'] = None

    return items, qs, info


# CLASSICAL WORK, DANCE PIECE, MOVIE AND PLAY LISTS/DETAILS.
def probability(request, ltlat, ltlong, rblat, rblong, poslat, poslong, dayindex, hourindex):
    # dayindex starts with 1 for Sunday; see
    # https://docs.djangoproject.com/en/dev/ref/models/querysets/#week-day
    loc = Point(float(poslong), float(poslat), srid=4326)  # FIXME: check that srid=4326 is right
    print(loc)
    result = Bikes.objects.filter(timestamp__week_day=dayindex).filter(timestamp__hour=hourindex).filter(bikes__gt=0)\
        .extra({'date_found': "date(timestamp)"}).values('date_found')\
        .annotate(min_distance=Min(Distance('place_coords', loc))).order_by('min_distance')
    result_count = len(result)
    result_ranges = {}
    percentages = [0.25, 0.50, 0.75, 0.90]
    p_ind = 0
    for i in range(result_count):
        # Find the minimum distance at which each percentage of bike availability is fulfilled.
        percentage_sum = (i + 1) / result_count
        while p_ind < len(percentages):
            if percentages[p_ind] <= percentage_sum:
                result_ranges[str(percentages[p_ind])] = result[i]["min_distance"]
                p_ind += 1
            else:
                break
    return HttpResponse(json.dumps(result_ranges), content_type='application/json')
def challenge_added(self, challenge):
    if self.challenge_set.count() > 1:
        if not self.min_score and not self.max_score:
            result = self.challenge_set.aggregate(Min('points'), Max('points'))
            if result['points__min'] is not result['points__max']:
                self.min_score = result['points__min']
                self.max_score = result['points__max']
        else:
            chal_p = challenge.points
            if chal_p > self.max_score:
                self.max_score = chal_p
            elif chal_p < self.min_score:
                self.min_score = chal_p
    self.save()
def remove_duplicate_renditions(apps, schema_editor):
    Rendition = apps.get_model('wagtailimages.Rendition')

    # Find all filter_id / image_id pairings that appear multiple times in the renditions table
    # with focal_point_key = NULL
    duplicates = (
        Rendition.objects.filter(focal_point_key__isnull=True).
        values('image_id', 'filter_id').
        annotate(count_id=models.Count('id'), min_id=models.Min('id')).
        filter(count_id__gt=1)
    )

    # Delete all occurrences of those pairings, except for the one with the lowest ID
    for duplicate in duplicates:
        Rendition.objects.filter(
            focal_point_key__isnull=True,
            image=duplicate['image_id'],
            filter=duplicate['filter_id']
        ).exclude(
            id=duplicate['min_id']
        ).delete()
def get_root_nodes(cls):
    """
    :returns: A queryset containing the root nodes in the tree. This
        differs from the default implementation to find category page
        root nodes by `content_type`.
    """
    content_type = ContentType.objects.get_for_model(cls)
    depth = (cls.objects.filter(content_type=content_type).aggregate(
        depth=models.Min('depth')))['depth']
    if depth is not None:
        return cls.objects.filter(content_type=content_type, depth=depth)
    return cls.objects.filter(content_type=content_type)
def get_queryset(self):
    """Get queryset."""
    return self.model.objects.all() \
        .annotate(
            number_of_books=Count('books'),
            first_book_published_on=Min('books__publication_date'),
            last_book_published_on=Max('books__publication_date'),
            lowest_book_price=Min('books__price'),
            highest_book_price=Max('books__price'),
            average_book_price=Avg('books__price'),
            average_number_of_pages_per_book=Avg('books__pages'),
            number_of_books_sold=Count('books__order_lines'),
            total_amount_earned=Sum('books__order_lines__book__price')
        ) \
        .values('id', 'salutation', 'name', 'email', 'number_of_books',
                'first_book_published_on', 'last_book_published_on',
                'lowest_book_price', 'highest_book_price', 'average_book_price',
                'average_number_of_pages_per_book', 'number_of_books_sold',
                'total_amount_earned')
def __iter__(self):
    times = sorted(set(itertools.chain(*self.slots_qs().values_list("start", "end"))))
    slots = Slot.objects.filter(pk__in=self.slots_qs().values("pk"))
    slots = slots.annotate(room_count=Count("slotroom"), order=Min("slotroom__room__order"))
    slots = slots.order_by("start", "order")
    row = []
    for time, next_time in pairwise(times):
        row = {"time": time, "slots": []}
        for slot in slots:
            if slot.start == time:
                slot.rowspan = TimeTable.rowspan(times, slot.start, slot.end)
                slot.colspan = slot.room_count
                row["slots"].append(slot)
        if row["slots"] or next_time is None:
            yield row
def get_root_nodes(cls):
    content_type = ContentType.objects.get_for_model(cls)
    depth = (
        cls.objects
        .filter(content_type=content_type)
        .aggregate(depth=models.Min('depth')))['depth']
    if depth is not None:
        return cls.objects.filter(content_type=content_type, depth=depth)
    return cls.objects.filter(content_type=content_type)
def get_context_data(self, **kwargs):
    kwargs['query_data'] = self.request.GET.dict()
    status = self.request.GET.get('status')
    page = self.request.GET.get('page')
    live_courses = models.LiveCourse.objects.all()
    now = timezone.now()
    if status == 'to_start':
        live_courses = live_courses.annotate(
            start_time=Min("livecoursetimeslot__start"))
        live_courses = live_courses.filter(start_time__gt=now)
    elif status == 'under_way':
        live_courses = live_courses.annotate(
            start_time=Min("livecoursetimeslot__start"))
        live_courses = live_courses.annotate(
            end_time=Max("livecoursetimeslot__end"))
        live_courses = live_courses.filter(start_time__lte=now).filter(
            end_time__gte=now)
    elif status == 'end':
        live_courses = live_courses.annotate(
            end_time=Max("livecoursetimeslot__end"))
        live_courses = live_courses.filter(end_time__lt=now)
    live_courses = live_courses.order_by('-id')
    # paginate
    live_courses, pager = paginate(live_courses, page)
    kwargs['live_courses'] = live_courses
    kwargs['pager'] = pager
    return super(LiveCourseListView, self).get_context_data(**kwargs)
def get_queryset(self):
    queryset = self.queryset
    # For list views, only include courses that have not yet ended, ordered by start time
    if self.action == 'list':
        queryset = queryset.annotate(
            start=Min('live_course__livecoursetimeslot__start'),
            end=Max('live_course__livecoursetimeslot__end'),
        ).filter(end__gt=timezone.now()).order_by('start')
        school_id = self.request.query_params.get('school')
        if school_id:
            queryset = queryset.filter(class_room__school_id=school_id)
    return queryset
def wd_manage_apply(request, month, year, contract):
    c = Contract.objects.get(id=int(contract), user=request.user)
    month = int(month)
    year = int(year)
    firstDayOfMonth = datetime(year, month, 1, 0, 0, 1, 0).weekday()
    daysInMonth = monthrange(year, month)
    workL = WorkLog.objects.get(contract=c, month=month, year=year)

    # First try to apply all annual activities
    anuals = c.fixedworkdustactivity_set.all()
    for a in anuals:
        if a.week_day > firstDayOfMonth:
            anualStep = 1 + a.week_day - firstDayOfMonth
        elif a.week_day == firstDayOfMonth:
            anualStep = 1
        else:
            anualStep = 1 + 7 - firstDayOfMonth + a.week_day
        while anualStep <= daysInMonth[1] and workL.calcHours() + a.avg_length <= c.hours:
            wt = WorkTime()
            wt.work_log = workL
            if a.avg_length >= 6:
                wt.pause = 1
            else:
                wt.pause = 0
            wt.begin = datetime(year, month, anualStep, a.start.hour, a.start.minute, 0, 0)
            beginstamp = (wt.begin - datetime(1970, 1, 1)).total_seconds()
            wt.end = datetime.fromtimestamp(beginstamp + a.avg_length * 60.0*60.0 + wt.pause * 60.0*60.0)
            # wt.end = wt.begin.replace(hour=int(wt.begin.hour + math.floor(a.avg_length) + wt.pause))
            # wt.end = wt.end.replace(minute=int(round((a.avg_length - math.floor(a.avg_length)) * 60)))
            wt.activity = a.description
            wt.clean_fields(year, month)
            wt.save()
            anualStep += 7

    # Then fill with "other" activities
    filler = FillerWorkDustActivity.objects.all()
    largestFreeSlot = 0
    smallestFiller = filler.aggregate(Min('avg_length'))['avg_length__min']
    while smallestFiller is not None and largestFreeSlot >= smallestFiller:
        pass

    return redirect("/?month=" + str(month) + "&year=" + str(year) + "#" + str(c.id))
def get_min_price(self):
    return self.itinerary_set.all().annotate(
        min_price=Min('pricingoption__price')).order_by('min_price').first()
def get_max_price(self):
    return self.itinerary_set.all().annotate(
        min_price=Min('pricingoption__price')).order_by('-min_price').first()
def get_mean_price(self):
    return self.itinerary_set.all().annotate(
        min_price=Min('pricingoption__price')).aggregate(
        Avg('min_price'))['min_price__avg']
def relativeToDbTimestamp(self, relativeValue):
    try:
        pingProbes = PingTestResult.objects
        key, latestPingProbe = pingProbes.aggregate(Max('pingStart')).popitem()
        key, firstPingProbe = pingProbes.aggregate(Min('pingStart')).popitem()

        transferProbes = TransferTestResult.objects
        key, latestTransferProbe = transferProbes.aggregate(Max('transferStart')).popitem()
        key, firstTransferProbe = transferProbes.aggregate(Min('transferStart')).popitem()

        # in case of missing probes
        minTime = firstPingProbe
        maxTime = latestPingProbe
        if minTime is None or maxTime is None:
            minTime = firstTransferProbe
            maxTime = latestTransferProbe
        if minTime is None or maxTime is None:
            return datetime.datetime.utcfromtimestamp(0)

        try:
            if firstPingProbe > firstTransferProbe:
                minTime = firstTransferProbe
                maxTime = latestPingProbe
            if latestPingProbe < latestTransferProbe:
                maxTime = latestTransferProbe
        except:
            pass

        moment = time.mktime(minTime.timetuple()) + relativeValue * (
            time.mktime(maxTime.timetuple()) - time.mktime(minTime.timetuple()))
        return datetime.datetime.utcfromtimestamp(moment)
    except:
        return datetime.datetime.utcfromtimestamp(0)
def next_in_session(self):
    try:
        target_id = Session.objects.filter(
            id__gt=self.id,
            parent=self.parent_or_self,
        ).aggregate(Min('id'))['id__min']
        return Session.objects.get(pk=target_id)
    except self.DoesNotExist:
        return None
def get_queryset(self, request):
    qs = super(ExecutableInlineT, self).get_queryset(request).annotate(
        libraries=Count('xdk_libraries'),
        max_version=Max('xdk_libraries__xdk_version'),
        min_version=Min('xdk_libraries__xdk_version')
    )
    return qs
def get_queryset(self, request):
    qs = super(ExecutableAdmin, self).get_queryset(request)
    qs = qs.annotate(
        libraries=Count('xdk_libraries'),
        max_version=Max('xdk_libraries__xdk_version'),
        min_version=Min('xdk_libraries__xdk_version')
    )
    qs = qs.prefetch_related('title').prefetch_related('title__game')
    return qs
def get_queryset(self):
    from .models import Publication
    return super().get_queryset()\
        .filter(reading__start_date__isnull=False, reading__end_date__isnull=True)\
        .annotate(min_start_date=Min('reading__start_date'))\
        .order_by('min_start_date')
def to_json(self, queryset):
    """ Dump as a JSON object """
    data = super(TableFilterActionDateRange, self).to_json(queryset)

    # additional data about the date range covered by the queryset's
    # records, retrieved from its <field> column
    data['min'] = queryset.aggregate(Min(self.field))[self.field + '__min']
    data['max'] = queryset.aggregate(Max(self.field))[self.field + '__max']

    # a range filter has a count of None, as the number of records it
    # will select depends on the date range entered and we don't know
    # that ahead of time
    data['count'] = None

    return data
def close_course(lessons):
    """Return True if today's date is not past the date of the first lesson."""
    first_lesson_date = lessons.aggregate(Min('date'))['date__min']
    if first_lesson_date.date() < timezone.now().date():
        return False
    else:
        return True
def save_model(self, request, obj, form, change):
    if not obj.pk:
        min_order = obj.__class__.objects.aggregate(
            models.Min(self.sortable))
        try:
            next_order = min_order['%s__min' % self.sortable] - 1
        except TypeError:
            next_order = 0
        setattr(obj, self.sortable, next_order)
    super(SortableModelAdmin, self).save_model(request, obj, form, change)
def get_queryset(self):
    channel_id = self.kwargs['channel_id']
    attempted_mastery_logs = MasteryLog.objects.filter(attemptlogs__isnull=False)
    query_node = ContentNode.objects.get(pk=self.kwargs['content_node_id'])
    if self.request.query_params.get('last_active_time'):
        # Last active time specified
        datetime_cutoff = parse(self.request.query_params.get('last_active_time'))
    else:
        datetime_cutoff = timezone.now() - datetime.timedelta(7)
    # Set on the kwargs to pass into the serializer
    self.kwargs['last_active_time'] = datetime_cutoff.isoformat()
    recent_content_items = ContentSummaryLog.objects.filter_by_topic(query_node).filter(
        Q(progress__gt=0) | Q(masterylogs__in=attempted_mastery_logs),
        user__in=list(get_members_or_user(self.kwargs['collection_kind'], self.kwargs['collection_id'])),
        end_timestamp__gte=datetime_cutoff).values_list('content_id', flat=True)
    if connection.vendor == 'postgresql':
        pks_with_unique_content_ids = ContentNode.objects.order_by('content_id').distinct('content_id').filter(
            channel_id=channel_id, content_id__in=recent_content_items).values_list('pk', flat=True)
    else:
        # note from rtibbles:
        # As good as either I or jamalex could come up with to ensure that we only return
        # unique content_id'ed ContentNodes from the coach recent report endpoint.
        # Would have loved to use distinct('content_id'), but unfortunately DISTINCT ON is Postgresql only
        pks_with_unique_content_ids = ContentNode.objects.filter(
            channel_id=channel_id, content_id__in=recent_content_items).values('content_id').order_by('lft').annotate(
            pk=Min('pk')).values_list('pk', flat=True)
    return ContentNode.objects.filter(pk__in=pks_with_unique_content_ids).order_by('lft')
def open_notifications(request):
    title = _('Open Notifications')
    notifications = (Notification.objects
        .pending()
        .annotate(min_ecn=Min('submission_forms__submission__ec_number'))
        .only('timestamp', 'type_id')
        .select_related('type')
        .prefetch_related(
            Prefetch('safetynotification',
                queryset=SafetyNotification.objects.only('safety_type')),
            Prefetch('answer',
                queryset=NotificationAnswer.objects
                    .only('notification_id', 'is_rejected')),
            Prefetch('submission_forms',
                queryset=SubmissionForm.unfiltered.only(
                    'project_title', 'german_project_title', 'submission_id',
                ).prefetch_related(
                    Prefetch('submission',
                        queryset=Submission.unfiltered.only('ec_number')),
                ).order_by('submission__ec_number')),
        )
        .order_by('min_ecn')
    )

    stashed_notifications = DocStash.objects.filter(
        owner=request.user,
        group='ecs.notifications.views.create_notification',
        current_version__gte=0
    ).order_by('-modtime')

    context = {
        'title': title,
        'notifs': notifications,
        'stashed_notifications': stashed_notifications,
    }
    return render(request, 'notifications/list.html', context)
def AdvanceRegistrationDaysJSON(request):
    startDate = getDateTimeFromGet(request, 'startDate')
    endDate = getDateTimeFromGet(request, 'endDate')

    timeFilters = {}
    if startDate:
        timeFilters['dateTime__gte'] = startDate
    if endDate:
        timeFilters['dateTime__lte'] = endDate

    advance_days_sorted = sorted(Counter(
        Registration.objects.filter(**timeFilters).annotate(
            min_start=Min('eventregistration__event__startTime')
        ).annotate(
            advance=(TruncDate('dateTime') - TruncDate('min_start'))
        ).values_list('advance', flat=True)
    ).items())

    results_list = []
    cumulative = 0
    total = sum([x[1] for x in advance_days_sorted])
    for x in advance_days_sorted:
        cumulative += x[1]
        results_list.append({
            'days': x[0],
            'count': x[1],
            'cumulative': cumulative,
            'pct': 100 * (x[1] / total),
            'cumulative_pct': 100 * (cumulative / total)
        })
    return JsonResponse(results_list, safe=False)
def get_form_kwargs(self, **kwargs):
    '''
    Get the list of recent months and recent series to pass to the form
    '''
    numMonths = 12
    lastStart = Event.objects.annotate(Min('eventoccurrence__startTime'))\
        .order_by('-eventoccurrence__startTime__min')\
        .values_list('eventoccurrence__startTime__min', flat=True).first()
    if lastStart:
        month = lastStart.month
        year = lastStart.year
    else:
        month = timezone.now().month
        year = timezone.now().year

    months = [('', _('None'))]
    for i in range(0, numMonths):
        newmonth = (month - i - 1) % 12 + 1
        newyear = year
        if month - i - 1 < 0:
            newyear = year - 1
        newdate = datetime(year=newyear, month=newmonth, day=1)
        newdateStr = newdate.strftime("%m-%Y")
        monthStr = newdate.strftime("%B, %Y")
        months.append((newdateStr, monthStr))

    cutoff = timezone.now() - timedelta(days=120)
    allEvents = Event.objects.filter(startTime__gte=cutoff).order_by('-startTime')

    kwargs = super(SendEmailView, self).get_form_kwargs(**kwargs)
    kwargs.update({
        "months": months,
        "recentseries": [('', 'None')] + [
            (x.id, '%s %s: %s' % (month_name[x.month], x.year, x.name)) for x in allEvents
        ],
        "customers": self.customers,
    })
    return kwargs
def duration(self):
    """ Duration spanned by a qs of MailStatus

    :returns: a datetime.timedelta
    """
    extrems = self.aggregate(
        end=models.Max('creation_date'),
        start=models.Min('creation_date'))
    if not extrems['end'] or not extrems['start']:
        # case of empty qs (None values) -> zero-timedelta
        return timedelta()
    else:
        return extrems['end'] - extrems['start']
def duration(self):
    """ Duration spanned by a qs of MailStatus

    :returns: a timedelta
    """
    extrems = self.aggregate(
        end=models.Max('latest_status_date'),
        start=models.Min('first_status_date'))
    if not extrems['end'] or not extrems['start']:
        # case of empty qs (None values) -> zero-timedelta
        return timedelta()
    return extrems['end'] - extrems['start']
def with_bounds(self):
    return self.annotate(
        start=Min('statuses__creation_date'),
        end=Max('statuses__creation_date')).annotate(
        statuses_delta=ExpressionWrapper(
            F('end') - F('start'),
            output_field=models.DurationField()
        ))
def add_upcoming(queryset):  # {{{1
    if queryset.model == Event:
        return queryset.annotate(upcoming=Min('dates__eventdate_date'))
    else:
        raise RuntimeError('queryset.model was not Event')
def get_queryset(self):
    queryset = models.Match.objects
    if (settings.STECHEC_FIGHT_ONLY_OWN_CHAMPIONS
            and not self.request.user.is_staff):
        queryset = queryset.filter(author=self.request.user.id)
    queryset = (queryset.annotate(Max('matchplayer__score'))
                .annotate(Min('matchplayer__id')))
    return queryset
def max_bounds(cls):
    cache_key = 'mapdata:max_bounds:%s:%s' % (cls.__name__, MapUpdate.current_cache_key())
    result = cache.get(cache_key, None)
    if result is not None:
        return result
    result = cls.objects.all().aggregate(models.Min('left'), models.Min('bottom'),
                                         models.Max('right'), models.Max('top'))
    result = ((float(result['left__min']), float(result['bottom__min'])),
              (float(result['right__max']), float(result['top__max'])))
    cache.set(cache_key, result, 900)
    return result
def _get_oldest_time_last_used(self):
    """
    Returns a datetime for the oldest time an enabled Filter was last used.
    """
    default_queryset = self.get_queryset()
    enabled_filters = default_queryset.filter(enabled=True)
    aggregation = enabled_filters.aggregate(models.Min('last_used'))
    return aggregation['last_used__min']
def _update_range_bounds(self, measurements, interpolate):
    measurement_qs = models.Measurement.objects.filter(pk__in=measurements)
    values_qs = models.MeasurementValue.objects.filter(x__len=1).order_by('x')
    # capture lower/upper bounds of t values for all measurements
    trange = measurement_qs.aggregate(
        max_t=Max('measurementvalue__x'),
        min_t=Min('measurementvalue__x'),
    )
    if trange['max_t']:
        self._max = min(trange['max_t'][0], self._max or sys.maxint)
    if trange['min_t']:
        self._min = max(trange['min_t'][0], self._min or -sys.maxint)
    # iff no interpolation, capture intersection of t values bounded by max & min
    m_inter = measurement_qs.exclude(assay__protocol__in=interpolate).prefetch_related(
        Prefetch('measurementvalue_set', queryset=values_qs, to_attr='values'),
    )
    for m in m_inter:
        points = {p.x[0] for p in m.values if self._min <= p.x[0] <= self._max}
        if self._points is None:
            self._points = points
        elif self._points:
            self._points.intersection_update(points)
            if not self._points:
                # Adding warning as soon as no valid timepoints found
                self._export_errors.append(
                    _('Including measurement %(type_name)s results in no valid export '
                      'timepoints; consider excluding this measurement, or enable '
                      'interpolation for the %(protocol)s protocol.') % {
                        'type_name': m.measurement_type.type_name,
                        'protocol': m.assay.protocol.name,
                    }
                )
def x_range(self):
    """ Returns the bounding range of X-values used for all Measurements in the form. """
    f = self.fields['measurement']
    x_range = f.queryset.aggregate(
        max=Max('measurementvalue__x'),
        min=Min('measurementvalue__x')
    )
    # can potentially get None if there are no values; use __getitem__ default AND `or [0]`
    x_max = x_range.get('max', [0]) or [0]
    x_min = x_range.get('min', [0]) or [0]
    # max and min are both still arrays, grab the first element
    return (x_min[0], x_max[0])
def minimos_por_comercio(request, anio, mes, quincena, region_id=None):
    return _agregado_por_comercio(request, anio, mes, quincena, region_id, Min, "minimos_")
def test_unicode_date(self):
    "Testing dates are converted properly, even on SpatiaLite. See #16408."
    founded = datetime(1857, 5, 23)
    mansfield = PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga',
                                                point='POINT(-77.071445 41.823881)',
                                                founded=founded)
    self.assertEqual(founded, PennsylvaniaCity.objects.datetimes('founded', 'day')[0])
    self.assertEqual(founded, PennsylvaniaCity.objects.aggregate(Min('founded'))['founded__min'])
def min_buy_rate(cls, pair):
    return cls.objects.filter(
        pair=pair,
        sale__gte=1
    ).exclude(
        Q(cancel=True) | Q(completed=True)
    ).aggregate(Min('rate')).get('rate__min') or _Zero
def min_buy_rate_hour(cls, pair):
    return cls.last_24_hour().filter(
        pair=pair
    ).filter(
        sale__gte=1
    ).exclude(
        Q(cancel=True) | Q(completed=True)
    ).aggregate(Min('rate')).get('rate__min') or _Zero
def overview(request, username=None):
    '''
    Shows a plot with the weight data

    More info about the D3 library can be found here:
        * https://github.com/mbostock/d3
        * http://d3js.org/
    '''
    is_owner, user = check_access(request.user, username)

    template_data = {}

    min_date = WeightEntry.objects.filter(user=user).\
        aggregate(Min('date'))['date__min']
    max_date = WeightEntry.objects.filter(user=user).\
        aggregate(Max('date'))['date__max']
    if min_date:
        template_data['min_date'] = 'new Date(%(year)s, %(month)s, %(day)s)' % \
            {'year': min_date.year, 'month': min_date.month, 'day': min_date.day}
    if max_date:
        template_data['max_date'] = 'new Date(%(year)s, %(month)s, %(day)s)' % \
            {'year': max_date.year, 'month': max_date.month, 'day': max_date.day}

    last_weight_entries = helpers.get_last_entries(user)

    template_data['is_owner'] = is_owner
    template_data['owner_user'] = user
    template_data['show_shariff'] = is_owner
    template_data['last_five_weight_entries_details'] = last_weight_entries
    return render(request, 'overview.html', template_data)
def get_cheapest(self, currency):
    if currency not in CURRENCIES._choice_dict.values():
        return None
    cheapest_product = None
    products = self.values("currency").annotate(price=models.Min("price"))
    for product in products:
        if product["currency"] == currency:
            cheapest_product = product
            break
    return cheapest_product