The following 50 code examples, extracted from open-source Python projects, illustrate how to use pymongo.ASCENDING.
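Before the project examples, a minimal sketch of the two common uses (the client, database, collection, and field names here are hypothetical): pymongo.ASCENDING (the integer 1) is the direction flag passed to sort() and to index specifications, with pymongo.DESCENDING (-1) as its counterpart.

import pymongo

client = pymongo.MongoClient("mongodb://localhost:27017")  # assumes a local mongod
coll = client.example_db.scores                            # hypothetical collection

# Use 1: sort query results by a field in ascending order.
for doc in coll.find().sort("score", pymongo.ASCENDING):
    print(doc)

# Use 2: build a (compound) index; each key carries its own direction.
coll.create_index([("code", pymongo.ASCENDING),
                   ("date", pymongo.DESCENDING)])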
def get_all_achievements(event=None, show_disabled=False):
    """
    Gets all of the achievements in the database.

    Args:
        event: Optional parameter to restrict which achievements are returned
        show_disabled: Boolean indicating whether or not to show disabled achievements.
    Returns:
        List of achievements from the database
    """

    db = api.common.get_conn()

    match = {}
    if event is not None:
        match.update({'event': event})
    if not show_disabled:
        match.update({'disabled': False})

    return list(db.achievements.find(match, {"_id": 0}).sort('score', pymongo.ASCENDING))
def get_all_problems(category=None, show_disabled=False):
    """
    Gets all of the problems in the database.

    Args:
        category: Optional parameter to restrict which problems are returned
        show_disabled: Boolean indicating whether or not to show disabled problems.
    Returns:
        List of problems from the database
    """

    db = api.common.get_conn()

    match = {}
    if category is not None:
        match.update({'category': category})
    if not show_disabled:
        match.update({'disabled': False})

    return list(db.problems.find(match, {"_id": 0}).sort('score', pymongo.ASCENDING))
def update_ttl(ttl, ttl_index_name, index_field, coll):
    """Update or create time_to_live indexes.

    :param ttl: time to live in seconds.
    :param ttl_index_name: name of the index we want to update or create.
    :param index_field: field with the index that we need to update.
    :param coll: collection which indexes need to be updated.
    """
    indexes = coll.index_information()
    if ttl <= 0:
        if ttl_index_name in indexes:
            coll.drop_index(ttl_index_name)
        return

    if ttl_index_name in indexes:
        return coll.database.command(
            'collMod', coll.name,
            index={'keyPattern': {index_field: pymongo.ASCENDING},
                   'expireAfterSeconds': ttl})

    coll.create_index([(index_field, pymongo.ASCENDING)],
                      expireAfterSeconds=ttl,
                      name=ttl_index_name)
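A usage sketch for the helper above (hedged; the client, database, collection, and field names are invented for illustration):

import pymongo

client = pymongo.MongoClient()     # assumes a local mongod
coll = client.example_db.events    # hypothetical collection

# Create (or later update) a TTL index that expires documents
# one hour after their 'timestamp' field.
update_ttl(3600, 'ttl_index', 'timestamp', coll)

# A non-positive ttl drops the index again.
update_ttl(0, 'ttl_index', 'timestamp', coll)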
def get_transactions(self, from_date=None, to_date=None, only_new=True):
    """Retrieve transactions for producing text file."""
    query = {}
    if only_new:
        query['plaid2text.pulled_to_file'] = False

    if from_date and to_date and (from_date < to_date):
        query['date'] = {'$gte': from_date, '$lte': to_date}
    elif from_date and not to_date:
        query['date'] = {'$gte': from_date}
    elif not from_date and to_date:
        query['date'] = {'$lte': to_date}

    transactions = self.account.find(query).sort('date', ASCENDING)
    return transactions
def get_risk_free_rate(self, start_date, end_date):
    mongo_dates = self._yield_curve['dates'].find({}, {"_id": 0}).sort('date', pymongo.ASCENDING)
    _dates = np.array([np.uint32(d['date']) for d in mongo_dates])

    tenor = risk_free_helper.get_tenor_for(start_date, end_date)
    tenor = tenor[-1] + tenor[:-1]
    mongo_data = self._yield_curve[tenor].find({}, {"_id": 0})
    _table = np.array([d['data'] for d in mongo_data])

    d = start_date.year * 10000 + start_date.month * 100 + start_date.day
    pos = _dates.searchsorted(d)
    if pos > 0 and (pos == len(_dates) or _dates[pos] != d):
        pos -= 1
    while pos >= 0 and np.isnan(_table[pos]):
        # data is missing ...
        pos -= 1
    return _table[pos]
def available_data_range(self, frequency):
    """
    Get the date range for which data is available.

    :param str frequency: bar frequency; `1d` for daily bars, `1m` for minute bars

    :return: (earliest, latest)
    """
    if frequency == '1d':
        mongo_data = self._day_bars[self.INSTRUMENT_TYPE_MAP['INDX']]['000001.XSHG'] \
            .find({}, {"_id": 0}).sort('date', pymongo.ASCENDING)
        mongo_data = list(mongo_data)
        s, e = np.uint32(mongo_data[0]['date']), np.uint32(mongo_data[-1]['date'])
        return convert_int_to_date(s).date(), convert_int_to_date(e).date()

    if frequency == '1m':
        raise NotImplementedError
def get_symbol(self):
    # Fetch basic stock data and store each symbol in the database.
    df = fc.get_stock_basics_data()
    for row in range(0, df.shape[0]):
        item = {
            'code': str(df.index[row]),
            'name': str(df.iat[row, 0]),
            'industry': str(df.iat[row, 1]),
            'area': str(df.iat[row, 2]),
            'timeToMarket': str(df.iat[row, 14])
        }
        try:
            self.Symbol_Db['equity'].insert(item)
        except Exception:
            pass
    self.Symbol_Db['equity'].ensure_index([('code', pymongo.ASCENDING)])
def get_all_problems_for_admin(**options):
    """Returns all problems.

    Args:
        **options: Options passed to query.
    Returns:
        A list of problem dictionaries.
    """
    cursor = _db.problems.find({}, sort=[('_id', pymongo.ASCENDING)], **options)
    problems = list(cursor)
    enhance_problems_for_admin(problems)
    return problems
def extract(self):
    # TODO: Should be an exporter plugin
    graph = {
        'meta': {},        # self.__meta,
        'properties': {}   # self.__properties
    }

    graph['nodes'] = list()
    for v in self.__vertices.find().sort('id', pymongo.ASCENDING):
        v.pop("_id")  # Remove MongoDB document ID
        graph['nodes'].append(v)

    graph['edges'] = list()
    for e in self.__edges.find().sort("src", pymongo.ASCENDING):
        e.pop("_id")  # Remove MongoDB document ID
        graph['edges'].append(e)

    graph['tokens'] = list()
    for t in self.__tokens.find().sort('id', pymongo.ASCENDING):
        t.pop("_id")  # Remove MongoDB document ID
        t['id'] = str(t['id'])
        t['ts'] = time.mktime(t['ts'].timetuple())
        graph['tokens'].append(t)

    return graph
def open_spider(self, spider):
    logging.warning('opening spider')
    try:
        self.client = pymongo.MongoClient(self.mongo_uri)
        self.db = self.client[self.mongo_db]
    except ValueError:
        logging.error('failed to connect to MongoDB')

    # Create the indexes if the collection does not exist yet.
    if self.mongo_col not in self.db.collection_names():
        self.db[self.mongo_col].create_index(
            [('created_at', pymongo.DESCENDING)])
        self.db[self.mongo_col].create_index(
            [('admin', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('price', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('mblogid', pymongo.ASCENDING)], unique=True)
    else:
        # Otherwise look up the creation time of the most recent record.
        recent_row = list(self.db[self.mongo_col].find({'title': {'$eq': None}},
                                                       projection=['created_at'],
                                                       limit=1,
                                                       sort=[('created_at', pymongo.DESCENDING)]))
        if recent_row:
            self.recent = recent_row[0]['created_at']
            logging.warning("most recent record created at %s" % (
                self.recent + datetime.timedelta(hours=8)).__str__())
def open_spider(self, spider):
    logging.warning('opening spider')
    try:
        self.client = pymongo.MongoClient(self.mongo_uri)
        self.db = self.client[self.mongo_db]
    except ValueError:
        logging.error('failed to connect to MongoDB')

    # Create the indexes if the collection does not exist yet.
    if self.mongo_col not in self.db.collection_names():
        self.db[self.mongo_col].create_index(
            [('created_at', pymongo.DESCENDING)])
        self.db[self.mongo_col].create_index(
            [('admin', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('price', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('mblogid', pymongo.ASCENDING)], unique=True)
    else:
        # Otherwise look up the creation time of the most recent record.
        recent_row = list(self.db[self.mongo_col].find({'title': {'$ne': None}},
                                                       projection=['created_at'],
                                                       limit=1,
                                                       sort=[('created_at', pymongo.DESCENDING)]))
        if recent_row:
            self.recent = recent_row[0]['created_at']
            logging.warning("most recent record created at %s" % (
                self.recent + datetime.timedelta(hours=8)).__str__())
def open_spider(self, spider):
    logging.warning('opening spider')
    try:
        self.client = pymongo.MongoClient(self.mongo_uri)
        self.db = self.client[self.mongo_db]
    except ValueError:
        logging.error('failed to connect to MongoDB')

    # Create the indexes if the collection does not exist yet.
    if self.mongo_col not in self.db.collection_names():
        self.db[self.mongo_col].create_index(
            [('created_at', pymongo.DESCENDING)])
        self.db[self.mongo_col].create_index(
            [('admin', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('price', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('mblogid', pymongo.ASCENDING)], unique=True)
    else:
        # Otherwise look up the creation time of the most recent record.
        recent_row = list(self.db[self.mongo_col].find(projection=['created_at', '_id'],
                                                       limit=1,
                                                       sort=[('created_at', pymongo.DESCENDING)]))
        self.recent = recent_row[0]['created_at']
        logging.warning("most recent record created at %s" % (
            self.recent + datetime.timedelta(hours=8)).__str__())
def get_comment_list(share_id, skip=0, limit=None):
    cursor = ShareCommentDocument.find({
        'share': DBRef(
            ShareDocument.meta['collection'],
            ObjectId(share_id)
        )
    }).sort([('comment_time', pymongo.ASCENDING)]).skip(skip)

    if limit is not None:
        cursor = cursor.limit(limit)

    comment_list = yield ShareCommentDocument.to_list(cursor)
    for i, comment in enumerate(comment_list):
        comment['floor'] = skip + 1 + i
        comment['author'] = yield UserDocument.translate_dbref(
            comment['author'])
        if 'replyeder' in comment:
            comment['replyeder'] = yield UserDocument.translate_dbref(
                comment['replyeder'])

    raise gen.Return(comment_list)
def get_comment_list(topic_id, skip=0, limit=None):
    cursor = TopicCommentDocument.find({
        'topic': DBRef(TopicDocument.meta['collection'], ObjectId(topic_id))
    }).sort([('comment_time', pymongo.ASCENDING)]).skip(skip)

    if limit is not None:
        cursor = cursor.limit(limit)

    comment_list = yield TopicCommentDocument.to_list(cursor)
    for i, comment in enumerate(comment_list):
        comment['floor'] = skip + 1 + i
        comment['author'] = yield UserDocument.translate_dbref(
            comment['author'])
        if 'replyeder' in comment:
            comment['replyeder'] = yield UserDocument.translate_dbref(
                comment['replyeder'])

    raise gen.Return(comment_list)
def select(self, count=None, conditions=None):
    if count:
        count = int(count)
    else:
        count = 0

    if conditions:
        conditions = dict(conditions)
        conditions_name = ['types', 'protocol']
        for condition_name in conditions_name:
            value = conditions.get(condition_name, None)
            if value:
                conditions[condition_name] = int(value)
    else:
        conditions = {}

    items = self.proxys.find(conditions, limit=count).sort(
        [("speed", pymongo.ASCENDING), ("score", pymongo.DESCENDING)])
    results = []
    for item in items:
        result = (item['ip'], item['port'], item['score'])
        results.append(result)
    return results
async def list(self, request):
    await require(request, Permissions.view)
    possible_fields = [k.name for k in self._schema.keys]
    q = validate_query(request.GET, possible_fields)
    paging = calc_pagination(q, self._primary_key)

    filters = q.get('_filters')
    query = {}
    if filters:
        query = create_filter(filters, self._schema)

    sort_direction = ASCENDING if paging.sort_dir == ASC else DESCENDING
    cursor = (self._collection.find(query)
              .skip(paging.offset)
              .limit(paging.limit)
              .sort(paging.sort_field, sort_direction))

    entities = await cursor.to_list(paging.limit)
    count = await self._collection.find(query).count()
    headers = {'X-Total-Count': str(count)}
    return json_response(entities, headers=headers)
def getJudgementDetail():
    jd_collection = db.JudgmentDoc_isExactlySame
    query = jd_collection.find({}, {'_id': 0, 'Id': 1}).sort("Id", pymongo.ASCENDING)
    idList = list(query)
    length = len(idList)

    collection = db.JudgementDetail
    query = collection.find({}, {'_id': 0, 'Id': 1}).sort("_id", pymongo.DESCENDING).limit(3)
    record_id = list(query)
    print(record_id)

    ii = idList.index(record_id[0])
    for i in range(ii + 1, ii + 30000):
        print("%d/%d\t%s" % (i, length, idList[i]['Id']))
        ret = JudgementDetail(idList[i]['Id'])
        if ret in err_code:
            print('err_code: %s' % ret)
            break
    return ret
def getJudgementDetail():
    jd_collection = db.JudgmentDoc_isExactlySame
    query = jd_collection.find({}, {'_id': 0, 'Id': 1}).sort("Id", pymongo.ASCENDING)
    idList = list(query)
    length = len(idList)

    collection = db.JudgementDetail
    query = collection.find({}, {'_id': 0, 'Id': 1}).sort("_id", pymongo.DESCENDING).limit(10)
    record_id = list(query)

    ii = 0
    for id in idList:  # [:10]:
        ii = ii + 1
        print("%d/%d\t%s" % (ii, length, id['Id']))
        ret = JudgementDetail(id['Id'])
        if ret in ['101', '102', '103', '104', '105', '107', '108', '109', '110', '199']:
            break
def ensure_index(cls):
    super().ensure_index()
    if not cls.COLLECTION_NAME:
        return

    collection = cls.collection()
    collection.create_index(
        [("is_latest", pymongo.DESCENDING)],
        name="index_latest",
        partialFilterExpression={"is_latest": True})
    collection.create_index(
        [("model_id", pymongo.ASCENDING), ("version", pymongo.ASCENDING)],
        name="index_unique_version",
        unique=True)
def verify_batch(start=0, limitN=7000, verbose=False):
    from lmfdb import getDBconnection
    import pymongo
    C = getDBconnection()
    i = 0
    bound = 0
    label = None
    for curve in C.genus2_curves.curves.find().sort(
            [("cond", pymongo.ASCENDING), ("label", pymongo.ASCENDING)]).limit(limitN).skip(start):
        label = curve['label']
        Lhash = curve['Lhash']
        q, rendo, reuler = verify_curve_lmfdb(label, Lhash)
        if not q:
            print("FAILED at label = %s" % label)
        if verbose:
            print(label, q)
        i += 1
        if int(100.0 * i / limitN) >= bound:
            print("%s%%\t %s / 66158\t at label = %s" % (int(100.0 * i / limitN), start + i, label))
            bound += 1
    print("Done from %s to %s / 66158\t at label = %s" % (start + 1, start + i, label))
def update_setting(self, setting):
    try:
        self.setting_list.remove(setting)
    except ValueError:
        pass

    while len(self.setting_list) == 0:
        result = self.collection.find().sort('speed', pymongo.ASCENDING)
        for one in result:
            setting = {
                # 'proxy': one,
                'cookies': "".join(random.sample(string.ascii_letters + string.digits, 11)),
                'agent': random.choice(AGENTS_ALL)
            }
            # self.collection.remove(one)
            self.setting_list.append(setting)
        if len(self.setting_list) == 0:
            log.info('update setting failed,sleep....')
            time.sleep(self.db_cycle_time)
        else:
            log.info('update setting succeed,get new setting {}.'.format(len(self.setting_list)))
def update_setting(self, setting):
    try:
        self.setting_list.remove(setting)
    except ValueError:
        pass

    while len(self.setting_list) == 0:
        result = self.collection.find().sort('speed', pymongo.ASCENDING)
        for one in result:
            setting = {
                # 'proxy': one,
                'cookies': "".join(random.sample(string.ascii_letters + string.digits, 11)),
                'agent': random.choice(AGENTS_ALL)
            }
            self.setting_list.append(setting)
        if len(self.setting_list) == 0:
            log.info('update setting failed,sleep....')
            time.sleep(self.db_cycle_time)
        else:
            log.info('update setting succeed,get new setting {}.'.format(len(self.setting_list)))
def get_activities(self, count=10, conversation_id=None, simple=False):
    last_id = self._get_last_id()
    if count == -1:
        first_id = 0
    else:
        first_id = last_id - count
        if first_id < 0:
            first_id = 0

    if count == -1:
        count_index = 0
    else:
        count_index = -count

    if conversation_id is None:
        # list = self._simplify_list(list(self.conversation_collection.find(
        #     {'_id': {'$gt': first_id, '$lte': last_id}}).sort("_id", ASCENDING)), simple)
        return self._simplify_list(
            list(self.conversation_collection.find().sort("_id", ASCENDING)),
            simple)[count_index:]
    else:
        return self._simplify_list(
            list(self.conversation_collection.find(
                {'conversation_id': conversation_id}).sort("_id", ASCENDING)),
            simple)[count_index:]
def getOneWeibo():
    reason = ['????', '??', '????????', '????', '??????',
              '????', '??????', '????', '????', '?????']
    db = get_db()
    cl = db['retweet']
    rst = cl.find({'retweet': 0}).sort("mid", pymongo.ASCENDING)
    for rt in rst:
        cl.update({'_id': rt['_id']}, {'$set': {'retweet': 1}})
        print('update', rt['mid'], ' retweet=1')
        if len(rt['url']) > 4:
            print(rt['mid'], ' too many users to follow, find another weibo')
            continue
        rt['reason'] = reason[random.randint(0, len(reason) - 1)]
        if rt['friend'] > 0:
            cl = db['follow']
            rst = cl.find({'follow': 1})
            rint = random.randint(0, rst.count() - rt['friend'])
            for i in range(rint, rint + rt['friend']):
                rt['reason'] = rt['reason'] + '@' + rst[i]['nick'] + ' '
        return rt
    return False
def _get_jobs(self, conditions):
    jobs = []
    failed_job_ids = []
    for document in self.collection.find(conditions, ['_id', 'job_state'],
                                         sort=[('next_run_time', ASCENDING)]):
        try:
            jobs.append(self._reconstitute_job(document['job_state']))
        except Exception:
            self._logger.exception('Unable to restore job "%s" -- removing it',
                                   document['_id'])
            failed_job_ids.append(document['_id'])

    # Remove all the jobs we failed to restore
    if failed_job_ids:
        self.collection.remove({'_id': {'$in': failed_job_ids}})

    return jobs
def _create_unique_index(self):
    """Create an index based on raw data reference fields.

    Creates a compound index on the fields that contain the location
    of the raw data from which a document was derived. This prevents
    duplicate documents from being saved.
    """
    data_key = _DISTILLERY_SETTINGS['RAW_DATA_KEY']
    fields = ['BACKEND_KEY', 'WAREHOUSE_KEY', 'COLLECTION_KEY', 'DOC_ID_KEY']
    keys = ['%s.%s' % (data_key, _DISTILLERY_SETTINGS[field]) for field in fields]
    formatted_keys = [(key, pymongo.ASCENDING) for key in keys]
    return self._collection.create_index(formatted_keys, unique=True, sparse=True)
def rebuild_people_indexes():
    indexes = []
    # indexes.append(IndexModel('pid', name='_pid'))
    indexes.append(IndexModel('PersonNameLastName', name='_LastName'))
    indexes.append(IndexModel('PersonNameFirstName', name='_FirstName'))
    indexes.append(IndexModel('BirthPlace.Place', name='_BirthPlace'))
    indexes.append(IndexModel('relatives.pid', name='_RelativesPid'))
    # indexes.append(IndexModel('BirthDate', name='_BirthDate'))
    indexes.append(IndexModel([('BirthDate.Year', ASCENDING),
                               ('BirthDate.Month', ASCENDING),
                               ('BirthDate.Day', ASCENDING)],
                              name="_BirthDate"))
    mc[write_table].create_indexes(indexes)
def download_arch_security():
    db = get_db()
    collection = db.arch_security_updates
    collection.create_index([('package', ASCENDING),
                             ('announced_at', ASCENDING)], unique=True)
    for package, dt, source in rss_feed():
        try:
            collection.insert_one({'package': package,
                                   'announced_at': dt,
                                   'source': source})
        except DuplicateKeyError:
            return
        else:
            log.info('Identified Arch security update for {}, '
                     'announced at {}', package, dt)
            yield (package, dt)
def init_db(self, sut_fuzzer_pairs):
    """
    Creates a 'fuzzinator_issues' collection with appropriate indexes (if
    not existing already), and initializes a 'fuzzinator_stats' collection
    for (sut, fuzzer) pairs (with 0 exec and crash counts if not existing
    already).
    """
    db = self._db

    issues = db.fuzzinator_issues
    issues.create_index([('sut', ASCENDING), ('id', ASCENDING)])

    stats = db.fuzzinator_stats
    for sut, fuzzer in sut_fuzzer_pairs:
        if stats.find({'sut': sut, 'fuzzer': fuzzer}).count() == 0:
            stats.insert_one({'sut': sut, 'fuzzer': fuzzer, 'exec': 0, 'crashes': 0})
def get_api_exceptions(result_limit=50, sort_direction=pymongo.DESCENDING):
    """
    Retrieve api exceptions.

    Args:
        result_limit: the maximum number of exceptions to return.
        sort_direction: pymongo.ASCENDING or pymongo.DESCENDING
    """
    db = api.common.get_conn()
    results = db.exceptions.find({"visible": True}) \
                           .sort([("time", sort_direction)]) \
                           .limit(result_limit)
    return list(results)
def _index_list(key_or_list, direction=None):
    """Helper to generate a list of (key, direction) pairs.

    Takes such a list, or a single key, or a single key and direction.
    """
    if direction is not None:
        return [(key_or_list, direction)]
    else:
        if isinstance(key_or_list, string_type):
            return [(key_or_list, ASCENDING)]
        elif not isinstance(key_or_list, (list, tuple)):
            raise TypeError("if no direction is specified, "
                            "key_or_list must be an instance of list")
        return key_or_list
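The expected normalization, traced from the code above (ASCENDING and DESCENDING are the pymongo constants):

_index_list("score")                  # -> [("score", ASCENDING)]
_index_list("score", DESCENDING)      # -> [("score", DESCENDING)]
_index_list([("a", ASCENDING),
             ("b", DESCENDING)])      # returned unchanged
_index_list(42)                       # raises TypeError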
def QA_SU_save_stock_day(client=QA_Setting.client):
    stock_list = QA_fetch_get_stock_time_to_market()
    coll_stock_day = client.quantaxis.stock_day
    coll_stock_day.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll_stock_day):
        try:
            QA_util_log_info(
                '##JOB01 Now Saving STOCK_DAY==== %s' % (str(code)))
            ref = coll_stock_day.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]
            if ref.count() > 0:
                # Records for this code already exist: resume from the most
                # recently saved date instead of re-downloading the history.
                start_date = ref[ref.count() - 1]['date']
            else:
                start_date = '1990-01-01'
            QA_util_log_info(
                ' UPDATE_STOCK_DAY \n Trying updating %s from %s to %s' %
                (code, start_date, end_date))
            if start_date != end_date:
                coll_stock_day.insert_many(
                    QA_util_to_json_from_pandas(
                        QA_fetch_get_stock_day(str(code), start_date, end_date, '00')[1::]))
        except Exception:
            err.append(str(code))

    for item in range(len(stock_list)):
        QA_util_log_info('The %s of Total %s' % (item, len(stock_list)))
        QA_util_log_info('DOWNLOAD PROGRESS %s ' % str(
            float(item / len(stock_list) * 100))[0:4] + '%')
        __saving_work(stock_list.index[item], coll_stock_day)

    if len(err) < 1:
        QA_util_log_info('SUCCESS')
    else:
        QA_util_log_info('ERROR CODE \n ')
        QA_util_log_info(err)
def QA_SU_save_stock_xdxr(client=QA_Setting.client):
    client.quantaxis.drop_collection('stock_xdxr')
    stock_list = QA_fetch_get_stock_time_to_market()
    coll = client.quantaxis.stock_xdxr
    coll.create_index([('code', pymongo.ASCENDING),
                       ('date', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB02 Now Saving XDXR INFO ==== %s' % (str(code)))
        try:
            coll.insert_many(
                QA_util_to_json_from_pandas(
                    QA_fetch_get_stock_xdxr(str(code))))
        except Exception:
            err.append(str(code))

    for i_ in range(len(stock_list)):
        # __saving_work('000001')
        QA_util_log_info('The %s of Total %s' % (i_, len(stock_list)))
        QA_util_log_info('DOWNLOAD PROGRESS %s ' % str(
            float(i_ / len(stock_list) * 100))[0:4] + '%')
        __saving_work(stock_list.index[i_], coll)

    if len(err) < 1:
        QA_util_log_info('SUCCESS')
    else:
        QA_util_log_info('ERROR CODE \n ')
        QA_util_log_info(err)
def QA_SU_save_index_day(client=QA_Setting.client):
    __index_list = QA_fetch_get_stock_list('index')
    coll = client.quantaxis.index_day
    coll.create_index([('code', pymongo.ASCENDING),
                       ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                start_time = ref_[ref_.count() - 1]['date']
            else:
                start_time = '1990-01-01'
            QA_util_log_info(
                '##JOB04 Now Saving INDEX_DAY==== \n Trying updating %s from %s to %s' %
                (code, start_time, end_time))
            if start_time != end_time:
                coll.insert_many(
                    QA_util_to_json_from_pandas(
                        QA_fetch_get_index_day(str(code), start_time, end_time)[1::]))
        except Exception:
            err.append(str(code))

    for i_ in range(len(__index_list)):
        # __saving_work('000001')
        QA_util_log_info('The %s of Total %s' % (i_, len(__index_list)))
        QA_util_log_info('DOWNLOAD PROGRESS %s ' % str(
            float(i_ / len(__index_list) * 100))[0:4] + '%')
        __saving_work(__index_list.index[i_][0], coll)

    if len(err) < 1:
        QA_util_log_info('SUCCESS')
    else:
        QA_util_log_info('ERROR CODE \n ')
        QA_util_log_info(err)
def _index_list(key_or_list, direction=None):
    """Helper to generate a list of (key, direction) pairs.

    Takes such a list, or a single key, or a single key and direction.
    """
    if direction is not None:
        return [(key_or_list, direction)]
    else:
        if isinstance(key_or_list, basestring):
            return [(key_or_list, pymongo.ASCENDING)]
        elif not isinstance(key_or_list, (list, tuple)):
            raise TypeError("if no direction is specified, "
                            "key_or_list must be an instance of list")
        return key_or_list
def _ensure_index(self):
    if not object.__getattribute__(self, "_ensured_index"):
        self._coll.chunks.ensure_index(
            [("files_id", ASCENDING), ("n", ASCENDING)], unique=True)
        object.__setattr__(self, "_ensured_index", True)
def __init__(self, rmpids, interval):
    """
    Constructor for RateMyProfessors; sets the RMP schools to request and
    the scraping interval.

    :param rmpids: **list** List of RMP ids to scrape for
    :param interval: **int** Seconds to wait in between scraping
    :return:
    """
    threading.Thread.__init__(self)

    # Pass in a list that contains the ids to fetch
    self.ids = rmpids

    # The amount of seconds to wait before scraping RMP again
    self.interval = interval

    # Establish db connection
    self.db = pymongo.MongoClient().ScheduleStorm

    log.info("Ensuring MongoDB indexes exist")
    self.db.RateMyProfessors.create_index(
        [("school", pymongo.ASCENDING)])
    self.db.RateMyProfessors.create_index(
        [("id", pymongo.ASCENDING), ("school", pymongo.ASCENDING)],
        unique=True)
def __init__(self, settings):
    super().__init__(settings)
    self.db = pymongo.MongoClient().ScheduleStorm
    self.db.UAlbertaProfessor.create_index([("uid", pymongo.ASCENDING)], unique=True)