The following are 50 code examples extracted from open-source Python projects, illustrating how to use bson.objectid.ObjectId().
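Before the project examples, here is a minimal, self-contained sketch of the ObjectId basics they all rely on: generating a fresh id, round-tripping through the 24-character hex string, validating untrusted input, and reading the creation timestamp embedded in the id.

from bson.objectid import ObjectId

oid = ObjectId()               # freshly generated, globally unique id
same = ObjectId(str(oid))      # parses the 24-character hex representation
assert oid == same

ObjectId.is_valid('not-an-id')  # False; cheap check before constructing
print(oid.generation_time)      # creation time embedded in the id (UTC)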
def get(self, id=None, callback=None, alias=None, **kwargs):
    '''
    Gets a single item of the current queryset collection using its id.

    In order to query a different database, please specify the `alias` of
    the database to query.
    '''
    from motorengine import Q

    if id is None and not kwargs:
        raise RuntimeError("Either an id or a filter must be provided to get")

    if id is not None:
        if not isinstance(id, ObjectId):
            id = ObjectId(id)
        filters = {"_id": id}
    else:
        filters = Q(**kwargs)
        filters = self.get_query_from_filters(filters)

    self.coll(alias).find_one(filters, callback=self.handle_get(callback))
def mongo_job_id_exists(job_id):
    """ Check if a job_id exists in MongoDB.

    This uses the `mongo` context object from Flask.

    Parameters
    ----------
    job_id: str

    Returns
    -------
    bool
        True if job_id exists. False otherwise.
    """
    key = dict(_id=ObjectId(job_id))
    count = mongo.db.jobs.count(key)
    return count == 1
def mongo_get_job(job_id):
    """ Get job object from MongoDB.

    This uses the `mongo` context object from Flask.

    Parameters
    ----------
    job_id: str

    Returns
    -------
    dict
        Job object
    """
    key = dict(_id=ObjectId(job_id))
    response = mongo.db.jobs.find_one(key)
    return response
def mongo_no_context_get_job(job_id):
    """ Get job object from MongoDB.

    This does not use the context object from Flask.

    Parameters
    ----------
    job_id: str

    Returns
    -------
    dict
        Job object
    """
    client = MongoClient(MONGO_URI)
    db = client[MONGO_DBNAME]
    key = dict(_id=ObjectId(job_id))
    response = db.jobs.find_one(key)
    return response
def mongo_add_s3_file_key(job_id, s3_file_key):
    """ Adds 's3_file_key' key-value to job object in MongoDB.

    This uses the `mongo` context object from Flask.

    Parameters
    ----------
    job_id: str
    s3_file_key: str

    Returns
    -------
    None
    """
    response = mongo.db.jobs.update_one({'_id': ObjectId(job_id)},
                                        {'$set': {'s3_file_key': s3_file_key}})
    return response
def save_to_database(list_id, scangroup_id):
    return db_connector.saveSingleUrl.s(list_id, scangroup_id)

    # state = db_connector.SaveScan(list_id, scangroup_id, urls)
    #
    # TODO The following is just error handling for the insert - will probably
    # also have to be moved (statekeeping in MongoDB)
    # client = MongoClient(config.MONGODB_URL)
    # db = client['PrangerDB']
    # if state.startswith('error'):
    #     db.ScanGroup.update({'_id': ObjectId(scangroup_id)}, {'$set': {'state': "error during SaveScan - %s" % state}})
    #     print "error during SaveScan - %s" % state
    # elif state.startswith('success'):
    #     db.ScanGroup.update({'_id': ObjectId(scangroup_id)}, {'$set': {'state': 'finish'}})
    #     db.ScanGroup.update({'_id': ObjectId(scangroup_id)}, {'$set': {'progress': "finish"}})
    #     db.ScanGroup.update({'_id': ObjectId(scangroup_id)}, {'$set': {'progress_timestamp': datetime.now().isoformat()}}, upsert=False)
    # else:
    #     db.ScanGroup.update({'_id': ObjectId(scangroup_id)}, {'$set': {'state': 'unknown error during SaveScan: no status returned'}})
    #     print "unknown error during SaveScan: no status returned"
def DeleteList(self, token):
    sitestodelete = []
    scanstodelete = []
    listcursor = db.Listen.find({"token": token}, {"_id": 1})
    the_list = listcursor.next()
    sites = db.Seiten.find({"list_id": ObjectId(the_list["_id"])}, {"_id": 1})
    # Only collect the ids into arrays here, because I am unsure whether
    # deleting while iterating over a cursor is such a good idea.
    for site in sites:
        sitestodelete.append(site["_id"])
        scans = db.Scans.find({"site_id": ObjectId(site["_id"])}, {"_id": 1})
        for scan in scans:
            scanstodelete.append(scan["_id"])
    for scan in scanstodelete:
        db.Scans.remove({"_id": ObjectId(scan)})
    for site in sitestodelete:
        db.Seiten.remove({"_id": ObjectId(site)})
    db.ScanGroup.remove({"list_id": ObjectId(the_list["_id"])})
    db.Listen.remove({"token": token})
    # Works
def GetScanGroupsByList(self, listid):
    scangroups = []
    scangroupsCursor = db.ScanGroup.find({'list_id': ObjectId(listid)})
    for scangroup in scangroupsCursor:
        try:
            if not (scangroup["progress"] == "finish"):
                progtime = scangroup["progress_timestamp"]
                progtime_dt = datetime.strptime(progtime, "%Y-%m-%dT%H:%M:%S.%f")
                now_dt = datetime.now()
                delta = int((now_dt - progtime_dt).total_seconds())
                ht = humanize_time(delta, 'seconds')
                flatten = lambda l: [item for sublist in l for item in sublist]
                delta_str = ' '.join(flatten(ht)) + " elapsed"
                scangroup["progress_timestamp_absolute"] = scangroup["progress_timestamp"]
                scangroup["progress_timestamp"] = delta_str
        except Exception as ex:
            print ex
        scangroups.append(scangroup)
    return scangroups
    # Works
def default(self, o):
    # for Enum Type
    if isinstance(o, enum.Enum):
        return o.value
    # for Enum Select Integer
    if isinstance(o, EnumInt):
        return o.key
    if isinstance(o, (datetime, date)):
        return o.isoformat()
    if isinstance(o, Decimal):
        return _number_str(o)
    if isinstance(o, ObjectId):
        return str(o)
    return super(JSONEncoder, self).default(o)
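A default hook like the one above is installed by passing the encoder class to json.dumps. A minimal sketch of the wiring, assuming a standalone encoder that only handles ObjectId (the class name MongoJSONEncoder is illustrative):

import json
from bson.objectid import ObjectId

class MongoJSONEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, ObjectId):
            return str(o)  # serialize ObjectId as its 24-char hex string
        return super(MongoJSONEncoder, self).default(o)

doc = {'_id': ObjectId(), 'x': 1}
print(json.dumps(doc, cls=MongoJSONEncoder))  # e.g. {"_id": "54f112defba522406c9cc208", "x": 1}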
def get(self, uid):
    """
    Get a workflow from the backing database (mongo); the special
    uid == 'all' returns the whole list.

    The workflow will be stored in the response as JSON under the key
    'workflow'. If you ask for 'all', there will instead be a field
    'workflows' containing an array of workflows.
    """
    print('flow called')
    client = db_client()[flowdb][flowcol]
    resp = newresp(request, uid)

    if uid == 'all':
        try:
            resp['workflows'] = map(serialize_id_key, client.find())
        except Exception as ex:
            print(ex)
            return "failed bulk read to mongo", 500
    else:
        try:
            resp['workflow'] = serialize_id_key(client.find({'_id': ObjectId(uid)})[0])
        except IndexError:
            return 'No such object %s' % uid, 404

    resp['mesg'] = "You asked for %s" % uid
    return resp
def delete_article(user, list_id, article_id):
    # Retrieve the article and list to be deleted
    try:
        # Check resource
        the_article = Article.objects.get(id=ObjectId(article_id))
        the_list = List.objects.get(Q(id=ObjectId(list_id)) & Q(articles=the_article))

        # Remove the article from the database
        Article.objects(id=the_article.id).delete()

        # Remove the vote as well
        Vote.objects(article=the_article, list=the_list).delete()
    except Exception as e:
        return type(e).__name__

    the_list.reload()
    return the_list
def archive_article(user, list_id, article_id):
    # Retrieve the article and list to be archived
    try:
        # Check resource
        the_article = Article.objects.get(id=ObjectId(article_id))
        the_list = List.objects.get(Q(id=ObjectId(list_id)) & Q(articles=the_article))

        # Remove the article from the list
        List.objects(id=the_list.id).update_one(pull__articles=the_article)

        # Remove the vote as well
        Vote.objects(article=the_article, list=the_list).delete()
    except Exception as e:
        return type(e).__name__

    the_list.reload()
    return the_list
def downvote_article(user, group_id, list_id, article_id):
    try:
        # Resources check
        article = Article.objects.get(id=ObjectId(article_id))
        group = Group.objects.get(id=ObjectId(group_id), lists=ObjectId(list_id), members=user)
        list = List.objects.get(id=ObjectId(list_id), articles=article)

        # Create new vote
        vote = check_vote_exist(list, article)
        if check_user_has_downvoted(user, vote):
            raise UserHasVoted('User cannot vote twice.')

        # User is just trying to take vote back
        if check_user_has_upvoted(user, vote):
            Vote.objects(id=vote.id).update_one(pull__upvoter_list=user,
                                                vote_count=vote.vote_count - 1)
        else:
            # Downvote
            Vote.objects(id=vote.id).update_one(push__downvoter_list=user,
                                                vote_count=vote.vote_count - 1)
    except Exception as e:
        return type(e).__name__

    vote.reload()
    return vote
def partition_user_list(user, old_list_id, new_list_name, articles):
    try:
        # Get list and create new list
        old_list = List.objects.get(id=ObjectId(old_list_id))
        new_list = create_list(new_list_name, user)

        article_buffer = list()
        for a in articles:
            article_buffer.append(Article.objects.get(id=ObjectId(a)))

        # Add the selected articles to the new list and remove them from the old one
        List.objects(id=new_list.id).update_one(add_to_set__articles=article_buffer)
        List.objects(id=old_list.id).update_one(pull_all__articles=article_buffer)
    except Exception as e:
        print(type(e).__name__)
        return type(e).__name__

    old_list.reload()
    new_list.reload()
    return old_list, new_list
def share_article_to_group_list(user, base_list_id, article_id, group_id, target_list_id):
    try:
        # Check ownership
        base_list = List.objects.get(id=ObjectId(base_list_id))
        target_list = List.objects.get(id=ObjectId(target_list_id))
        article = Article.objects.get(id=ObjectId(article_id))
        Group.objects.get(Q(id=ObjectId(group_id)) & Q(lists=target_list))
        User.objects.get(Q(id=user.id) & Q(lists=base_list))

        # Add the article to the list
        List.objects(id=target_list.id).update_one(push__articles=article)

        # Init the vote object as well
        Vote(article=article, list=target_list).save()
    except Exception as e:
        return type(e).__name__
async def rejudge(record_id: objectid.ObjectId, enqueue: bool=True):
    coll = db.coll('record')
    doc = await coll.find_one_and_update(
        filter={'_id': record_id},
        update={'$unset': {'judge_uid': '', 'judge_token': '', 'judge_at': '',
                           'compiler_texts': '', 'judge_texts': '', 'cases': ''},
                '$set': {'status': constant.record.STATUS_WAITING, 'score': 0,
                         'time_ms': 0, 'memory_kb': 0, 'rejudged': True}},
        return_document=ReturnDocument.AFTER)
    bus.publish_throttle('record_change', doc, doc['_id'])
    if enqueue:
        await queue.publish('judge', rid=doc['_id'])
async def add(domain_id: str, content: str, owner_uid: int, doc_type: int,
              doc_id: convert_doc_id = None, parent_doc_type: int = None,
              parent_doc_id: convert_doc_id = None, **kwargs):
    """Add a document. Returns the document id."""
    obj_id = objectid.ObjectId()
    coll = db.coll('document')
    doc = {'_id': obj_id,
           'content': content,
           'owner_uid': owner_uid,
           'domain_id': domain_id,
           'doc_type': doc_type,
           'doc_id': doc_id or obj_id,
           **kwargs}
    if parent_doc_type or parent_doc_id:
        assert parent_doc_type and parent_doc_id
        doc['parent_doc_type'], doc['parent_doc_id'] = parent_doc_type, parent_doc_id
    await coll.insert_one(doc)
    return doc['doc_id']
async def post(self, *, tid: objectid.ObjectId, title: str, content: str,
               dag: str, desc: str):
    tdoc = await training.get(self.domain_id, tid)
    if not self.own(tdoc, builtin.PERM_EDIT_TRAINING_SELF):
        self.check_perm(builtin.PERM_EDIT_TRAINING)
    dag = _parse_dag_json(dag)
    pids = self.get_pids({'dag': dag})
    if not pids:
        # empty plan
        raise error.ValidationError('dag')
    pdocs = await problem.get_multi(domain_id=self.domain_id,
                                    doc_id={'$in': pids},
                                    fields={'doc_id': 1, 'hidden': 1}) \
                         .sort('doc_id', 1) \
                         .to_list()
    exist_pids = [pdoc['doc_id'] for pdoc in pdocs]
    if len(pids) != len(exist_pids):
        for pid in pids:
            if pid not in exist_pids:
                raise error.ProblemNotFoundError(self.domain_id, pid)
    for pdoc in pdocs:
        if pdoc.get('hidden', False):
            self.check_perm(builtin.PERM_VIEW_PROBLEM_HIDDEN)
    await training.edit(self.domain_id, tdoc['doc_id'], title=title,
                        content=content, dag=dag, desc=desc)
    self.json_or_redirect(self.reverse_url('training_detail', tid=tid))
async def post(self, *, rid: objectid.ObjectId, score: int, message: str=''):
    rdoc = await record.get(rid)
    if rdoc['domain_id'] == self.domain_id:
        self.check_perm(builtin.PERM_REJUDGE)
    else:
        self.check_priv(builtin.PRIV_REJUDGE)
    await record.rejudge(rdoc['_id'], False)
    await record.begin_judge(rid, self.user['_id'], self.user['_id'],
                             constant.record.STATUS_FETCHED)
    update = {'$set': {}, '$push': {}}
    update['$set']['status'] = constant.record.STATUS_ACCEPTED if score == 100 \
        else constant.record.STATUS_WRONG_ANSWER
    update['$push']['cases'] = {
        'status': update['$set']['status'],
        'score': score,
        'time_ms': 0,
        'memory_kb': 0,
        'judge_text': message,
    }
    await record.next_judge(rid, self.user['_id'], self.user['_id'], **update)
    rdoc = await record.end_judge(rid, self.user['_id'], self.user['_id'],
                                  update['$set']['status'], score, 0, 0)
    await _post_judge(self, rdoc)
    self.json_or_redirect(self.referer_or_main)
async def get(self, *, tid: objectid.ObjectId, pid: document.convert_doc_id):
    uid = self.user['_id'] if self.has_priv(builtin.PRIV_USER_PROFILE) else None
    tdoc, pdoc = await asyncio.gather(contest.get(self.domain_id, tid),
                                      problem.get(self.domain_id, pid, uid))
    tsdoc, udoc = await asyncio.gather(
        contest.get_status(self.domain_id, tdoc['doc_id'], self.user['_id']),
        user.get_by_uid(tdoc['owner_uid']))
    attended = tsdoc and tsdoc.get('attend') == 1
    if not self.is_done(tdoc):
        if not attended:
            raise error.ContestNotAttendedError(tdoc['doc_id'])
        if not self.is_live(tdoc):
            raise error.ContestNotLiveError(tdoc['doc_id'])
    if pid not in tdoc['pids']:
        raise error.ProblemNotFoundError(self.domain_id, pid, tdoc['doc_id'])
    path_components = self.build_path(
        (self.translate('contest_main'), self.reverse_url('contest_main')),
        (tdoc['title'], self.reverse_url('contest_detail', tid=tid)),
        (pdoc['title'], None))
    self.render('problem_detail.html', tdoc=tdoc, pdoc=pdoc, tsdoc=tsdoc, udoc=udoc,
                attended=attended, page_title=pdoc['title'],
                path_components=path_components)
async def post(self, *, tid: objectid.ObjectId, pid: document.convert_doc_id,
               lang: str, code: str):
    tdoc, pdoc = await asyncio.gather(contest.get(self.domain_id, tid),
                                      problem.get(self.domain_id, pid))
    tsdoc = await contest.get_status(self.domain_id, tdoc['doc_id'], self.user['_id'])
    if not tsdoc or tsdoc.get('attend') != 1:
        raise error.ContestNotAttendedError(tdoc['doc_id'])
    if not self.is_live(tdoc):
        raise error.ContestNotLiveError(tdoc['doc_id'])
    if pid not in tdoc['pids']:
        raise error.ProblemNotFoundError(self.domain_id, pid, tdoc['doc_id'])
    rid = await record.add(self.domain_id, pdoc['doc_id'],
                           constant.record.TYPE_SUBMISSION,
                           self.user['_id'], lang, code, tid=tdoc['doc_id'],
                           hidden=True)
    await contest.update_status(self.domain_id, tdoc['doc_id'], self.user['_id'],
                                rid, pdoc['doc_id'], False, 0)
    if not self.can_show_record(tdoc):
        self.json_or_redirect(self.reverse_url('contest_detail', tid=tdoc['doc_id']))
    else:
        self.json_or_redirect(self.reverse_url('record_detail', rid=rid))
def get_user_by_field(field, value):
    """
    Look up a user by the given field.

    :param field: field name to query by (id/user_id/email/name)
    :param value: field value
    :return: user object, or None
    """
    valid_fields = ['id', 'user_id', 'name', 'email']
    if field not in valid_fields:
        return None
    if field == 'id':
        field = 'user_id'
        # value = ObjectId(value)
    user = User(**{field: value})
    if user.user_id:
        return user
    return None
def detail(request, workflowId):
    # Fetch the workflow record from the db by workflowId
    workflowDetail = get_object_or_404(workflow, pk=workflowId)
    workflowDetail.field_names = json.loads(workflowDetail.field_names)

    # The MongoDB collection name is suffixed with the creation month (YYYYMM)
    _SUFFIX = workflowDetail.create_time.strftime("%Y%m")
    collection = mongodb.db[conf.get("mongo", 'collection') + "_" + _SUFFIX]

    # Fetch the stored query results from MongoDB
    results_objectid = workflowDetail.results_objectid
    document = collection.find_one({'_id': ObjectId(results_objectid)})
    query_results = json.loads(document['query_results'])

    context = {
        'workflowDetail': workflowDetail,
        'query_results': query_results,
    }
    return render(request, 'sqlquery/detail.html', context)
def _prepare_input(data, replace):
    if isinstance(data, dict):
        result = {}
        for key, val in data.items():
            if not replace and (key.endswith('_id') or key.endswith('_ids')):
                result[key] = _prepare_input(val, True)
            else:
                result[key] = _prepare_input(val, replace)
        return result
    elif isinstance(data, list):
        return [_prepare_input(e, replace) for e in data]
    elif replace and isinstance(data, str):
        try:
            return ObjectId(data)
        except:
            return data
    return data
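The helper above walks a nested structure and, once it passes a key ending in _id or _ids, starts converting string values beneath it to ObjectId, leaving anything that fails to parse untouched. A hypothetical call (the hex strings are illustrative):

payload = {'name': 'report',
           'owner_id': '54f112defba522406c9cc208',
           'tag_ids': ['54f112defba522406c9cc209']}
prepared = _prepare_input(payload, False)
# prepared['owner_id'] is now ObjectId('54f112defba522406c9cc208'),
# prepared['tag_ids'] holds ObjectId values, and 'name' stays a plain string.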
def _prepare(data, replace_objectid, replace_secret):
    if isinstance(data, dict):
        result = {}
        for key, val in data.items():
            if not replace_secret and ('key' in key or 'password' in key):
                result[key] = _prepare(val, replace_objectid, True)
            else:
                result[key] = _prepare(val, replace_objectid, replace_secret)
        return result
    elif isinstance(data, list):
        return [_prepare(e, replace_objectid, replace_secret) for e in data]
    elif isinstance(data, ObjectId):
        if replace_objectid:
            return str(data)
    elif replace_secret:
        return 10 * '*'
    return data
def transform_incoming(self, son, collection):
    """Add an _id field if it is missing.
    """
    if not "_id" in son:
        son["_id"] = ObjectId()
    return son

# This is now handled during BSON encoding (for performance reasons),
# but I'm keeping this here as a reference for those implementing new
# SONManipulators.
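As the trailing note says, this hook belongs to the legacy SONManipulator mechanism (removed in PyMongo 4). A minimal sketch of how such a manipulator was registered on a pre-4.0 Database, for reference only:

from pymongo import MongoClient
from pymongo.son_manipulator import SONManipulator  # legacy API
from bson.objectid import ObjectId

class ObjectIdInjector(SONManipulator):
    def transform_incoming(self, son, collection):
        if "_id" not in son:
            son["_id"] = ObjectId()  # assign an id client side
        return son

db = MongoClient()['test']
db.add_son_manipulator(ObjectIdInjector())  # applied to every incoming document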
def _legacy_add_user(self, name, password, read_only, **kwargs):
    """Uses v1 system to add users, i.e. saving to system.users.
    """
    # Use a Collection with the default codec_options.
    system_users = self._collection_default_options('system.users')
    user = system_users.find_one({"user": name}) or {"user": name}
    if password is not None:
        user["pwd"] = auth._password_digest(name, password)
    if read_only is not None:
        user["readOnly"] = read_only
    user.update(kwargs)

    # We don't care what the _id is, only that it has one
    # for the replace_one call below.
    user.setdefault("_id", ObjectId())
    try:
        system_users.replace_one({"_id": user["_id"]}, user, True)
    except OperationFailure as exc:
        # First admin user add fails gle in MongoDB >= 2.1.2
        # See SERVER-4225 for more information.
        if 'login' in str(exc):
            pass
        # First admin user add fails gle from mongos 2.0.x
        # and 2.2.x.
        elif (exc.details and
              'getlasterror' in exc.details.get('note', '')):
            pass
        else:
            raise
def _insert_one(
        self, sock_info, doc, ordered,
        check_keys, manipulate, write_concern, op_id, bypass_doc_val):
    """Internal helper for inserting a single document."""
    if manipulate:
        doc = self.__database._apply_incoming_manipulators(doc, self)
        if not isinstance(doc, RawBSONDocument) and '_id' not in doc:
            doc['_id'] = ObjectId()
        doc = self.__database._apply_incoming_copying_manipulators(doc, self)
    concern = (write_concern or self.write_concern).document
    acknowledged = concern.get("w") != 0
    command = SON([('insert', self.name),
                   ('ordered', ordered),
                   ('documents', [doc])])
    if concern:
        command['writeConcern'] = concern
    if sock_info.max_wire_version > 1 and acknowledged:
        if bypass_doc_val and sock_info.max_wire_version >= 4:
            command['bypassDocumentValidation'] = True
        # Insert command.
        result = sock_info.command(self.__database.name,
                                   command,
                                   codec_options=self.codec_options,
                                   check_keys=check_keys)
        _check_write_command_response([(0, result)])
    else:
        # Legacy OP_INSERT.
        self._legacy_write(
            sock_info, 'insert', command, acknowledged, op_id,
            bypass_doc_val, message.insert, self.__full_name, [doc],
            check_keys, acknowledged, concern, False, self.codec_options)
    if not isinstance(doc, RawBSONDocument):
        return doc.get('_id')
def insert_one(self, document, bypass_document_validation=False):
    """Insert a single document.

      >>> db.test.count({'x': 1})
      0
      >>> result = db.test.insert_one({'x': 1})
      >>> result.inserted_id
      ObjectId('54f112defba522406c9cc208')
      >>> db.test.find_one({'x': 1})
      {u'x': 1, u'_id': ObjectId('54f112defba522406c9cc208')}

    :Parameters:
      - `document`: The document to insert. Must be a mutable mapping
        type. If the document does not have an _id field one will be
        added automatically.
      - `bypass_document_validation`: (optional) If ``True``, allows the
        write to opt-out of document level validation. Default is
        ``False``.

    :Returns:
      - An instance of :class:`~pymongo.results.InsertOneResult`.

    .. seealso:: :ref:`writes-and-ids`

    .. note:: `bypass_document_validation` requires server version
      **>= 3.2**

    .. versionchanged:: 3.2
      Added bypass_document_validation support

    .. versionadded:: 3.0
    """
    common.validate_is_document_type("document", document)
    if not (isinstance(document, RawBSONDocument) or "_id" in document):
        document["_id"] = ObjectId()
    with self._socket_for_writes() as sock_info:
        return InsertOneResult(
            self._insert(sock_info, document,
                         bypass_doc_val=bypass_document_validation),
            self.write_concern.acknowledged)
def _merge_legacy(run, full_result, result, index):
    """Merge a result from a legacy opcode into the full results.
    """
    affected = result.get('n', 0)

    errmsg = result.get("errmsg", result.get("err", ""))
    if errmsg:
        # wtimeout is not considered a hard failure in
        # MongoDB 2.6 so don't treat it like one here.
        if result.get("wtimeout"):
            error_doc = {'errmsg': errmsg, 'code': _WRITE_CONCERN_ERROR}
            full_result['writeConcernErrors'].append(error_doc)
        else:
            code = result.get("code", _UNKNOWN_ERROR)
            error = _make_error(run.index(index), code, errmsg, run.ops[index])
            if "errInfo" in result:
                error["errInfo"] = result["errInfo"]
            full_result["writeErrors"].append(error)
            return

    if run.op_type == _INSERT:
        full_result['nInserted'] += 1
    elif run.op_type == _UPDATE:
        if "upserted" in result:
            doc = {_UINDEX: run.index(index), _UID: result["upserted"]}
            full_result["upserted"].append(doc)
            full_result['nUpserted'] += affected
        # Versions of MongoDB before 2.6 don't return the _id for an
        # upsert if _id is not an ObjectId.
        elif result.get("updatedExisting") is False and affected == 1:
            op = run.ops[index]
            # If _id is in both the update document *and* the query spec
            # the update document _id takes precedence.
            _id = op['u'].get('_id', op['q'].get('_id'))
            doc = {_UINDEX: run.index(index), _UID: _id}
            full_result["upserted"].append(doc)
            full_result['nUpserted'] += affected
        else:
            full_result['nMatched'] += affected
    elif run.op_type == _DELETE:
        full_result['nRemoved'] += affected
def add_insert(self, document):
    """Add an insert document to the list of ops.
    """
    validate_is_document_type("document", document)
    # Generate ObjectId client side.
    if not (isinstance(document, RawBSONDocument) or '_id' in document):
        document['_id'] = ObjectId()
    self.ops.append((_INSERT, document))
def _get_oid(data, position, dummy0, dummy1, dummy2):
    """Decode a BSON ObjectId to bson.objectid.ObjectId."""
    end = position + 12
    return ObjectId(data[position:end]), end
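As the decoder above shows, an ObjectId occupies exactly 12 bytes on the wire. The ObjectId constructor accepts those raw bytes directly, and the binary attribute gives them back, so decoding is a cheap slice-and-wrap. A quick round-trip check:

from bson.objectid import ObjectId

oid = ObjectId()
raw = oid.binary               # the 12 raw bytes as stored in BSON
assert len(raw) == 12
assert ObjectId(raw) == oid    # reconstructing from the bytes round-trips
assert str(oid) == raw.hex()   # the familiar 24-character hex form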
def test_modify_unknown_object(self):
    yield from self.maindoc.objects.modify(id=ObjectId(), docname='dn')
    total = yield from self.maindoc.objects.all().count()
    self.assertEqual(total, 0)
    self.assertFalse(None)
def insertMongoDB(items):
    collection_name = 'wechat'
    client = pymongo.MongoClient(MONGO_URI)
    db = client[MONGO_DATABASE]
    for item in items:
        item['_id'] = str(ObjectId())
        db[collection_name].insert(dict(item))
def process_item(self, item, spider):
    item['source'] = SPIDER_NAME[spider.name]
    item['_id'] = str(ObjectId())
    collection_name = "wechat" if spider.name == "wechat" else self.collection_name
    self.db[collection_name].insert(dict(item))
    return item
def add_insert(self, document):
    """Add an insert document to the list of ops.
    """
    if not isinstance(document, dict):
        raise TypeError('document must be an instance of dict')
    # Generate ObjectId client side.
    if '_id' not in document:
        document['_id'] = ObjectId()
    self.ops.append((_INSERT, document))
def object_hook(dct, compile_re=True):
    if "$oid" in dct:
        return ObjectId(str(dct["$oid"]))
    if "$ref" in dct:
        return DBRef(dct["$ref"], dct["$id"], dct.get("$db", None))
    if "$date" in dct:
        secs = float(dct["$date"]) / 1000.0
        return EPOCH_AWARE + datetime.timedelta(seconds=secs)
    if "$regex" in dct:
        flags = 0
        # PyMongo always adds $options but some other tools may not.
        for opt in dct.get("$options", ""):
            flags |= _RE_OPT_TABLE.get(opt, 0)
        if compile_re:
            return re.compile(dct["$regex"], flags)
        else:
            return Regex(dct["$regex"], flags)
    if "$minKey" in dct:
        return MinKey()
    if "$maxKey" in dct:
        return MaxKey()
    if "$binary" in dct:
        if isinstance(dct["$type"], int):
            dct["$type"] = "%02x" % dct["$type"]
        subtype = int(dct["$type"], 16)
        if subtype >= 0xffffff80:  # Handle mongoexport values
            subtype = int(dct["$type"][6:], 16)
        return Binary(base64.b64decode(dct["$binary"].encode()), subtype)
    if "$code" in dct:
        return Code(dct["$code"], dct.get("$scope"))
    if bson.has_uuid() and "$uuid" in dct:
        return bson.uuid.UUID(dct["$uuid"])
    return dct
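An object_hook like the one above is handed to the standard-library JSON decoder so MongoDB extended-JSON values come back as BSON types instead of plain dicts. A minimal sketch of the wiring (the input string is illustrative):

import json

raw = '{"_id": {"$oid": "54f112defba522406c9cc208"}, "n": 1}'
doc = json.loads(raw, object_hook=object_hook)
# doc['_id'] is ObjectId('54f112defba522406c9cc208') rather than {'$oid': ...}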
def _get_oid(data, position, as_class=None, tz_aware=False,
             uuid_subtype=OLD_UUID_SUBTYPE, compile_re=True):
    value = ObjectId(data[position:position + 12])
    position += 12
    return value, position
def validate(self, value):
    # avoiding circular reference
    from motorengine import Document

    if not isinstance(self.reference_type, type) or not issubclass(self.reference_type, Document):
        raise ValueError(
            "The field 'reference_document_type' argument must be a subclass of Document, not '%s'." % (
                str(self.reference_type)
            )
        )

    if value is not None and not isinstance(value, (self.reference_type, ObjectId)):
        return False

    return value is None or isinstance(value, ObjectId) or (hasattr(value, '_id') and value._id is not None)
def to_son(self, value):
    if value is None:
        return None
    if isinstance(value, ObjectId):
        return value
    return value._id