The following 6 code examples, extracted from open-source Python projects, illustrate how to use django.conf.settings.ELASTICSEARCH_URL.
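In every example below, ELASTICSEARCH_URL is an ordinary Django setting that is read back through django.conf.settings at runtime. As a minimal sketch of that pattern (the environment-variable lookup and the localhost default are illustrative assumptions, not taken from any of the projects below):

# settings.py -- hypothetical value; real deployments usually inject it via the environment
import os
ELASTICSEARCH_URL = os.environ.get('ELASTICSEARCH_URL', 'http://localhost:9200')

# anywhere else in the project
from django.conf import settings
from elasticsearch import Elasticsearch

es = Elasticsearch([settings.ELASTICSEARCH_URL])  # build a client from the configured URL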
def __init__(self, start, end, telescopes=None, sites=None, instrument_types=None):
    try:
        self.es = Elasticsearch([settings.ELASTICSEARCH_URL])
    except LocationValueError:
        logger.error('Could not find host. Make sure ELASTICSEARCH_URL is set.')
        raise ImproperlyConfigured('ELASTICSEARCH_URL')

    self.instrument_types = instrument_types
    self.available_telescopes = self._get_available_telescopes()

    sites = list({tk.site for tk in self.available_telescopes}) if not sites else sites
    telescopes = list({tk.telescope for tk in self.available_telescopes if tk.site in sites}) \
        if not telescopes else telescopes

    self.start = start.replace(tzinfo=timezone.utc).replace(microsecond=0)
    self.end = end.replace(tzinfo=timezone.utc).replace(microsecond=0)

    cached_event_data = cache.get('tel_event_data')
    if cached_event_data:
        self.event_data = cached_event_data
    else:
        self.event_data = self._get_es_data(sites, telescopes)
        cache.set('tel_event_data', self.event_data, 1800)
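Two details of this constructor are worth noting: a LocationValueError raised while building the client is converted into Django's ImproperlyConfigured, so a missing or malformed ELASTICSEARCH_URL fails loudly instead of surfacing later as an obscure connection error; and the fetched event data is stored in Django's cache under the key 'tel_event_data' for 1800 seconds, so repeated instantiations within that window skip the search backend entirely.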
def init_es(timeout=TIMEOUT):
    log.info("connecting to %s %s", settings.ELASTICSEARCH_URL, settings.ELASTICSEARCH_PORT)
    auth = AWSRequestsAuth(aws_access_key=settings.AWS_ACCESS_KEY_ID,
                           aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
                           aws_host=settings.ELASTICSEARCH_URL,
                           aws_region='us-west-1',
                           aws_service='es')
    auth.encode = lambda x: bytes(x.encode('utf-8'))
    es = Elasticsearch(host=settings.ELASTICSEARCH_URL,
                       port=settings.ELASTICSEARCH_PORT,
                       connection_class=RequestsHttpConnection,
                       timeout=timeout,
                       max_retries=10,
                       retry_on_timeout=True,
                       http_auth=auth)
    return es
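A possible way to exercise init_es, assuming the AWS and Elasticsearch settings are populated (the timeout value here is illustrative):

es = init_es(timeout=30)   # AWS-signed client built from the settings above
if not es.ping():          # ping() is the standard elasticsearch-py reachability check
    log.warning("Elasticsearch at %s:%s is not reachable",
                settings.ELASTICSEARCH_URL, settings.ELASTICSEARCH_PORT)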
def correct_orphan_records(self, provider='europeana', end=None):
    """[#185] Delete records from the search engine which aren't found in the database"""
    s = Search()
    q = Q('term', provider=provider)
    s = s.query(q)
    response = s.execute()
    total = response.hits.total
    # A file extracted from the production database listing all of the europeana identifiers
    identifier_file = '/tmp/europeana-identifiers.json'
    db_identifiers = set(json.load(open(identifier_file)))
    total_in_db = len(db_identifiers)
    log.info("Using search engine instance %s", settings.ELASTICSEARCH_URL)
    log.info("Total records: %d (search engine), %d (database) [diff=%d]",
             total, total_in_db, total - total_in_db)
    deleted_count = 0
    for r in s.scan():
        if r.identifier not in db_identifiers:
            img = search.Image.get(id=r.identifier)
            log.debug("Going to delete image %s", img)
            deleted_count += 1
    log.info("Deleted %d from search engine", deleted_count)
def correct_license_capitalization(self, provider='europeana', end=None):
    """[#186] Correct license capitalization"""
    s = Search()
    q = Q('term', provider=provider)
    s = s.query(q)
    response = s.execute()
    total = response.hits.total
    log.info("Using search engine instance %s", settings.ELASTICSEARCH_URL)
    mod_count = 0
    count = 0
    for r in s.scan():
        if not r.license.islower():
            img = search.Image.get(id=r.identifier)
            log.debug("[%d] Changing license %s to %s", count, img.license, img.license.lower())
            img.update(license=img.license.lower())
            mod_count += 1
        count += 1
    log.info("Modified %d records in search engine", mod_count)
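Both maintenance methods follow the same elasticsearch_dsl pattern: build a term query on provider, execute() once to report the hit count, then scan() through every matching document and act on it, logging a summary at the end. Assuming search.Image is an elasticsearch_dsl document class, img.update(license=...) issues a partial update, so only the license field is rewritten.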
def init(timeout=TIMEOUT):
    """Initialize all search objects"""
    es = init_es(timeout=timeout)
    connections.add_connection('default', es)
    log.debug("Initializing search objects for connection %s:%s",
              settings.ELASTICSEARCH_URL, settings.ELASTICSEARCH_PORT)
    return es
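Because init registers the client under the name 'default', elasticsearch_dsl objects created afterwards use it implicitly. A short usage sketch (the index name 'image' and the query are assumptions for illustration):

init()
s = Search(index='image').query('term', provider='europeana')  # uses the 'default' connection
response = s.execute()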
def get_conn(verify=True, verify_index=None):
    """
    Lazily create the connection.
    """
    # pylint: disable=global-statement
    global _CONN
    global _CONN_VERIFIED

    do_verify = False
    if _CONN is None:
        http_auth = settings.ELASTICSEARCH_HTTP_AUTH
        use_ssl = http_auth is not None
        _CONN = connections.create_connection(
            hosts=[settings.ELASTICSEARCH_URL],
            http_auth=http_auth,
            use_ssl=use_ssl,
            # make sure we verify SSL certificates (off by default)
            verify_certs=use_ssl
        )
        # Verify connection on first connect if verify=True.
        do_verify = verify

    if verify and not _CONN_VERIFIED:
        # If we have a connection but haven't verified before, do it now.
        do_verify = True

    if not do_verify:
        if not verify:
            # We only skip verification if we're reindexing or
            # deleting the index. Make sure we verify next time we connect.
            _CONN_VERIFIED = False
        return _CONN

    # Make sure everything exists.
    if verify_index is None:
        verify_index = get_default_alias()

    if not _CONN.indices.exists(verify_index):
        raise ReindexException("Unable to find index {index_name}".format(
            index_name=verify_index
        ))

    for doc_type in VALIDATABLE_DOC_TYPES:
        mapping = _CONN.indices.get_mapping(index=verify_index, doc_type=doc_type)
        if not mapping:
            raise ReindexException("Mapping {doc_type} not found".format(
                doc_type=doc_type
            ))

    _CONN_VERIFIED = True
    return _CONN
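Typical call sites, sketched under the assumption that the module-level _CONN and _CONN_VERIFIED globals start out as None and False:

conn = get_conn()              # normal path: verifies the index and doc type mappings once
conn = get_conn(verify=False)  # reindex/delete path: skips verification, forces a re-check next time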