The following code examples, extracted from open-source Python projects, illustrate how to use logging.config.get().
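All of these examples share the same lookup pattern: fetch a configuration value by key and fall back to a default when the key is absent. A minimal sketch of the two variants that recur below, one dict-style and one ConfigParser-style (names and values here are illustrative, not taken from any of the projects):

import configparser

# Dict-style lookup: the second argument is returned when the key is absent.
config = {'timeout': 30}
timeout = config.get('timeout', 10)   # -> 30
retries = config.get('retries', 3)    # -> 3, key missing so the default is used

# ConfigParser-style lookup: get(section, option) with an explicit fallback.
parser = configparser.ConfigParser()
parser.read_string('[server]\nhost = localhost\n')
host = parser.get('server', 'host', fallback='127.0.0.1')  # -> 'localhost'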
def _get_home():
    """Find user's home directory if possible.

    Otherwise, returns None.

    :see: http://mail.python.org/pipermail/python-list/2005-February/325395.html

    This function is copied from matplotlib version 1.4.3, Jan 2016
    """
    try:
        if six.PY2 and sys.platform == 'win32':
            path = os.path.expanduser(b"~").decode(sys.getfilesystemencoding())
        else:
            path = os.path.expanduser("~")
    except ImportError:
        # This happens on Google App Engine (pwd module is not present).
        pass
    else:
        if os.path.isdir(path):
            return path
    for evar in ('HOME', 'USERPROFILE', 'TMP'):
        path = os.environ.get(evar)
        if path is not None and os.path.isdir(path):
            return path
    return None
def _dump_offer_list(self, offerids, sortkey='name'):
    """
    Utility routine to dump a list of offers
    """
    offers = []
    for offerid in offerids:
        offer = self.MarketState.State[offerid]
        offer['id'] = offerid
        offers.append(offer)

    print "{0:8} {1:35} {2:35} {3}".format(
        'Ratio', 'Input Asset (What You Pay)',
        'Output Asset (What You Get)', 'Name')

    for offer in sorted(offers, key=lambda h: h[sortkey]):
        iholding = self.MarketState.State[offer['input']]
        oholding = self.MarketState.State[offer['output']]
        name = self.MarketState.i2n(offer.get('id'))

        print "{0:<8} {1:35} {2:35} {3}".format(
            offer['ratio'],
            self.MarketState.i2n(iholding['asset']),
            self.MarketState.i2n(oholding['asset']),
            name)
def svn_ver(svn_dir, do_update=True):
    """pull the latest content and get version

    svn info <path> | grep "Last Changed Rev" | cut -d" " -f4
    svn info --show-item last-changed-revision <path>
    """
    if do_update:
        cmd = 'svn up %s' % svn_dir
        (cost, out, err) = Command.excute(cmd)
        if len(err) > 0:
            Command.logger.error('excute[%s]: %s' % (cmd, err))

    cmd = 'svn info --show-item last-changed-revision %s' % svn_dir
    (cost, out, err) = Command.excute(cmd)
    if len(err) > 0:
        Command.logger.error('excute[%s]: %s' % (cmd, err))
    if len(out) > 0:
        return out.strip()
    else:
        return ''
def __init__(self, ini_file, dry_run=False):
    self.dry_run = dry_run
    self.build_info = []
    self.logger = logging.getLogger('util.agentBuilder')

    import ConfigParser
    # config = ConfigParser.RawConfigParser(allow_no_value=True)
    config = ConfigParser.ConfigParser()
    config.read(ini_file)

    root_dir = config.get('base', 'root_dir')
    self.lib_base_dir = config.get('base', 'lib_base_dir', 0, {'root_dir': root_dir})
    self.channels_dir = config.get('base', 'channels_dir', 0, {'root_dir': root_dir})
    self.demo_dir = config.get('demo', 'demo_dir', 0, {'root_dir': root_dir})

    from datetime import date
    self.apk_dir = config.get('demo', 'apk_dir', 0,
                              {'root_dir': root_dir, 'day': date.today().strftime('%m%d')})
    self.plugin_dir = config.get('plugins', 'plugin_dir', 0, {'root_dir': root_dir})
async def update(self):
    """Update sirbot

    Trigger the update method of the plugins. This is needed if the plugins
    need to perform update migration (i.e. database).
    """
    logger.info('Updating Sir Bot-a-lot')
    for name, plugin in self._plugins.items():
        plugin_update = getattr(plugin['plugin'], 'update', None)
        if callable(plugin_update):
            logger.info('Updating %s', name)
            await plugin_update(self.config.get(name, {}), self._plugins)
            logger.info('%s updated', name)
    self._session.close()
    logger.info('Sir Bot-a-lot updated')
def get(setting):
    """
    Get configuration setting.

    ``setting`` is the setting without the ``ckanext.extractor.`` prefix.

    Handles defaults and transformations.
    """
    setting = 'ckanext.extractor.' + setting
    value = config.get(setting, DEFAULTS[setting])
    for transformation in TRANSFORMATIONS[setting]:
        value = transformation(value)
    return value

# Adapted from ckanext-archiver
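DEFAULTS and TRANSFORMATIONS above are module-level mappings defined elsewhere in ckanext-extractor; the sketch below only illustrates the shape the function expects (keys and values are assumptions, not the extension's real defaults):

# Illustrative shapes only; the real keys and defaults live in ckanext-extractor.
DEFAULTS = {
    'ckanext.extractor.indexed_fields': 'author description',  # assumed default
    'ckanext.extractor.indexed_formats': 'pdf txt',            # assumed default
}

# Each setting maps to a list of callables that get() applies in order.
TRANSFORMATIONS = {
    'ckanext.extractor.indexed_fields': [str.lower, str.split],
    'ckanext.extractor.indexed_formats': [str.lower, str.split],
}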
def show_projects(ctx):
    """Show projects."""
    from fulmar.scheduler.projectdb import projectdb

    projects = projectdb.get_all()
    table = []
    headers = ['project_name', 'updated_time', 'is_stopped']
    for _, project in projects.iteritems():
        project_name = project.get('project_name')
        update_timestamp = project.get('update_time')
        update_time = datetime.datetime.fromtimestamp(update_timestamp).strftime('%Y-%m-%d %H:%M:%S')
        is_stopped = 'True' if project.get('is_stopped') else 'False'
        table.append([project_name, update_time, is_stopped])
    click.echo(tabulate(table, headers, tablefmt="grid", numalign="right"))
def trace(self, kwargs):
    exc_type, exc_value, exc_traceback = sys.exc_info()
    stack = traceback.extract_tb(exc_traceback)
    lines = []
    for i, s in enumerate(stack):
        filename = s.filename
        l = len(filename)
        shortfile = kwargs.get('shortfile', 40)
        if l > shortfile:
            filename = filename[filename.find('/', l - shortfile):]
        line = '%-40s:%-4s %s' % (
            blue() + filename,
            yellow() + str(s.lineno),
            '|' + '-' * (i * 4) + cyan() + s.name + ':' + red() + s.line)
        lines.append(line)
    lines = '\n\t'.join(lines)
    kwargs['extra'] = {
        'trace': magenta() + str(exc_type) + ' ' + bold() + magenta() +
                 str(exc_value) + '\n\t' + lines}
def setup_logging(self):
    self.logpath = os.environ.get(self.LOGENVCONF, self.LOGGING)
    try:
        # logging.config.fileConfig(self.logpath)
        with open(self.logpath) as logconfig:
            logdata = yaml.load(logconfig)
        logging.config.dictConfig(logdata)
    except Exception as e:
        print("Error with '%s': %s" % (self.logpath, e))
        logging.basicConfig(level=self.LOGLEVEL, format=self.OUTPUT_FORMAT)
        lfile = False
    else:
        lfile = True
    self.logger = logging.getLogger(self.PROG)
    if not lfile:
        self.logger.info("Using default logging settings")
    else:
        self.logger.info("Using logging settings from '%s'" % self.logpath)
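The YAML file loaded above has to deserialize into the schema accepted by logging.config.dictConfig. A minimal sketch of an equivalent dict, with illustrative handler and format values:

import logging.config

# Smallest useful dictConfig payload; values are illustrative.
LOGGING_SKETCH = {
    'version': 1,
    'formatters': {
        'simple': {'format': '%(asctime)s %(name)s %(levelname)s %(message)s'},
    },
    'handlers': {
        'console': {'class': 'logging.StreamHandler', 'formatter': 'simple'},
    },
    'root': {'level': 'INFO', 'handlers': ['console']},
}

logging.config.dictConfig(LOGGING_SKETCH)
logging.getLogger('demo').info('configured')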
def setup_users(self, users=[], domain='default'):
    users_defined = []
    for user in users:
        name = user['name']
        mail = user.get('email', None)
        desc = user.get('description', None)
        password = user.get('password', None)
        udomain = user.get('domain', domain)
        groups = user.get('groups', [])
        user_id = self.setup_user(name, password, mail, desc, udomain)
        group_ids = []
        for group in groups:
            gr = self.setup_group(group, None, udomain)
            group_ids.append(gr.id)
        self.setup_user_groups(user_id, group_ids, udomain)
        users_defined.append(user_id)
    return users_defined
def gitlab_push(request, config):
    data = json.loads((yield from request.content.read()).decode())
    branch = data['ref'].split('refs/heads/')[1]
    config.reload()
    repo_manager = RepoManager(config['name'], config['uri'],
                               config['merge_workspace'])
    handler = PushHandler(config, branch, data['commits'], repo_manager)
    # re.match(r'(.+?:\/\/.+?)\/', data['repository']['homepage']).group(1),
    loop = asyncio.get_event_loop()
    # Close the connection first, then handle the push
    # (if GitLab gets no response for too long, it repeats the request).
    loop.call_soon(handler.handle)
    return web.Response()
def start_servers(self):
    ssl = config.get('ssl', 'privatekey')
    # Launch Server
    if config.get('jsonrpc', 'listen'):
        from trytond.protocols.jsonrpc import JSONRPCDaemon
        for hostname, port in parse_listen(
                config.get('jsonrpc', 'listen')):
            self.jsonrpcd.append(JSONRPCDaemon(hostname, port, ssl))
            self.logger.info("starting JSON-RPC%s protocol on %s:%d",
                ssl and ' SSL' or '', hostname or '*', port)
    if config.get('xmlrpc', 'listen'):
        from trytond.protocols.xmlrpc import XMLRPCDaemon
        for hostname, port in parse_listen(
                config.get('xmlrpc', 'listen')):
            self.xmlrpcd.append(XMLRPCDaemon(hostname, port, ssl))
            self.logger.info("starting XML-RPC%s protocol on %s:%d",
                ssl and ' SSL' or '', hostname or '*', port)
    if config.get('webdav', 'listen'):
        from trytond.protocols.webdav import WebDAVServerThread
        for hostname, port in parse_listen(
                config.get('webdav', 'listen')):
            self.webdavd.append(WebDAVServerThread(hostname, port, ssl))
            self.logger.info("starting WebDAV%s protocol on %s:%d",
                ssl and ' SSL' or '', hostname or '*', port)
    for servers in (self.xmlrpcd, self.jsonrpcd, self.webdavd):
        for server in servers:
            server.start()
def setup_console_logging(verbosity_level):
    if verbosity_level < min(LOG_LEVELS.keys()):
        verbosity_level = min(LOG_LEVELS.keys())
    if verbosity_level > max(LOG_LEVELS.keys()):
        verbosity_level = max(LOG_LEVELS.keys())

    # loglevels = config.get('loglevels', {})
    loglevels = {}
    has_debug_loglevels = any([
        level < logging.INFO for level in loglevels.values()])

    verbosity_filter = VerbosityFilter(verbosity_level, loglevels)

    if verbosity_level < 1 and not has_debug_loglevels:
        log_format = "%(levelname)-8s %(message)s"
    else:
        # log_format = "(%(threadName)-9s) %(log_color)s%(levelname)-8s%(reset)s (%(funcName)-5s) %(message_log_color)s%(message)s"
        log_format = "%(levelname)-8s %(asctime)s [%(process)d:%(threadName)s] %(name)s\n %(message)s"
        # source: Logging Cookbook — Python 3.6.0 documentation
        # log_format = "%(asctime)-15s %(name)-5s %(levelname)-8s IP: %(ip)-15s User: %(user)-8s %(message)s"
    formatter = logging.Formatter(log_format)

    # if config['logging']['color']:
    #     handler = ColorizingStreamHandler({})
    # else:
    #     handler = logging.StreamHandler()
    handler = ColorizingStreamHandler({})
    handler.addFilter(verbosity_filter)
    handler.setFormatter(formatter)
    logging.getLogger('').addHandler(handler)


# def setup_debug_logging_to_file(config):
#     formatter = logging.Formatter(config['logging']['debug_format'])
#     handler = logging.handlers.RotatingFileHandler(
#         config['logging']['debug_file'], maxBytes=10485760, backupCount=3)
#     handler.setFormatter(formatter)
#
#     logging.getLogger('').addHandler(handler)
def create_app(config=None):
    app = Flask(__name__)
    app.config.update(config)
    app.wsgi_app = SessionMiddleware(app.wsgi_app, config['session'])
    app.register_blueprint(bp)
    if config.get('logger'):
        logging.config.dictConfig(config.get('logger'))
    _configure_error_handlers(app)
    _setup_requests(app)
    return app
def git_ver(git_dir):
    """pull the latest content and get version

    git revision cmd: git rev-list --count HEAD
    """
    cmd = 'git -C %s pull' % git_dir
    (cost, out, err) = Command.excute(cmd)
    if len(err) > 0:
        Command.logger.error('excute[%s]: %s' % (cmd, err))

    cmd = 'git -C %s rev-list --count HEAD' % git_dir
    (cost, out, err) = Command.excute(cmd)
    if len(err) > 0:
        Command.logger.error('excute[%s]: %s' % (cmd, err))
        return ''
    else:
        return out.strip('\n')
def connect_euca(host=None, aws_access_key_id=None,
                 aws_secret_access_key=None, port=8773,
                 path='/services/Eucalyptus', is_secure=False, **kwargs):
    """
    Connect to a Eucalyptus service.

    :type host: string
    :param host: the host name or ip address of the Eucalyptus server

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.ec2.connection.EC2Connection`
    :return: A connection to Eucalyptus server
    """
    from boto.ec2 import EC2Connection
    from boto.ec2.regioninfo import RegionInfo

    # Check for values in boto config, if not supplied as args
    if not aws_access_key_id:
        aws_access_key_id = config.get('Credentials',
                                       'euca_access_key_id', None)
    if not aws_secret_access_key:
        aws_secret_access_key = config.get('Credentials',
                                           'euca_secret_access_key', None)
    if not host:
        host = config.get('Boto', 'eucalyptus_host', None)

    reg = RegionInfo(name='eucalyptus', endpoint=host)
    return EC2Connection(aws_access_key_id, aws_secret_access_key,
                         region=reg, port=port, path=path,
                         is_secure=is_secure, **kwargs)
def connect_walrus(host=None, aws_access_key_id=None,
                   aws_secret_access_key=None, port=8773,
                   path='/services/Walrus', is_secure=False, **kwargs):
    """
    Connect to a Walrus service.

    :type host: string
    :param host: the host name or ip address of the Walrus server

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to Walrus
    """
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat

    # Check for values in boto config, if not supplied as args
    if not aws_access_key_id:
        aws_access_key_id = config.get('Credentials',
                                       'euca_access_key_id', None)
    if not aws_secret_access_key:
        aws_secret_access_key = config.get('Credentials',
                                           'euca_secret_access_key', None)
    if not host:
        host = config.get('Boto', 'walrus_host', None)

    return S3Connection(aws_access_key_id, aws_secret_access_key,
                        host=host, port=port, path=path,
                        calling_format=OrdinaryCallingFormat(),
                        is_secure=is_secure, **kwargs)
def connect_ia(ia_access_key_id=None, ia_secret_access_key=None,
               is_secure=False, **kwargs):
    """
    Connect to the Internet Archive via their S3-like API.

    :type ia_access_key_id: string
    :param ia_access_key_id: Your IA Access Key ID. This will also look
        in your boto config file for an entry in the Credentials
        section called "ia_access_key_id"

    :type ia_secret_access_key: string
    :param ia_secret_access_key: Your IA Secret Access Key. This will also
        look in your boto config file for an entry in the Credentials
        section called "ia_secret_access_key"

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to the Internet Archive
    """
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat

    access_key = config.get('Credentials', 'ia_access_key_id',
                            ia_access_key_id)
    secret_key = config.get('Credentials', 'ia_secret_access_key',
                            ia_secret_access_key)

    return S3Connection(access_key, secret_key,
                        host='s3.us.archive.org',
                        calling_format=OrdinaryCallingFormat(),
                        is_secure=is_secure, **kwargs)
def local_main(config):
    controller = ClientController(config['LedgerURL'],
                                  keystring=config.get('SigningKey'))
    controller.cmdloop()
def _initialize_plugins(self):
    """Initialize the plugins

    Query the configuration and the plugins for info
    (name, registry name, start priority, etc)
    """
    logger.debug('Initializing plugins')
    plugins = self._pm.hook.plugins(loop=self._loop)
    if plugins:
        for plugin in plugins:
            name = plugin.__name__
            registry_name = plugin.__registry__ or plugin.__name__
            config = self.config.get(name, {})
            priority = config.get('priority', 50)

            if priority:
                self._plugins[name] = {
                    'plugin': plugin,
                    'config': config,
                    'priority': priority,
                    'factory': registry_name
                }
                self._start_priority[priority].append(name)
    else:
        logger.error('No plugins found')
def get_config(section, key, default=None):
    config = get_config_parser()
    if default is None:
        return config.get(section=section, option=key)
    try:
        return config.get(section=section, option=key)
    except configparser.NoOptionError:
        return default
    except configparser.NoSectionError:
        return default
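Note the design choice above: with default=None the configparser exceptions propagate to the caller, otherwise they are swallowed and the default is returned. A short usage sketch with hypothetical section and option names:

# 'server' / 'timeout' / 'host' are hypothetical names for illustration.
timeout = get_config('server', 'timeout', default='30')  # '30' when unset
host = get_config('server', 'host')  # raises NoSectionError/NoOptionError when unset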
def is_field_indexed(field):
    """
    Check if a metadata field is configured to be indexed.
    """
    return _any_match(field.lower(), get('indexed_fields'))
def is_format_indexed(format):
    """
    Check if a resource format is configured to be indexed.
    """
    return _any_match(format.lower(), get('indexed_formats'))
def __init__(self, configfile=None):
    self.verbose = False
    self.market = "poloniex"
    self.api_key = None
    self.api_secret = None
    if configfile:
        logging.config.fileConfig(configfile.name)
        config = configparser.ConfigParser()
        config.readfp(configfile)
        exchange = config.get("DEFAULT", "exchange")
        self.api_key = config.get(exchange, "api_key")
        self.api_secret = config.get(exchange, "api_secret")
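The credentials portion of the file handed to __init__ above would need a DEFAULT section naming the exchange plus a matching per-exchange section. A minimal sketch of that part (section layout inferred from the code, values are placeholders):

import configparser

# Placeholder credentials; the section name must match the `exchange` value.
SAMPLE_INI = """
[DEFAULT]
exchange = poloniex

[poloniex]
api_key = YOUR_KEY
api_secret = YOUR_SECRET
"""

config = configparser.ConfigParser()
config.read_string(SAMPLE_INI)
exchange = config.get("DEFAULT", "exchange")
print(config.get(exchange, "api_key"))  # -> YOUR_KEY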
def init_app(self, app):
    config = app.config.get('LOGGING', DEFAULT_LOG)
    logs_folder = config['LOGGING_FOLDER']
    formatter = Formatter(
        config.get('formatter', DEFAULT_LOG['formatter']))

    info_log = os.path.join(logs_folder,
                            config.get('info', DEFAULT_LOG['info']))
    info_file_handler = logging.handlers.RotatingFileHandler(
        info_log, maxBytes=100000, backupCount=10)
    info_file_handler.setLevel(logging.INFO)
    info_file_handler.setFormatter(formatter)
    app.logger.addHandler(info_file_handler)

    error_log = os.path.join(logs_folder,
                             config.get('error', DEFAULT_LOG['error']))
    error_file_handler = logging.handlers.RotatingFileHandler(
        error_log, maxBytes=100000, backupCount=10)
    error_file_handler.setLevel(logging.ERROR)
    error_file_handler.setFormatter(formatter)
    app.logger.addHandler(error_file_handler)

    if app.config.get('send_mail', DEFAULT_LOG['send_mail']):
        credentials = (config['MAIL_USERNAME'], config['MAIL_PASSWORD'])
        mailhost = (config['MAIL_SERVER'], config['MAIL_PORT'])
        mail_handler = ThreadedSMTPHandler(
            secure=(), mailhost=mailhost,
            fromaddr=config['MAIL_DEFAULT_SENDER'],
            toaddrs=config['MAIL_ADMIN'],
            subject=config.get('subject', DEFAULT_LOG['subject']),
            credentials=credentials)
        mail_handler.setLevel(logging.ERROR)
        mail_handler.setFormatter(formatter)
        app.logger.addHandler(mail_handler)
def get_project_by_path(self, project_path):
    try:
        return self.gitlab.projects.get(project_path)
    except GitlabConnectionError as e:
        err('Cannot connect to the gitlab server: %s', e)
    except GitlabGetError:
        err('Project [%s] not found', project_path)
    except GitlabError as e:
        err('Error when getting project [%s]: %s' % (project_path, e))
def get_remote_branch_name(self, project, local_branch, remote):
    # check if there is upstream for local branch
    tracking_branch = self.repo.branches[local_branch].tracking_branch()
    if tracking_branch:
        remote_branch = tracking_branch.name.partition('/')[2]
    else:
        remote_branch = local_branch
    try:
        project.branches.get(remote_branch)
    except GitlabGetError:
        err('Branch [%s] from project [%s] not found',
            remote_branch, project.path_with_namespace)
    except GitlabConnectionError as e:
        err('%s', e)
    return remote_branch
def check_branch(project, branch):
    try:
        project.branches.get(branch)
    except GitlabGetError:
        err(
            'Cannot find branch [%(branch)s] for project [%(project)s]',
            {'branch': branch, 'project': project.path_with_namespace},
        )
    # Catch the more specific connection error before the generic GitlabError,
    # otherwise this clause would be unreachable.
    except GitlabConnectionError as e:
        err('%s', e)
    except GitlabError as e:
        err('Gitlab error: %s', e)
def validate_mr_data(source_project, target_project, data):
    check_branch(source_project, data['source_branch'])
    check_branch(target_project, data['target_branch'])

    if not data.get('title'):
        err('Empty [title]. Specify title of the merge request.')
def edit_mr(data, source_project, target_project, commits):
    editor = os.environ.get('EDITOR', 'nano')

    title = data.get('title')
    assignee = data.get('assignee')
    description = data.get('description')
    content = (
        'Title:\n'
        '{title}\n'
        'Assignee:\n'
        '{assignee}\n'
        'Description:\n'
        '{description}\n'
        '# You are creating a merge request:\n'
        '#\t{outline}\n'
        '#\n'
        '# Next commits will be included in the merge request:\n'
        '#\n'
        '{commits}\n'
        '#\n'
        '# Empty title will cancel the merge request.'
    ).format(
        title='{}\n'.format(title) if title else '',
        assignee='{}\n'.format(assignee) if assignee else '',
        description='{}\n'.format(description) if description else '',
        outline=get_mr_outline(data, source_project, target_project),
        commits=format_mr_commits(commits, prefix='#\t'),
    )

    with tempfile.NamedTemporaryFile() as tf:
        tf.write(content.encode('utf-8'))
        tf.flush()
        res = subprocess.run([editor, tf.name])
        tf.seek(0)
        new_data = data.copy()
        new_data.update(parse_mr_file(tf))

    return new_data
def get_logger(name=''):
    logging.config.dictConfig(get_config(
        project_name=config['project_name'],
        logs_dir=config['logs_dir'],
        smtp_settings=config.get('smtp')
    ))
    return logging.getLogger(name)
def cli(ctx, **kwargs):
    """A crawler system."""
    logging.config.fileConfig(kwargs['logging_config'])

    config = {}
    config_filepath = kwargs['config']
    if config_filepath:
        if not os.path.exists(config_filepath):
            raise IOError('No such file or directory: "%s".' % config_filepath)
        if not os.path.isfile(config_filepath):
            raise IOError('Is not a file: "%s".' % config_filepath)
        try:
            with open(config_filepath, 'r') as f:
                config = yaml.load(f)
        except Exception as err:
            raise err

    if kwargs.get('redis'):
        redis_conn = utils.connect_redis(kwargs['redis'])
    elif config.get('redis'):
        redis_conn = utils.connect_redis(config['redis']['url'])
    else:
        raise Exception('Could not find redis address.')

    mongodb_conn = None
    if kwargs.get('mongodb'):
        mongodb_conn = utils.connect_mongodb(kwargs['mongodb'])
    elif config.get('mongodb'):
        mongodb_conn = utils.connect_mongodb(config['mongodb']['url'])
    else:
        logging.warning('Could not find mongodb address. No results will be saved.')

    from fulmar.utils import LUA_RATE_LIMIT_SCRIPT
    lua_rate_limit = redis_conn.register_script(LUA_RATE_LIMIT_SCRIPT)

    setattr(utils, 'redis_conn', redis_conn)
    setattr(utils, 'lua_rate_limit', lua_rate_limit)
    setattr(utils, 'mongodb_conn', mongodb_conn)

    ctx.obj = utils.ObjectDict(ctx.obj or {})
    ctx.obj.update(config)
    return ctx
def phantomjs(ctx, phantomjs_path, port, auto_restart, args):
    """Run phantomjs if phantomjs is installed."""
    args = args or ctx.default_map and ctx.default_map.get('args', [])

    import subprocess
    g = ctx.obj
    _quit = []
    phantomjs_fetcher = os.path.join(
        os.path.dirname(fulmar.__file__), 'worker/phantomjs_fetcher.js')
    cmd = [phantomjs_path,
           # this may cause memory leak: https://github.com/ariya/phantomjs/issues/12903
           #'--load-images=false',
           '--ssl-protocol=any',
           '--disk-cache=true'] + list(args or []) + [phantomjs_fetcher, str(port)]

    try:
        _phantomjs = subprocess.Popen(cmd)
    except OSError:
        logging.warning('phantomjs not found.')
        return None

    if not g.get('phantomjs_proxy'):
        g['phantomjs_proxy'] = '127.0.0.1:%s' % port

    while True:
        _phantomjs.wait()
        if _quit or not auto_restart:
            break
        _phantomjs = subprocess.Popen(cmd)
def all(ctx):
    """Start scheduler and worker, also run phantomjs if phantomjs is installed."""
    g = ctx.obj
    sub_processes = []
    threads = []

    try:
        if not g.get('phantomjs_proxy'):
            phantomjs_config = g.get('phantomjs', {})
            phantomjs_config.setdefault('auto_restart', True)
            sub_processes.append(utils.run_in_subprocess(
                ctx.invoke, phantomjs, **phantomjs_config))
            time.sleep(2)
            if sub_processes[-1].is_alive() and not g.get('phantomjs_proxy'):
                g['phantomjs_proxy'] = '127.0.0.1:%s' % phantomjs_config.get('port', 25555)

        scheduler_config = g.get('scheduler', {})
        threads.append(utils.run_in_thread(ctx.invoke, scheduler, **scheduler_config))

        worker_config = g.get('worker', {})
        threads.append(utils.run_in_thread(ctx.invoke, worker, **worker_config))

        while threads:
            for t in threads:
                if not t.isAlive():
                    threads.remove(t)
            time.sleep(0.1)

        for sub_process in sub_processes:
            sub_process.join()
    except KeyboardInterrupt:
        logging.info('Keyboard interrupt. Bye, bye.')
    finally:
        # Need to kill subprocesses: phantomjs.
        for process in sub_processes:
            process.terminate()
def start_project(ctx, project):
    """Start a project."""
    from fulmar.message_queue import newtask_queue
    from fulmar.scheduler.projectdb import projectdb

    if not os.path.exists(project):
        raise IOError('No such file or directory: "%s".' % project)
    if not os.path.isfile(project):
        raise IOError('Is not a Python file: "%s".' % project)
    if not project.endswith('.py'):
        raise TypeError('Not a standard Python file: "%s". '
                        'Please make sure it is a Python file which ends with ".py".' % project)

    project_name = project.split('/')[-1].strip(' .py')

    project_data = projectdb.get(project_name)
    if not project_data:
        ctx.invoke(update_project, project_file=project)
        project_data = projectdb.get(project_name)

    if project_data.get('is_stopped'):
        project_data.update({'is_stopped': False})
        projectdb.set(project_name, project_data)

    newtask = {
        "project_name": project_name,
        'project_id': project_data.get('project_id'),
        "taskid": project_name + ': on_start',
        "url": 'first_task: ' + project_name,
        "process": {
            "callback": "on_start",
        },
        "schedule": {
            "is_cron": True
        },
    }
    newtask_queue.put(newtask)
    click.echo('Successfully start project: "%s".' % project_name)
def delete_project(ctx, project_name):
    """Delete a project."""
    from fulmar.scheduler.projectdb import projectdb

    project_name = project_name.split('/')[-1].strip(' .py')
    project_data = projectdb.get(project_name)
    if not project_data:
        click.echo('Sorry, can not find project: "%s".' % project_name)
        return
    projectdb.delete(project_name)
    click.echo('\nSuccessfully delete project: "%s".\n' % project_name)
def read_config(self):
    # if self.config:
    #     return
    config = ConfigParser.RawConfigParser()
    config.read(self.config_file)
    for s in config.sections():
        if s == 'global':
            if config.has_option(s, 'poll_interval'):
                self.poll_interval = int(config.get(s, 'poll_interval'))
            if config.has_option(s, 'newrelic_license_key'):
                self.license_key = config.get(s, 'newrelic_license_key')
            continue
        if not config.has_option(s, 'name') or not config.has_option(s, 'url'):
            continue
        ns = NginxStatusCollector(s, config.get(s, 'name'), config.get(s, 'url'),
                                  self.poll_interval)
        if config.has_option(s, 'http_user') and config.has_option(s, 'http_pass'):
            ns.basic_auth = base64.b64encode(
                config.get(s, 'http_user') + b':' + config.get(s, 'http_pass'))
        self.sources.append(ns)
    self.config = config
def droplet_worker(args, config, droplet, work_queue):
    while not work_queue.empty():
        # Get target info from queue
        target = work_queue.get()
        print(droplet.id, "Grabbing work...{}")
        args.workspace = target[0]
        args.domains = target[1]
        args.droplet = droplet
        # Run recon and import to elastic
        reconng.parse_args(args, config)
        print("Done working...")
    else:
        print("DONE")
def dict_overwrite(base, default={}):
    """Creates a new dictionary overwriting the values from the
    default dictionary, completing with the base key/values.
    """
    # clone current level
    new = default.copy()
    for key, value in base.items():
        if isinstance(value, list):
            new[key] = value[:]
        elif isinstance(value, dict):
            # recurse into nested dicts (the original called dict_override,
            # which does not exist; the recursion is on dict_overwrite itself)
            new[key] = dict_overwrite(value, default.get(key, {}))
        else:
            new[key] = value
    return new
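A brief usage sketch of the merge behaviour: nested dicts are merged recursively, lists are copied, and scalars from base win (values are illustrative):

defaults = {'db': {'host': 'localhost', 'port': 5432}, 'debug': False}
overrides = {'db': {'host': 'db.example.com'}, 'debug': True}

merged = dict_overwrite(overrides, defaults)
# -> {'db': {'host': 'db.example.com', 'port': 5432}, 'debug': True}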
def setup_groups(self, groups=[], domain='default'):
    groups_defined = []
    for gr in groups:
        name = gr['name']
        desc = gr.get('description', None)
        gdomain = gr.get('domain', domain)
        group_id = self.setup_group(name, desc, gdomain)
        groups_defined.append(group_id)
    return groups_defined
def setup_nova_quotas(self, project_id, instances=None, cores=None,
                      ram_mb=None, floating_ips=None):
    self._get_nova_client()
    updated = False
    msg = "Updating nova quota %s for project id '%s' from %s to %s"
    qs = self.nova.quotas.get(project_id).to_dict()

    if instances is not None and qs['instances'] != instances:
        self.logger.debug(msg % (
            'instances', project_id, qs['instances'], instances))
        qs['instances'] = instances
        updated = True
    if cores is not None and qs['cores'] != cores:
        self.logger.debug(msg % ('cores', project_id, qs['cores'], cores))
        qs['cores'] = cores
        updated = True
    if ram_mb is not None and qs['ram'] != ram_mb:
        self.logger.debug(msg % (
            'ram_mb', project_id, qs['ram'], ram_mb))
        qs['ram'] = ram_mb
        updated = True
    if floating_ips is not None and qs['floating_ips'] != floating_ips:
        self.logger.debug(msg % (
            'floating_ips', project_id, qs['floating_ips'], floating_ips))
        qs['floating_ips'] = floating_ips
        updated = True

    if updated:
        try:
            self.nova.quotas.update(
                project_id, instances=qs['instances'], cores=qs['cores'],
                ram=qs['ram'], floating_ips=qs['floating_ips'])
        except nova_exceptions.ClientException as e:
            msg = "Unable to setup new Nova quotas for project id '%s': %s"
            self.logger.error(msg % (project_id, e))
            raise
        msg = "Nova quotas updated successfully for project id '%s'"
        self.logger.info(msg % project_id)
    else:
        msg = "Nova quotas update not needed for project id '%s'"
        self.logger.debug(msg % project_id)
    return updated