Python subprocess32 module, call() code examples

The following 23 code examples, extracted from open-source Python projects, illustrate how to use subprocess32.call().
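
Before the project snippets, a minimal usage sketch of the call itself (our own illustration, not drawn from any of the projects below): subprocess32 is the Python 2 backport of the Python 3 subprocess module, so call() runs a command, waits for it to finish and returns its exit code, and it also accepts the newer keyword arguments that appear in the examples, such as timeout= and start_new_session=.

import subprocess32

# Run a command given as an argument list and get its exit code.
rc = subprocess32.call(['ls', '-l'])

# The backported keyword arguments used by the examples below: a timeout
# (subprocess32.TimeoutExpired is raised if it expires) and
# start_new_session=True to run the child in its own session.
rc = subprocess32.call(['sleep', '1'], timeout=5, start_new_session=True)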

Project: AerisCloud    Author: AerisCloud    | Project source | File source
def run(inventory, shell_cmd, limit, *args, **kwargs):
    env = ansible_env(os.environ.copy())
    cmd = [
        'ansible', limit, '-i', get_inventory_file(inventory),
        '-m', 'shell'
    ]

    if verbosity():
        cmd += ['-' + ('v' * verbosity())]

    cmd += args
    cmd += ['-a', shell_cmd]

    logger.info('running %s', ' '.join(map(quote, cmd)))
    logger.debug('env: %r', env)

    return call(cmd, start_new_session=True, env=env, **kwargs)
Project: lain    Author: laincloud    | Project source | File source
def up_node1():
    subproc.check_call(['vagrant', 'destroy', '-f', 'node1'])
    subproc.check_call(['vagrant', 'up', 'node1', '--no-provision'])
    yield "node1 is ready"

    print("Destroying node1...")
    subproc.call(['vagrant', 'destroy', '-f', 'node1'])
    print("Node1 is destroyed.")
Project: lain    Author: laincloud    | Project source | File source
def up_node2():
    subproc.check_call(['vagrant', 'destroy', '-f', 'node2'])
    subproc.check_call(['vagrant', 'up', 'node2'])
    yield "node2 is ready"

    print("Destroying node2...")
    subproc.call(['vagrant', 'destroy', '-f', 'node2'])
    print("Node2 is destroyed.")
Project: lain    Author: laincloud    | Project source | File source
def up_node3():
    subproc.check_call(['vagrant', 'destroy', '-f', 'node3'])
    subproc.check_call(['vagrant', 'up', 'node3'])
    yield "node3 is ready"

    print("Destroying node3...")
    subproc.call(['vagrant', 'destroy', '-f', 'node3'])
    print("Node3 is destroyed.")
Project: downtoearth    Author: cleardataeng    | Project source | File source
def run_terraform(self, tfvar_file):
        """Return a apply terraform after template rendered."""
        path = os.path.dirname(self.args.output)
        tfvar_file = os.path.join(path, tfvar_file)
        self.get_lambda_versions_file(tfvar_file)
        affirm = ['true', 'y', 'yes']
        decline = ['', 'false', 'n', 'no']
        tf_cmds = {
            'apply': 'terraform apply -var-file={} {}'.format(tfvar_file, path),
            'plan': 'terraform plan -var-file={} {}'.format(tfvar_file, path)
        }
        quit_cmds = ['q', 'quit']
        while True:
            run_tf = input("Run terraform? [y/N] ").lower()
            if run_tf in affirm + decline:
                run_tf = run_tf not in decline
                break
            print('Try again.')
        if run_tf is True:
            while True:
                tf_cmd = input("terraform apply or plan? [apply/plan/quit] ")
                if tf_cmd in tf_cmds:
                    subprocess.call(tf_cmds[tf_cmd], shell=True)
                    break
                if tf_cmd.lower() in quit_cmds:
                    break
                print('Try again.')
            return
        print('command to show plan:\n\t{}'.format(tf_cmds['plan']))
        print('command to apply:\n\t{}'.format(tf_cmds['apply']))
Project: ATX    Author: NetEaseGame    | Project source | File source
def windows_kill(self):
        subprocess.call(['taskkill', '/F', '/IM', 'chromedriver.exe', '/T'])
Project: afl-sancov    Author: bshastry    | Project source | File source
def run_cmd(self, cmd, collect, env=None):

        out = []

        if self.args.verbose:
            self.logr("    CMD: %s" % cmd)

        fh = None
        if self.args.disable_cmd_redirection or collect == self.Want_Output:
            fh = open(self.cov_paths['tmp_out'], 'w')
        else:
            fh = open(os.devnull, 'w')

        if env is None:
            subprocess.call(cmd, stdin=None,
                            stdout=fh, stderr=subprocess.STDOUT, shell=True, executable='/bin/bash')
        else:
            subprocess.call(cmd, stdin=None,
                            stdout=fh, stderr=subprocess.STDOUT, shell=True, env=env, executable='/bin/bash')

        fh.close()

        if self.args.disable_cmd_redirection or collect == self.Want_Output:
            with open(self.cov_paths['tmp_out'], 'r') as f:
                for line in f:
                    out.append(line.rstrip('\n'))

        return out
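
The run_cmd wrapper above redirects the command's combined output either to a temporary file that is read back afterwards, or to os.devnull when the output is not wanted. A condensed standalone sketch of the same pattern (tmp_out_path and want_output are our own placeholder names):

import os
import subprocess32 as subprocess

def run_and_collect(cmd, tmp_out_path, want_output=True):
    # Redirect stdout and stderr to a temp file when collecting, otherwise discard.
    with open(tmp_out_path if want_output else os.devnull, 'w') as fh:
        subprocess.call(cmd, stdin=None, stdout=fh,
                        stderr=subprocess.STDOUT, shell=True,
                        executable='/bin/bash')
    if not want_output:
        return []
    with open(tmp_out_path) as f:
        return [line.rstrip('\n') for line in f]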
Project: afl-sancov    Author: bshastry    | Project source | File source
def run_cmd(self, cmd, collect, env=None):

        out = []

        if self.args.verbose:
            self.logr("    CMD: %s" % cmd)

        fh = None
        if self.args.disable_cmd_redirection or collect == self.Want_Output:
            fh = open(self.cov_paths['tmp_out'], 'w')
        else:
            fh = open(os.devnull, 'w')

        if env is None:
            subprocess.call(cmd, stdin=None,
                            stdout=fh, stderr=subprocess.STDOUT, shell=True, executable='/bin/bash')
        else:
            subprocess.call(cmd, stdin=None,
                            stdout=fh, stderr=subprocess.STDOUT, shell=True, env=env, executable='/bin/bash')

        fh.close()

        if self.args.disable_cmd_redirection or collect == self.Want_Output:
            with open(self.cov_paths['tmp_out'], 'r') as f:
                for line in f:
                    out.append(line.rstrip('\n'))

        return out
Project: MatchingMarkets.py    Author: QuantEcon    | Project source | File source
def grabLicence(self):
            """
            Returns True if a CPLEX licence can be obtained.
            The licence is kept until releaseLicence() is called.
            """
            status = ctypes.c_int()
            # If the config file allows to do so (non null params), try to
            # grab a runtime license.
            if ilm_cplex_license and ilm_cplex_license_signature:
                runtime_status = CPLEX_DLL.lib.CPXsetstaringsol(
                        ilm_cplex_license,
                        ilm_cplex_license_signature)
                # if runtime_status is not zero, running with a runtime
                # license will fail. However, no error is thrown (yet)
                # because the second call might still succeed if the user
                # has another license. Let us forgive bad user
                # configuration:
                if not (runtime_status == 0) and self.msg:
                    print(
                    "CPLEX library failed to load the runtime license" +
                    "the call returned status=%s" % str(runtime_status) +
                    "Please check the pulp config file.")
            self.env = CPLEX_DLL.lib.CPXopenCPLEX(ctypes.byref(status))
            self.hprob = None
            if not(status.value == 0):
                raise PulpSolverError("CPLEX library failed on " +
                                    "CPXopenCPLEX status=" + str(status))
Project: AerisCloud    Author: AerisCloud    | Project source | File source
def shell(inventory, *args, **kwargs):
    env = ansible_env(os.environ.copy())
    cmd = ['ansible-console', '-i', get_inventory_file(inventory)] + list(args)

    if verbosity():
        cmd += ['-' + ('v' * verbosity())]

    logger.info('running %s', ' '.join(cmd))
    logger.debug('env: %r', env)

    return call(cmd, start_new_session=True, env=env, **kwargs)
Project: AerisCloud    Author: AerisCloud    | Project source | File source
def _ssh(ip, timeout, *args, **kwargs):
    call_args = [
        'ssh', ip, '-t', '-A']

    call_args += ['-o', 'ConnectTimeout %d' % timeout]

    if args:
        call_args += list(args)

    logger.info('Running %s' % ' '.join(map(quote, call_args)))

    return call(call_args, **kwargs)
Project: AerisCloud    Author: AerisCloud    | Project source | File source
def ssh_shell(self, cmd=None, cd=True, popen=False, **kwargs):
        """
        Create an interactive ssh shell on the remote VM

        :return: subprocess32.Popen if popen is True, otherwise the exit code from call()
        """
        call_args = [
            'ssh', self.ip(), '-t', '-A',
            '-l', 'vagrant',
            '-i', self.ssh_key()]
        if cmd:
            if isinstance(cmd, tuple) or isinstance(cmd, list):
                cmd = ' '.join(map(quote, cmd))

            if cd:
                cmd = '[ ! -d "{0}" ] && exit {1}; cd "{0}"; {2}'.format(
                    self.project.name(),
                    self.NO_PROJECT_DIR,
                    cmd
                )
            call_args.append(cmd)
        self._logger.debug('calling %s', ' '.join(call_args))

        if popen:
            return Popen(call_args, start_new_session=True, **kwargs)
        return call(call_args, **kwargs)
Project: TigerHost    Author: naphatkrit    | Project source | File source
def secret(ctx, commands):
    """Executes git commands in the secret directory. This literally forwards all the arguments to git.

    tigerhost-deploy secret push origin master

    becomes:

    git push origin master

    in the secret directory.
    """
    with contextmanagers.chdir(secret_dir.secret_dir_path()):
        ret = subprocess.call(['git'] + list(commands))
    ctx.exit(code=ret)
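
The same forwarding idea as a standalone sketch (a hypothetical wrapper script, not part of TigerHost): pass the remaining command-line arguments straight to git and exit with git's own return code.

import sys

import subprocess32 as subprocess

def main():
    # Forward everything after the script name to git and keep its exit code.
    sys.exit(subprocess.call(['git'] + sys.argv[1:]))

if __name__ == '__main__':
    main()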
Project: powergslb    Author: AlekseyChudov    | Project source | File source
def _do_exec(self):
        return subprocess32.call(self.monitor['args'], timeout=self.monitor['timeout']) == 0
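
When the timeout expires, call() kills the child and raises TimeoutExpired instead of returning, so a health check like the one above would normally treat that case as a failure too. A hedged sketch of that wrapping (our own, not from powergslb):

import subprocess32

def check(args, timeout):
    # A non-zero exit code and a timeout are both treated as "unhealthy".
    try:
        return subprocess32.call(args, timeout=timeout) == 0
    except subprocess32.TimeoutExpired:
        return False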
Project: orthrus    Author: test-pipeline    | Project source | File source
def run_cmd(self, cmd, collect, env=None):

        out = []

        if self.args.verbose:
            self.logr("    CMD: %s" % cmd)

        fh = None
        if self.args.disable_cmd_redirection or collect == self.Want_Output:
            fh = open(self.cov_paths['tmp_out'], 'w')
        else:
            fh = open(os.devnull, 'w')

        if env is None:
            subprocess.call(cmd, stdin=None,
                            stdout=fh, stderr=subprocess.STDOUT, shell=True, executable='/bin/bash')
        else:
            subprocess.call(cmd, stdin=None,
                            stdout=fh, stderr=subprocess.STDOUT, shell=True, env=env, executable='/bin/bash')

        fh.close()

        if self.args.disable_cmd_redirection or collect == self.Want_Output:
            with open(self.cov_paths['tmp_out'], 'r') as f:
                for line in f:
                    out.append(line.rstrip('\n'))

        return out
Project: PH5    Author: PIC-IRIS    | Project source | File source
def initialize_ph5 (self) :
        '''   Set up processing directory structure and set M from existing mini files   '''
        if self.home == None : return
        if not os.path.exists (self.home) :
            try :
                os.makedirs (self.home)
            except Exception as e :
                raise FormaIOError (4, "Failed to create output directory: {0}".format (self.home))

        for m in self.nmini :
            os.chdir (self.home)
            if not os.path.exists (m) :
                os.mkdir (m)

            try :
                os.chdir (m)
                subprocess.call ('initialize_ph5 -n master', shell=True, stdout=open (os.devnull, 'w'), stderr=open (os.devnull, 'w'))
            except Exception as e :
                raise FormaIOError (5, "Failed to initialize {0}".format (os.path.join (self.home, m)))

            files = os.listdir ('.')
            minis = filter (lambda a : a[0:5] == 'miniP' and a[-3:] == 'ph5', files)
            if len (minis) :
                if self.M == None or len (minis) > self.M :
                    self.M = len (minis)

        os.chdir (self.whereami)
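
The PH5 snippet passes a single command string with shell=True and silences all output. For reference, an equivalent sketch that skips the shell and uses an argument list instead (our own illustration, assuming initialize_ph5 is on the PATH):

import os
import subprocess32 as subprocess

with open(os.devnull, 'w') as devnull:
    rc = subprocess.call(['initialize_ph5', '-n', 'master'],
                         stdout=devnull, stderr=devnull)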
Project: posm-imagery-api    Author: mojodna    | Project source | File source
def place_file(self, id, source_path):
    target_dir = os.path.join(IMAGERY_PATH, id)
    if not os.path.exists(target_dir):
        os.mkdir(target_dir)
    output_file = os.path.abspath(os.path.join(target_dir, 'index.tif'))

    # rewrite with gdal_translate
    gdal_translate = [
        'gdal_translate',
        source_path,
        output_file,
        '-co', 'TILED=yes',
        '-co', 'COMPRESS=DEFLATE',
        '-co', 'PREDICTOR=2',
        '-co', 'BLOCKXSIZE=512',
        '-co', 'BLOCKYSIZE=512',
        '-co', 'NUM_THREADS=ALL_CPUS',
    ]

    started_at = datetime.utcnow()

    self.update_state(state='RUNNING',
                      meta={
                        'name': 'preprocess',
                        'started_at': started_at.isoformat(),
                        'status': 'Rewriting imagery'
                      })

    try:
        returncode = subprocess.call(gdal_translate, timeout=TASK_TIMEOUT)
    except subprocess.TimeoutExpired as e:
        raise Exception(json.dumps({
            'name': 'preprocess',
            'started_at': started_at.isoformat(),
            'command': ' '.join(gdal_translate),
            'status': 'Timed out'
        }))

    if returncode != 0:
        raise Exception(json.dumps({
            'name': 'preprocess',
            'started_at': started_at.isoformat(),
            'command': ' '.join(gdal_translate),
            'return_code': returncode,
            'status': 'Failed'
        }))

    if not source_path.startswith(('/vsicurl', 'http://', 'https://')):
        # delete original
        os.unlink(source_path)

    return {
        'name': 'preprocess',
        'completed_at': datetime.utcnow().isoformat(),
        'started_at': started_at,
        'status': 'Image pre-processing completed'
    }
Project: posm-imagery-api    Author: mojodna    | Project source | File source
def create_warped_vrt(self, id):
    raster_path = os.path.abspath(os.path.join(IMAGERY_PATH, id, 'index.tif'))
    vrt_path = os.path.abspath(os.path.join(IMAGERY_PATH, id, 'index.vrt'))
    meta = get_metadata(id)
    approximate_zoom = meta['meta']['approximateZoom']

    # create a warped VRT to reproject on the fly
    gdalwarp = [
        'gdalwarp',
        raster_path,
        vrt_path,
        '-r', 'cubic',
        '-t_srs', 'epsg:3857',
        '-overwrite',
        '-of', 'VRT',
        '-te', '-20037508.34', '-20037508.34', '20037508.34', '20037508.34',
        '-ts', str(2 ** approximate_zoom * 256), str(2 ** approximate_zoom * 256),
    ]

    # add an alpha band (for NODATA) if one wasn't already included
    if meta['meta']['bandCount'] < 4:
        gdalwarp.append('-dstalpha')

    started_at = datetime.utcnow()

    self.update_state(state='RUNNING',
                      meta={
                        'name': 'warped-vrt',
                        'started_at': started_at.isoformat(),
                        'status': 'Creating warped VRT'
                      })

    try:
        returncode = subprocess.call(gdalwarp, timeout=TASK_TIMEOUT)
    except subprocess.TimeoutExpired as e:
        raise Exception(json.dumps({
            'name': 'warped-vrt',
            'started_at': started_at.isoformat(),
            'command': ' '.join(gdalwarp),
            'status': 'Timed out'
        }))

    if returncode != 0:
        raise Exception(json.dumps({
            'name': 'warped-vrt',
            'started_at': started_at.isoformat(),
            'command': ' '.join(gdalwarp),
            'return_code': returncode,
            'status': 'Failed'
        }))

    return {
        'completed_at': datetime.utcnow().isoformat(),
        'started_at': started_at,
        'status': 'Warped VRT creation completed'
    }
Project: MatchingMarkets.py    Author: QuantEcon    | Project source | File source
def actualSolve(self, lp):
        """Solve a well formulated lp problem"""
        if not self.executable(self.path):
            raise PulpSolverError("PuLP: cannot execute "+self.path)
        if not self.keepFiles:
            pid = os.getpid()
            tmpLp = os.path.join(self.tmpDir, "%d-pulp.lp" % pid)
            tmpSol = os.path.join(self.tmpDir, "%d-pulp.sol" % pid)
        else:
            tmpLp = lp.name+"-pulp.lp"
            tmpSol = lp.name+"-pulp.sol"
        lp.writeLP(tmpLp, writeSOS = 0)
        proc = ["glpsol", "--cpxlp", tmpLp, "-o", tmpSol]
        if not self.mip: proc.append('--nomip')
        proc.extend(self.options)

        self.solution_time = clock()
        if not self.msg:
            proc[0] = self.path
            pipe = open(os.devnull, 'w')
            if operating_system == 'win':
                # Prevent flashing windows if used from a GUI application
                startupinfo = subprocess.STARTUPINFO()
                startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
                rc = subprocess.call(proc, stdout = pipe, stderr = pipe,
                                     startupinfo = startupinfo)
            else:
                rc = subprocess.call(proc, stdout = pipe, stderr = pipe)
            if rc:
                raise PulpSolverError("PuLP: Error while trying to execute "+self.path)
        else:
            if os.name != 'nt':
                rc = os.spawnvp(os.P_WAIT, self.path, proc)
            else:
                rc = os.spawnv(os.P_WAIT, self.executable(self.path), proc)
            if rc == 127:
                raise PulpSolverError("PuLP: Error while trying to execute "+self.path)
        self.solution_time += clock()

        if not os.path.exists(tmpSol):
            raise PulpSolverError("PuLP: Error while executing "+self.path)
        lp.status, values = self.readsol(tmpSol)
        lp.assignVarsVals(values)
        if not self.keepFiles:
            try: os.remove(tmpLp)
            except: pass
            try: os.remove(tmpSol)
            except: pass
        return lp.status
Project: MatchingMarkets.py    Author: QuantEcon    | Project source | File source
def actualSolve(self, lp):
        """Solve a well formulated lp problem"""
        if not self.executable(self.path):
            raise PulpSolverError("PuLP: cannot execute "+self.path)
        if not self.keepFiles:
            pid = os.getpid()
            tmpLp = os.path.join(self.tmpDir, "%d-pulp.lp" % pid)
            tmpSol = os.path.join(self.tmpDir, "%d-pulp.sol" % pid)
        else:
            tmpLp = lp.name+"-pulp.lp"
            tmpSol = lp.name+"-pulp.sol"
        lp.writeLP(tmpLp, writeSOS = 1)
        try: os.remove(tmpSol)
        except: pass
        cmd = self.path
        cmd += ' ' + ' '.join(['%s=%s' % (key, value)
                    for key, value in self.options])
        cmd += ' ResultFile=%s' % tmpSol
        if lp.isMIP():
            if not self.mip:
                warnings.warn('GUROBI_CMD does not allow a problem to be relaxed')
        cmd += ' %s' % tmpLp
        if self.msg:
            pipe = None
        else:
            pipe = open(os.devnull, 'w')

        return_code = subprocess.call(cmd.split(), stdout = pipe, stderr = pipe)

        if return_code != 0:
            raise PulpSolverError("PuLP: Error while trying to execute "+self.path)
        if not self.keepFiles:
            try: os.remove(tmpLp)
            except: pass
        if not os.path.exists(tmpSol):
            warnings.warn('GUROBI_CMD does not provide good solution status for non-optimal solutions')
            status = LpStatusNotSolved
        else:
            status, values, reducedCosts, shadowPrices, slacks = self.readsol(tmpSol)
        if not self.keepFiles:
            try: os.remove(tmpSol)
            except: pass
            try: os.remove("gurobi.log")
            except: pass
        if status != LpStatusInfeasible:
            lp.assignVarsVals(values)
            lp.assignVarsDj(reducedCosts)
            lp.assignConsPi(shadowPrices)
            lp.assignConsSlack(slacks)
        lp.status = status
        return status
Project: AerisCloud    Author: AerisCloud    | Project source | File source
def fix_anomalies(self):
        to_prune = []

        # machine_index = MachineIndex()
        for uuid, exports in six.iteritems(self.exports):
            # machine = machine_index.get_by_uuid(uuid)
            # machine cannot be found in the index
            # if not machine:
            #    to_prune.append(uuid)
            #    continue

            # one of the path does not exists anymore
            if [path for path in exports if not os.path.exists(path)]:
                to_prune.append(uuid)
                continue

        # remove all exports that have issues
        for uuid in to_prune:
            logger.info('pruning NFS entry for %s' % uuid)

            # taken from vagrant/plugins/hosts/linux/cap/nfs.rb
            extended_re_flag = '-r'
            sed_expr = '\\\x01^# VAGRANT-BEGIN:( {user})? {id}\x01,' \
                       '\\\x01^# VAGRANT-END:( {user})? {id}\x01 d'.format(
                           id=uuid,
                           user=os.getuid()
                       )
            if platform.system() == 'Darwin':
                extended_re_flag = '-E'
                sed_expr = '/^# VAGRANT-BEGIN:( {user})? {id}/,' \
                           '/^# VAGRANT-END:( {user})? {id}/ d'.format(
                               id=uuid,
                               user=os.getuid()
                           )

            cmd = [
                'sed',
                extended_re_flag,
                '-e',
                sed_expr,
                '-ibak',
                self.export_file
            ]

            # if we do not have write access, use sudo
            if not os.access(self.export_file, os.W_OK):
                cmd = [
                    'sudo',
                    '-p'
                    'Fixing invalid NFS exports. Administrators privileges '
                    'are required\n[sudo] password for %u',
                    '--'
                ] + cmd

            if call(cmd) != 0:
                raise RuntimeError('could not prune invalid nfs exports '
                                   '"%s" from /etc/exports' % uuid)
Project: AerisCloud    Author: AerisCloud    | Project source | File source
def run(pro, *args, **kwargs):
    """
    Run vagrant within a project
    :param pro: .project.Project
    :param args: list[string]
    :param kwargs: dict[string,string]
    :return:
    """
    with cd(pro.folder()):
        # fix invalid exports for vagrant
        NFS().fix_anomalies()

        new_env = ansible_env(os.environ.copy())

        new_env['PATH'] = os.pathsep.join([
            new_env['PATH'],
            os.path.join(aeriscloud_path, 'venv/bin')
        ])
        new_env['VAGRANT_DOTFILE_PATH'] = pro.vagrant_dir()
        new_env['VAGRANT_CWD'] = pro.vagrant_working_dir()
        new_env['VAGRANT_DISKS_PATH'] = os.path.join(data_dir(), 'disks')

        # We might want to remove that or bump the verbosity level even more
        if verbosity() >= 4:
            new_env['VAGRANT_LOG'] = 'info'

        new_env['AERISCLOUD_PATH'] = aeriscloud_path
        new_env['AERISCLOUD_ORGANIZATIONS_DIR'] = os.path.join(data_dir(),
                                                               'organizations')

        org = default_organization()
        if org:
            new_env['AERISCLOUD_DEFAULT_ORGANIZATION'] = org

        organization_name = pro.organization()
        if organization_name:
            organization = Organization(organization_name)
        else:
            organization = Organization(org)

        basebox_url = organization.basebox_url()
        if basebox_url:
            new_env['VAGRANT_SERVER_URL'] = basebox_url

        args = ['vagrant'] + list(args)
        logger.debug('running: %s\nenv: %r', ' '.join(args), new_env)

        # support for the vagrant prompt
        if args[1] == 'destroy':
            return call(args, env=new_env, **kwargs)
        else:
            process = Popen(args, env=new_env, stdout=PIPE,
                            bufsize=1, **kwargs)
            for line in iter(process.stdout.readline, b''):
                timestamp(line[:-1])
            # output exhausted; poll() picks up the exit code
            process.poll()
            return process.returncode
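
For everything except destroy, the snippet above switches from call() to Popen so that vagrant's output can be timestamped line by line as it is produced. A stripped-down sketch of that streaming pattern (handle_line is a placeholder for the timestamping callback):

from subprocess32 import PIPE, Popen

def stream(cmd, handle_line, **kwargs):
    # Read the child's stdout line by line and hand each line to a callback.
    process = Popen(cmd, stdout=PIPE, bufsize=1, **kwargs)
    for line in iter(process.stdout.readline, b''):
        handle_line(line.rstrip(b'\n'))
    process.wait()
    return process.returncode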
Project: teddypi    Author: mobilegenome    | Project source | File source
def get_file_dic(path, options):
    '''
    This function opens the supplied VCF files and saves their metadata and variants in the list of dictionaries spec_data.
    '''
    global sample_list

    spec_data = []  # create empty list that will be populated with dictionaries of VCF file information
    file_list = [f for f in os.listdir(path) if
                 options.file_pattern in f]  # get file list from input path and fname pattern

    for fname in file_list:

        if not options.inverse_deletions:  # retroseq results

            # DO VCF to BED conversion
            if fname.endswith(".vcf"):
                subprocess32.call(["vcf2bed", "-d"], stdin=open(os.path.join(path, fname)), stdout=open(os.path.join(options.out_path, fname.replace(".vcf", ".bed")),"w"))
                fname = fname.replace(".vcf", ".bed")
            else:
                copyfile(os.path.join(path, fname), os.path.join(options.out_path,fname))

            fname_splitted = fname.split(".")

            species_dict = {"sample": fname_splitted[0],
                            "fname": fname,
                            "ftype": fname_splitted[-1],
                            "meitype": fname_splitted[3],
                            "f_bedtools": BedTool(os.path.join(options.out_path, fname)),
                            "fname_sm": fname.replace(".gq.bed", ".sorted.merged.gq.bed")}

        elif options.inverse_deletions:

            fname_splitted = fname.split(".")
            species_dict = {"sample": fname_splitted[0],
                            "fname": fname,
                            "ftype": fname_splitted[-1],
                            "meitype": fname_splitted[-2],  #fname_splitted[2],
                            "f_bedtools": BedTool(os.path.join(path, fname)).saveas(os.path.join(options.out_path,fname)),
                            "fname_sm": fname.replace(".bed", ".sorted.merged.bed")}

        print "\n loading %s" % fname,
        print "\t BedTool object length: %i" % (len(species_dict.get("f_bedtools"))),

        if len(species_dict.get("f_bedtools")) < 3 or species_dict.get(
                "meitype") == "DNA":  # filter out empty BedTool object and DNA insetions
            continue
        print "\t performing analyses: ",
        for analyses in prep_analyses:  # perform initial analyses
            print "\t %s" % analyses.__name__,
            species_dict["f_bedtools"] = analyses(species_dict.get("f_bedtools")).saveas(os.path.join(options.out_path, species_dict.get("fname_sm"))) #.saveas(os.path.join(options.out_path, species_dict.get("fname_sm"))) # save again to dictionary

        # species_dict.get("f_bedtools").saveas(
        #     os.path.join(options.out_path, species_dict.get("fname_sm")))  # save to file
        spec_data.append(species_dict)  # append to list
    sample_list = set([l.get("sample") for l in spec_data])

    return spec_data