The following code examples, extracted from open-source Python projects, illustrate how os.mkdir() is used in practice.
def __init__(self, filename, folder=None, classifier=None):
    """
    :param filename: image with sudoku
    :param folder: folder where to save debug images
    :param classifier: digit classifier
    """
    self.filename = os.path.basename(filename)
    image = cv2.imread(filename)
    # All downstream processing works on a single grayscale channel.
    self.image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    self.folder = folder or FOLDER
    # Bug fix: os.mkdir raises OSError when the directory already exists,
    # which made every run after the first crash. Create it only if missing.
    debug_dir = os.path.join(self.folder, 'debug/')
    if not os.path.exists(debug_dir):
        os.mkdir(debug_dir)
    self.classifier = classifier or DigitClassifier()
    # Default initial values
    self.perspective = False
    self.debug = True
    self.counter = 0
    self.step = -1
def get_pdf(html):
    """Download every linked PDF from *html* into the COLT2016 directory,
    skipping files that already exist and reporting progress.

    :param html: page source containing ``href="...pdf">pdf`` links
    """
    pdflist = re.findall(r'href="(.+?\.pdf)">pdf', html)
    dir_name = 'COLT2016'
    # Idiom fix: 'is False' identity comparison replaced with a plain
    # truth-value test.
    if not os.path.exists(dir_name):
        os.mkdir(dir_name)
    pbar = prgbar.ProgressBar(total=len(pdflist))
    for idx, pdfurl in enumerate(pdflist):
        filename = dir_name + '/' + pdfurl
        pbar.log('http://jmlr.org/proceedings/papers/v49/' + pdfurl)
        if os.path.exists(filename):
            pbar.log('Exist')
        else:
            urllib.urlretrieve(
                'http://jmlr.org/proceedings/papers/v49/' + pdfurl, filename)
        pbar.update(index=(idx + 1))
    pbar.finish()
def save_script_rc(script_path="scripts/scriptrc", **env_vars):
    """
    Write an rc file in the charm-delivered directory containing
    exported environment variables provided by env_vars. Any charm scripts run
    outside the juju hook environment can source this scriptrc to obtain
    updated config information necessary to perform health checks or
    service changes.
    """
    juju_rc_path = "%s/%s" % (charm_dir(), script_path)
    if not os.path.exists(os.path.dirname(juju_rc_path)):
        os.mkdir(os.path.dirname(juju_rc_path))
    # Bug fix: the file was opened in binary mode ('wb') but written with
    # str data, which fails under Python 3; use text mode like the other
    # copy of this helper in the file. Also replaced the side-effect list
    # comprehension with a plain loop.
    with open(juju_rc_path, 'wt') as rc_script:
        rc_script.write("#!/bin/bash\n")
        for u, p in six.iteritems(env_vars):
            if u != "script_path":
                rc_script.write('export %s=%s\n' % (u, p))
def register(name):
    """Register *name* with the API, generating and storing a new RSA keypair.

    Prints the outcome; returns nothing.
    """
    # Hit the API to see if the name is already registered.
    if check_name(name)['status'] == 'error':
        print('{} already registered.'.format(name))
        return
    # Generate a new keypair.
    (pub, priv) = rsa.newkeys(512)
    # Idiom fix: 'if x == False' replaced with 'if not x'.
    if not os.path.exists(KEY_LOCATION):
        os.mkdir(KEY_LOCATION)
    # Persist the keypair to disk.
    with open('{}/.key'.format(KEY_LOCATION), 'wb') as f:
        pickle.dump((pub, priv), f, pickle.HIGHEST_PROTOCOL)
    r = requests.post('{}/names'.format(API_LOCATION),
                      data={'name': name, 'n': pub.n, 'e': pub.e})
    if r.json()['status'] == 'success':
        print('Successfully registered new name: {}'.format(name))
    else:
        print('Error registering name: {}'.format(name))
def __init__(self):
    """Initialise the analyzer: read config, ensure the blocklist directory
    exists, and index the .ipset / .netset files found there."""
    Analyzer.__init__(self)

    # Get config parameters
    self.path = self.getParam('config.blocklistpath', None, 'No path to blocklists provided.')
    self.ignoreolderthandays = self.getParam('config.ignoreolderthandays', 365)
    self.utc = pytz.UTC
    self.now = dt.datetime.now(tz=self.utc)

    # Create the blocklist directory if needed (owner-only permissions).
    # 0o700 replaces the Python-2-only literal 0700.
    if not os.path.exists(self.path):
        os.mkdir(self.path, 0o700)
    # Downloading/updating the list is implemented with an external cronjob
    # which git pulls the repo.

    # Read files in the given path and prepare file lists for ip- and netsets.
    # Bug fix: the previous substring test ('.ipset' in name) also matched
    # names such as 'foo.ipset.bak'; match the real extension instead.
    # Also renamed the loop variable, which shadowed the builtin 'file'.
    self.ipsets = []
    self.netsets = []
    for name in os.listdir(self.path):
        if name.endswith('.ipset'):
            self.ipsets.append(name)
        elif name.endswith('.netset'):
            self.netsets.append(name)
def setUp(self):
    """Create a temporary file-server directory with signed version metadata
    and point the updater client config at it."""
    # Bug fix: a NamedTemporaryFile was created, closed, and its *name*
    # reused for a directory -- a classic insecure temp-name race.
    # tempfile.mkdtemp() creates the directory atomically and safely.
    self.fileServerDir = tempfile.mkdtemp()
    os.environ['PYUPDATER_FILESERVER_DIR'] = self.fileServerDir
    privateKey = ed25519.SigningKey(PRIVATE_KEY.encode('utf-8'),
                                    encoding='base64')
    # Sign the canonical (sorted-keys) JSON so the signature is stable.
    signature = privateKey.sign(six.b(json.dumps(VERSIONS, sort_keys=True)),
                                encoding='base64').decode()
    VERSIONS['signature'] = signature
    keysFilePath = os.path.join(self.fileServerDir, 'keys.gz')
    with gzip.open(keysFilePath, 'wb') as keysFile:
        keysFile.write(json.dumps(KEYS, sort_keys=True))
    versionsFilePath = os.path.join(self.fileServerDir, 'versions.gz')
    with gzip.open(versionsFilePath, 'wb') as versionsFile:
        versionsFile.write(json.dumps(VERSIONS, sort_keys=True))
    os.environ['WXUPDATEDEMO_TESTING'] = 'True'
    from wxupdatedemo.config import CLIENT_CONFIG
    self.clientConfig = CLIENT_CONFIG
    self.clientConfig.PUBLIC_KEY = PUBLIC_KEY
def setUp(self):
    """Create a temporary file-server directory with signed version metadata
    and point the updater client config (including APP_NAME) at it."""
    # Bug fix: a NamedTemporaryFile was created, closed, and its *name*
    # reused for a directory -- an insecure temp-name race.
    # tempfile.mkdtemp() creates the directory atomically and safely.
    self.fileServerDir = tempfile.mkdtemp()
    os.environ['PYUPDATER_FILESERVER_DIR'] = self.fileServerDir
    privateKey = ed25519.SigningKey(PRIVATE_KEY.encode('utf-8'),
                                    encoding='base64')
    # Sign the canonical (sorted-keys) JSON so the signature is stable.
    signature = privateKey.sign(six.b(json.dumps(VERSIONS, sort_keys=True)),
                                encoding='base64').decode()
    VERSIONS['signature'] = signature
    keysFilePath = os.path.join(self.fileServerDir, 'keys.gz')
    with gzip.open(keysFilePath, 'wb') as keysFile:
        keysFile.write(json.dumps(KEYS, sort_keys=True))
    versionsFilePath = os.path.join(self.fileServerDir, 'versions.gz')
    with gzip.open(versionsFilePath, 'wb') as versionsFile:
        versionsFile.write(json.dumps(VERSIONS, sort_keys=True))
    os.environ['WXUPDATEDEMO_TESTING'] = 'True'
    from wxupdatedemo.config import CLIENT_CONFIG
    self.clientConfig = CLIENT_CONFIG
    self.clientConfig.PUBLIC_KEY = PUBLIC_KEY
    self.clientConfig.APP_NAME = APP_NAME
def test_install():
    # Install a test wheel into per-category temp dirs and verify placement.
    tempdir = mkdtemp()
    def get_supported():
        # Pretend the ('py3', 'none', 'win32') tag is supported so that
        # TESTWHEEL matches the current interpreter.
        return list(wheel.pep425tags.get_supported()) + [('py3', 'none', 'win32')]
    whl = WheelFile(TESTWHEEL, context=get_supported)
    assert whl.supports_current_python(get_supported)
    try:
        locs = {}
        # One destination directory per install category.
        for key in ('purelib', 'platlib', 'scripts', 'headers', 'data'):
            locs[key] = os.path.join(tempdir, key)
            os.mkdir(locs[key])
        whl.install(overrides=locs)
        # Everything in this wheel is platform-specific, so purelib stays empty.
        assert len(os.listdir(locs['purelib'])) == 0
        assert check(locs['platlib'], 'hello.pyd')
        assert check(locs['platlib'], 'hello', 'hello.py')
        assert check(locs['platlib'], 'hello', '__init__.py')
        assert check(locs['data'], 'hello.dat')
        assert check(locs['headers'], 'hello.dat')
        assert check(locs['scripts'], 'hello.sh')
        assert check(locs['platlib'], 'test-1.0.dist-info', 'RECORD')
    finally:
        # Always clean up the temp tree, even on assertion failure.
        shutil.rmtree(tempdir)
def init_work_dir(self):
    """Ensure a ./tmp directory exists under the current working directory
    and remember it in self.store_dir (all images are saved there).

    Portability fix: py2-only print statements rewritten in parenthesised
    single-argument form, which behaves identically under Python 2 and 3.
    Dead commented-out chdir code removed.
    """
    retval = os.getcwd()
    print('#current dir is : ' + retval)
    store_dir = retval + os.sep + 'tmp'
    print('#all imgs are going to be stored in dir :' + store_dir)
    if not os.path.exists(store_dir):
        print('#tmp dir does not exist, attemp to mkdir')
        os.mkdir(store_dir)
        print('#mkdir sucessfully')
    else:
        print('#tmp dir is already exist')
    self.store_dir = store_dir
def create(self, data):
    """Clone the repository described by *data*, replacing any stale local
    copy, optionally installing a per-repo SSH key, and return the created
    model instance with the clone attached as ``.repo``."""
    if not os.path.exists(const.REPOS_DIR):
        os.mkdir(const.REPOS_DIR)
    repo_path = os.path.join(const.REPOS_DIR, data['repo_name'])
    if os.path.exists(repo_path):
        logger.debug('Repo directory exists. Removing...')
        shutil.rmtree(repo_path)
    user_key = data.get('user_key', '')
    if user_key:
        # Dedicated deploy key for this repo; git picks it up via GIT_SSH.
        self._create_key_file(data['repo_name'], user_key)
        os.environ['GIT_SSH'] = self._get_ssh_cmd(data['repo_name'])
    cloned = Repo.clone_from(data['git_url'], repo_path)
    instance = super(GitRepo, self).create(data)
    instance.repo = cloned
    return instance
def single_stat_check(args, filename):
    """Return the previously recorded stat for *filename* under the
    container's stats directory, bootstrapping a "0" entry on first run.

    :param args: parsed CLI args; only args.container (stats dir path) is used
    :param filename: stat file name inside the container directory
    :return: the stored stat value as a string
    """
    try:
        with open(args.container + "/" + filename, "r") as f:
            return f.read().strip()
    except IOError:
        # First time running for this container: create the directory and
        # bootstrap with an empty zero stat.
        # (Fixes: py2-only 'except Exception, e' syntax; file handle leak
        # from open()/close() without 'with'; over-broad exception.)
        if not os.path.isdir(args.container):
            os.mkdir(args.container)
        stat = "0"
        with open(args.container + "/" + filename, "w") as f:
            f.write(stat + '\n')
        return stat
def single_stat_update(args, container_dir, filename):
    """Read a single stat file from inside the container via 'docker exec'
    and mirror its content into the host-side stats directory.

    :return: the last line read from the container file ("" if none)
    """
    # 2>&1 so a failing docker command still terminates the read loop.
    stat = ""
    pipe = os.popen("docker exec " + args.container + " cat " +
                    container_dir + "/" + filename + " 2>&1")
    for line in pipe:
        stat = line  # keep only the last line, as before
    pipe.close()
    # Bug fix: 'stat' used to be bound only inside the loop and probed with
    # a fragile "'stat' in locals()" check; initialising it to "" above is
    # equivalent and clearer.  Also: files are now closed via 'with', and
    # the py2-only 'except Exception, e' syntax is gone.
    try:
        with open(args.container + "/" + filename, "w") as f:
            f.write(stat)
    except IOError:
        # Stats directory missing on first run: create it and retry.
        if not os.path.isdir(args.container):
            os.mkdir(args.container)
        with open(args.container + "/" + filename, "w") as f:
            f.write(stat)
    return stat
def multi_stat_check(args, filename):
    """Parse the key/value stat rows previously saved for this container.

    :return: dict mapping stat name -> value (empty on first run)
    """
    stats = {}  # renamed from 'dict', which shadowed the builtin
    try:
        with open(args.container + "/" + filename, "r") as f:
            for line in f:
                m = _STAT_RE.match(line)
                if m:
                    stats[m.group(1)] = m.group(2)
    except IOError as e:
        # Fixes: py2-only 'except Exception, e' syntax, over-broad except,
        # and reading without 'with'.
        if not os.path.isdir(args.container):
            os.mkdir(args.container)
        debug(args.container + ": could not get last stats from " + filename)
        debug(str(e))
        # first time running for this container create empty file
        open(args.container + "/" + filename, "w").close()
    return stats
def _test_NoAccessDir(self, nodeName):
    # Verify that loading from a directory the user cannot access raises
    # CF.LoadableDevice.LoadFail instead of silently succeeding.
    devBooter, devMgr = self.launchDeviceManager("/nodes/%s/DeviceManager.dcd.xml" % nodeName)
    device = devMgr._get_registeredDevices()[0]
    fileMgr = self._domMgr._get_fileMgr()
    dirname = '/noaccess'
    testdir = os.path.join(scatest.getSdrPath(), 'dom' + dirname)
    # Create (or re-mode) the directory with no permission bits at all.
    if not os.path.exists(testdir):
        os.mkdir(testdir, 0000)
    else:
        os.chmod(testdir, 0000)
    try:
        self.assertFalse(os.access(testdir, os.R_OK|os.X_OK), 'Current user can still access directory')
        self.assertRaises(CF.LoadableDevice.LoadFail, device.load, fileMgr, dirname, CF.LoadableDevice.SHARED_LIBRARY)
    finally:
        # Always remove the inaccessible directory so later tests are unaffected.
        os.rmdir(testdir)
def test_ExistsException(self):
    self.assertNotEqual(self._domMgr, None)
    fileMgr = self._domMgr._get_fileMgr()
    # Makes sure that FileSystem::exists() throws correct exception and
    # doesn't kill domain for files in directories it cannot access
    dirname = '/noaccess'
    testdir = os.path.join(scatest.getSdrPath(), 'dom' + dirname)
    # Mode 0644 has no execute bit, so the directory cannot be traversed.
    if not os.path.exists(testdir):
        os.mkdir(testdir, 0644)
    else:
        os.chmod(testdir, 0644)
    try:
        self.assertFalse(os.access(testdir, os.R_OK|os.X_OK), 'Current user can still access directory')
        self.assertRaises(CF.InvalidFileName, fileMgr.exists, os.path.join(dirname, 'testfile'))
    finally:
        # Always remove the directory so later tests are unaffected.
        os.rmdir(testdir)
def setFile(self, filename):
    """Set the log output file, creating any missing parent directories.

    Relative paths are resolved against the current working directory.
    Stores the absolute path in self.baseFilename and records the raw
    filename in self.log4pyProps['filename'].
    """
    self.filename = filename
    if self.filename.count('/') > 0:
        aggregate = os.path.join('/')
        if not self.filename.startswith('/'):
            aggregate = os.path.join(os.getcwd() + '/')
        # Create each path component in turn; directories that already
        # exist are silently ignored, and only permission errors
        # (errno 13, EACCES) are reported, as before.
        for _dir in self.filename.split('/')[:-1]:
            if _dir == '':
                continue
            aggregate = os.path.join(aggregate, _dir)
            try:
                os.mkdir(aggregate)
            except OSError as e:
                # Fixes: py2-only 'except Exception, e' syntax and the
                # needless type check via the 'exceptions' module.
                if e.errno == 13:
                    print(e)
    self.baseFilename = os.path.abspath(filename)
    self.log4pyProps['filename'] = filename
def save_script_rc(script_path="scripts/scriptrc", **env_vars):
    """
    Write an rc file in the charm-delivered directory containing
    exported environment variables provided by env_vars. Any charm scripts run
    outside the juju hook environment can source this scriptrc to obtain
    updated config information necessary to perform health checks or
    service changes.
    """
    juju_rc_path = "%s/%s" % (charm_dir(), script_path)
    parent = os.path.dirname(juju_rc_path)
    if not os.path.exists(parent):
        os.mkdir(parent)
    with open(juju_rc_path, 'wt') as rc_script:
        rc_script.write("#!/bin/bash\n")
        # Export every provided variable except the reserved 'script_path'.
        for key, value in six.iteritems(env_vars):
            if key != "script_path":
                rc_script.write('export %s=%s\n' % (key, value))
def download_lsun(dirpath):
    """Fetch the LSUN bedroom train/val splits plus the shared test set into
    <dirpath>/lsun, skipping the download entirely when the directory
    already exists."""
    data_dir = os.path.join(dirpath, 'lsun')
    if os.path.exists(data_dir):
        print('Found LSUN - skip')
        return
    os.mkdir(data_dir)
    tag = 'latest'
    #categories = _list_categories(tag)
    for category in ['bedroom']:
        _download_lsun(data_dir, category, 'train', tag)
        _download_lsun(data_dir, category, 'val', tag)
    _download_lsun(data_dir, '', 'test', tag)
def download_mnist(dirpath):
    """Download and gunzip the four MNIST archive files into <dirpath>/mnist,
    skipping everything if the directory already exists."""
    data_dir = os.path.join(dirpath, 'mnist')
    if os.path.exists(data_dir):
        print('Found MNIST - skip')
        return
    os.mkdir(data_dir)
    url_base = 'http://yann.lecun.com/exdb/mnist/'
    file_names = ['train-images-idx3-ubyte.gz',
                  'train-labels-idx1-ubyte.gz',
                  't10k-images-idx3-ubyte.gz',
                  't10k-labels-idx1-ubyte.gz']
    for file_name in file_names:
        # Bug fix: the old '.format(**locals())' on the URL was a no-op
        # (the string contains no braces) and would misbehave on any name
        # that did contain them.
        url = url_base + file_name
        print(url)
        out_path = os.path.join(data_dir, file_name)
        print('Downloading ', file_name)
        subprocess.call(['curl', url, '-o', out_path])
        print('Decompressing ', file_name)
        subprocess.call(['gzip', '-d', out_path])
def download_file(self, link_text, link_address):
    # Download *link_address* into the user's configured download folder,
    # showing a progress dialog with a cancel button.
    def download_cancel(self, *args):
        # Signal the retrieve callback to stop (flag=0) and close the dialog.
        # NOTE(review): 'progress_load' is resolved at call time through the
        # closure, after the assignment below -- safe, but order-sensitive.
        setattr(retrieve_progress_load, "flag", 0)
        progress_load.body.dismiss()

    path_to_folder = self.downloadfolder[self.downloadkey]
    if not os.path.exists(path_to_folder):
        os.mkdir(path_to_folder)
    progress_load = \
        ProgressLoad(retrieve_callback=retrieve_progress_load,
                     events_callback=download_cancel,
                     text_button_cancel=core.string_lang_button_cancel,
                     text_already_loaded=core.string_lang_already_loaded,
                     text_total_size=core.string_lang_total_size)
    # Save under the original basename of the remote file.
    progress_load.show(link_address,
                       "{}/{}".format(path_to_folder,
                                      os.path.split(link_address)[1]))
def index_reference(self, in_fasta_fn, in_gtf_fn, num_threads=1,
                    sa_sparse_d=None, sa_index_n_bases=None,
                    chr_bin_n_bits=None, limit_ram=None):
    """Build a STAR genome index from a FASTA + GTF pair.

    Raises an Exception if the target index directory already exists;
    otherwise creates it and invokes 'STAR --runMode genomeGenerate'.
    """
    if os.path.exists(self.reference_star_path):
        raise Exception('STAR reference path %s already exists' % self.reference_star_path)
    os.mkdir(self.reference_star_path)
    args = ['STAR', '--runMode', 'genomeGenerate',
            '--genomeDir', self.reference_star_path,
            '--runThreadN', str(num_threads),
            '--genomeFastaFiles', in_fasta_fn,
            '--sjdbGTFfile', in_gtf_fn]
    # Optional tuning flags, appended only when explicitly provided.
    optional_flags = [('--limitGenomeGenerateRAM', limit_ram),
                      ('--genomeSAsparseD', sa_sparse_d),
                      ('--genomeSAindexNbases', sa_index_n_bases),
                      ('--genomeChrBinNbits', chr_bin_n_bits)]
    for flag, value in optional_flags:
        if value is not None:
            args += [flag, str(value)]
    subprocess.check_call(args)
def __init__(self, dirname, factory=rfc822.Message, create=True):
    """Initialize a Maildir instance.

    Creates the maildir and its tmp/new/cur subdirectories when *create*
    is true; otherwise raises NoSuchMailboxError for a missing mailbox.
    """
    Mailbox.__init__(self, dirname, factory, create)
    self._paths = {
        'tmp': os.path.join(self._path, 'tmp'),
        'new': os.path.join(self._path, 'new'),
        'cur': os.path.join(self._path, 'cur'),
    }
    if not os.path.exists(self._path):
        if create:
            # Consistency fix: the top-level mkdir used the legacy
            # Python-2-only octal literal 0700 while the subdirectories
            # used 0o700; both now use 0o700 (valid on py2.6+ and py3).
            os.mkdir(self._path, 0o700)
            for path in self._paths.values():
                os.mkdir(path, 0o700)
        else:
            raise NoSuchMailboxError(self._path)
    self._toc = {}
    self._toc_mtimes = {}
    # Per-subdir mtimes let later reads detect changes cheaply.
    for subdir in ('cur', 'new'):
        self._toc_mtimes[subdir] = os.path.getmtime(self._paths[subdir])
    self._last_read = time.time()   # Records last time we read cur/new
    self._skewfactor = 0.1          # Adjust if os/fs clocks are skewing
def createDir(name, force=False):
    """Create directory *name*.

    If it already exists, remove it when *force* is true; otherwise ask the
    user interactively before overwriting, aborting on 'n'/'no' or on an
    unrecognised answer.

    Portability fix: py2-only print statements rewritten in parenthesised
    single-argument form (identical output under Python 2 and 3); the
    'or'-chained answer checks collapsed into tuple membership tests.
    """
    if os.path.exists(name):
        if force:
            shutil.rmtree(name)
        else:
            response = raw_input('%s already exists. Do you wish to overwrite it? (y/n) ' % name)
            if response.lower() in ('y', 'yes'):
                shutil.rmtree(name)
            elif response.lower() in ('n', 'no'):
                print('Modeler aborted.')
                exit(0)
            else:
                print('Response not understood.')
                print('Modeler aborted.')
                exit(1)
    os.mkdir(name)
def __init__(self, actions, epsilon=1, n_history=4, on_gpu=False, model_path="", load_if_exist=True):
    """Set up the agent: Q-network, observation buffers, and the model
    store directory (loading the latest saved model when available)."""
    self.actions = actions
    self.epsilon = epsilon
    self.q = Q(n_history, len(actions), on_gpu)
    self._state = []
    # Two observation planes: current & previous frame.
    self._observations = [np.zeros((self.q.SIZE, self.q.SIZE), np.float32)
                          for _ in range(2)]  # now & pre
    self.last_action = 0
    if model_path:
        self.model_path = model_path
    else:
        self.model_path = os.path.join(os.path.dirname(__file__), "./store")
    if not os.path.exists(self.model_path):
        print("make directory to store model at {0}".format(self.model_path))
        os.mkdir(self.model_path)
    else:
        models = self.get_model_files()
        if load_if_exist and len(models) > 0:
            print("load model file {0}.".format(models[-1]))
            serializers.load_npz(os.path.join(self.model_path, models[-1]), self.q)  # use latest model
def dump(self, main_loop):
    """Overwrites MainLoopDumpManager.dump().

    Saves the main loop's parameters, iteration state and log (plus the
    algorithm accumulators when enabled) into self.folder, creating the
    folder on first use.
    """
    if not os.path.exists(self.folder):
        os.mkdir(self.folder)
    # Portability fix: py2-only 'print ""' statement parenthesised.
    print("")
    logger.info(" Saving model")
    start = time.time()
    logger.info(" ...saving parameters")
    self.dump_parameters(main_loop)
    logger.info(" ...saving iteration state")
    secure_pickle_dump(main_loop.iteration_state, self.path_to_iteration_state)
    logger.info(" ...saving log")
    secure_pickle_dump(main_loop.log, self.path_to_log)
    if self.save_accumulators:
        logger.info(" ...saving algorithm")
        self.dump_accumulators(main_loop)
    logger.info(" Model saved, took {} seconds.".format(time.time() - start))
def get_pdf(html):
    """Download every linked PDF from *html* into the ICML2015 directory,
    skipping files that already exist and reporting progress."""
    pdflist = re.findall(r'href="(.+?\.pdf)">pdf', html)
    dir_name = 'ICML2015'
    pbar = prgbar.ProgressBar(total=len(pdflist))
    # Idiom fix: 'is False' / 'is True' identity comparisons replaced with
    # plain truth-value tests.
    if not os.path.exists(dir_name):
        os.mkdir(dir_name)
    for idx, pdfurl in enumerate(pdflist):
        filename = dir_name + '/' + pdfurl
        pbar.log('http://jmlr.org/proceedings/papers/v37/' + pdfurl)
        if os.path.exists(filename):
            pbar.log('Exist')
        else:
            urllib.urlretrieve(
                'http://jmlr.org/proceedings/papers/v37/' + pdfurl, filename)
        pbar.update(index=(idx + 1))
    pbar.finish()
def get_pdf(html):
    """Download every linked PDF from *html* into the ICML2016 directory,
    skipping files that already exist and reporting progress."""
    pdflist = re.findall(r'href="(.+?\.pdf)">pdf', html)
    dir_name = 'ICML2016'
    pbar = prgbar.ProgressBar(total=len(pdflist))
    # Idiom fix: 'is False' / 'is True' identity comparisons replaced with
    # plain truth-value tests.
    if not os.path.exists(dir_name):
        os.mkdir(dir_name)
    for idx, pdfurl in enumerate(pdflist):
        filename = dir_name + '/' + pdfurl
        pbar.log('http://jmlr.org/proceedings/papers/v48/' + pdfurl)
        if os.path.exists(filename):
            pbar.log('Exist')
        else:
            urllib.urlretrieve(
                'http://jmlr.org/proceedings/papers/v48/' + pdfurl, filename)
        pbar.update(index=(idx + 1))
    pbar.finish()
def get_pdf(html):
    """Download every paper PDF referenced in *html* into the NIPS2012
    directory, skipping files that already exist."""
    pdflist = re.findall(r'href="/paper/(.+?)"', html)
    dir_name = 'NIPS2012'
    pbar = prgbar.ProgressBar(total=len(pdflist))
    # Idiom fix: 'is False' / 'is True' identity comparisons replaced with
    # plain truth-value tests.
    if not os.path.exists(dir_name):
        os.mkdir(dir_name)
    for idx, pdfurl in enumerate(pdflist):
        filename = dir_name + '/' + pdfurl + '.pdf'
        pbar.log('http://papers.nips.cc/paper/' + pdfurl + '.pdf')
        if os.path.exists(filename):
            pbar.log('Exist')
        else:
            urllib.urlretrieve(
                'http://papers.nips.cc/paper/' + pdfurl + '.pdf', filename)
        pbar.update(index=(idx + 1))
    pbar.finish()
def get_pdf(html):
    """Download every paper PDF referenced in *html* into the NIPS2016
    directory, skipping files that already exist."""
    pdflist = re.findall(r'href="/paper/(.+?)"', html)
    dir_name = 'NIPS2016'
    pbar = prgbar.ProgressBar(total=len(pdflist))
    # Idiom fix: 'is False' / 'is True' identity comparisons replaced with
    # plain truth-value tests.
    if not os.path.exists(dir_name):
        os.mkdir(dir_name)
    for idx, pdfurl in enumerate(pdflist):
        filename = dir_name + '/' + pdfurl + '.pdf'
        pbar.log('http://papers.nips.cc/paper/' + pdfurl + '.pdf')
        if os.path.exists(filename):
            pbar.log('Exist')
        else:
            urllib.urlretrieve(
                'http://papers.nips.cc/paper/' + pdfurl + '.pdf', filename)
        pbar.update(index=(idx + 1))
    pbar.finish()
def get_pdf(html):
    """Download every paper PDF referenced in *html* into the NIPS2013
    directory, skipping files that already exist."""
    pdflist = re.findall(r'href="/paper/(.+?)"', html)
    dir_name = 'NIPS2013'
    pbar = prgbar.ProgressBar(total=len(pdflist))
    # Idiom fix: 'is False' / 'is True' identity comparisons replaced with
    # plain truth-value tests.
    if not os.path.exists(dir_name):
        os.mkdir(dir_name)
    for idx, pdfurl in enumerate(pdflist):
        filename = dir_name + '/' + pdfurl + '.pdf'
        pbar.log('http://papers.nips.cc/paper/' + pdfurl + '.pdf')
        if os.path.exists(filename):
            pbar.log('Exist')
        else:
            urllib.urlretrieve(
                'http://papers.nips.cc/paper/' + pdfurl + '.pdf', filename)
        pbar.update(index=(idx + 1))
    pbar.finish()
def get_pdf(html):
    """Download every paper PDF referenced in *html* into the NIPS2014
    directory, skipping files that already exist."""
    pdflist = re.findall(r'href="/paper/(.+?)"', html)
    dir_name = 'NIPS2014'
    pbar = prgbar.ProgressBar(total=len(pdflist))
    # Idiom fix: 'is False' / 'is True' identity comparisons replaced with
    # plain truth-value tests.
    if not os.path.exists(dir_name):
        os.mkdir(dir_name)
    for idx, pdfurl in enumerate(pdflist):
        filename = dir_name + '/' + pdfurl + '.pdf'
        pbar.log('http://papers.nips.cc/paper/' + pdfurl + '.pdf')
        if os.path.exists(filename):
            pbar.log('Exist')
        else:
            urllib.urlretrieve(
                'http://papers.nips.cc/paper/' + pdfurl + '.pdf', filename)
        pbar.update(index=(idx + 1))
    pbar.finish()
def get_pdf(html):
    """Download every open-access PDF linked from *html* into the CVPR2014
    directory, naming files after the trailing 'papers/<name>.pdf' part."""
    pdflist = re.findall(r'href="(.+?\.pdf)">pdf', html)
    dir_name = 'CVPR2014'
    pbar = prgbar.ProgressBar(total=len(pdflist))
    # Idiom fix: 'is False' / 'is True' identity comparisons replaced with
    # plain truth-value tests; the inner regex is hoisted out of the loop.
    if not os.path.exists(dir_name):
        os.mkdir(dir_name)
    name_re = re.compile(r'papers/(.+?\.pdf)')
    for idx, pdfurl in enumerate(pdflist):
        filename = dir_name + '/' + re.findall(name_re, pdfurl)[0]
        pbar.log('http://www.cv-foundation.org/openaccess/' + pdfurl)
        if os.path.exists(filename):
            pbar.log('Exist')
        else:
            urllib.urlretrieve(
                'http://www.cv-foundation.org/openaccess/' + pdfurl, filename)
        pbar.update(index=(idx + 1))
    pbar.finish()
def get_pdf(html):
    """Download every open-access PDF linked from *html* into the CVPR2016
    directory, naming files after the trailing 'papers/<name>.pdf' part."""
    pdflist = re.findall(r'href="(.+?\.pdf)">pdf', html)
    dir_name = 'CVPR2016'
    pbar = prgbar.ProgressBar(total=len(pdflist))
    # Idiom fix: 'is False' / 'is True' identity comparisons replaced with
    # plain truth-value tests; the inner regex is hoisted out of the loop.
    if not os.path.exists(dir_name):
        os.mkdir(dir_name)
    name_re = re.compile(r'papers/(.+?\.pdf)')
    for idx, pdfurl in enumerate(pdflist):
        filename = dir_name + '/' + re.findall(name_re, pdfurl)[0]
        pbar.log('http://www.cv-foundation.org/openaccess/' + pdfurl)
        if os.path.exists(filename):
            pbar.log('Exist')
        else:
            urllib.urlretrieve(
                'http://www.cv-foundation.org/openaccess/' + pdfurl, filename)
        pbar.update(index=(idx + 1))
    pbar.finish()
def get_pdf(html):
    """Download every open-access PDF linked from *html* into the CVPR2015
    directory, naming files after the trailing 'papers/<name>.pdf' part."""
    pdflist = re.findall(r'href="(.+?\.pdf)">pdf', html)
    dir_name = 'CVPR2015'
    pbar = prgbar.ProgressBar(total=len(pdflist))
    # Idiom fix: 'is False' / 'is True' identity comparisons replaced with
    # plain truth-value tests; the inner regex is hoisted out of the loop.
    if not os.path.exists(dir_name):
        os.mkdir(dir_name)
    name_re = re.compile(r'papers/(.+?\.pdf)')
    for idx, pdfurl in enumerate(pdflist):
        filename = dir_name + '/' + re.findall(name_re, pdfurl)[0]
        pbar.log('http://www.cv-foundation.org/openaccess/' + pdfurl)
        if os.path.exists(filename):
            pbar.log('Exist')
        else:
            urllib.urlretrieve(
                'http://www.cv-foundation.org/openaccess/' + pdfurl, filename)
        pbar.update(index=(idx + 1))
    pbar.finish()
def get_pdf(html):
    """Download every open-access PDF linked from *html* into the CVPR2013
    directory, naming files after the trailing 'papers/<name>.pdf' part."""
    pdflist = re.findall(r'href="(.+?\.pdf)">pdf', html)
    dir_name = 'CVPR2013'
    pbar = prgbar.ProgressBar(total=len(pdflist))
    # Idiom fix: 'is False' / 'is True' identity comparisons replaced with
    # plain truth-value tests; the inner regex is hoisted out of the loop.
    if not os.path.exists(dir_name):
        os.mkdir(dir_name)
    name_re = re.compile(r'papers/(.+?\.pdf)')
    for idx, pdfurl in enumerate(pdflist):
        filename = dir_name + '/' + re.findall(name_re, pdfurl)[0]
        pbar.log('http://www.cv-foundation.org/openaccess/' + pdfurl)
        if os.path.exists(filename):
            pbar.log('Exist')
        else:
            urllib.urlretrieve(
                'http://www.cv-foundation.org/openaccess/' + pdfurl, filename)
        pbar.update(index=(idx + 1))
    pbar.finish()
def save_testcase(self, ip, testcases):
    """Dump the given testcases to testcase_<ip>/testcase_<i>.json for a
    client that stopped responding.

    :param ip: client address used to name the output directory
    :param testcases: iterable of raw (bytes) testcase payloads
    :raises PJFBaseException: wrapping any unexpected error
    """
    try:
        dir_name = "testcase_{0}".format(ip)
        print("[\033[92mINFO\033[0m] Client {0} seems to not respond anymore, saving testcases".format(ip))
        try:
            os.mkdir(dir_name)
        except OSError:
            pass  # directory already exists
        # enumerate() replaces the manual counter; the redundant t.close()
        # inside the 'with' block is removed ('with' already closes it).
        for count, test in enumerate(testcases):
            with open("{0}/testcase_{1}.json".format(dir_name, count), "wb") as t:
                t.write(test)
    except Exception as e:
        raise PJFBaseException(e.message if hasattr(e, "message") else str(e))
def save_testcase(self, testcase):
    """
    Save all testcases collected during monitoring into
    testcase_<process-name>/testcase_<n>.json, advancing
    self.testcase_count across calls.

    :raises PJFBaseException: wrapping any unexpected error
    """
    try:
        if self.config.debug:
            print("[\033[92mINFO\033[0m] Saving testcase...")
        # Directory named after the monitored executable's basename.
        dir_name = "testcase_{0}".format(os.path.basename(shlex.split(self.config.process_to_monitor)[0]))
        try:
            os.mkdir(dir_name)
        except OSError:
            pass  # directory already exists
        for test in testcase:
            # Redundant t.close() inside the 'with' block removed.
            with open("{0}/testcase_{1}.json".format(dir_name, self.testcase_count), "wb") as t:
                t.write(test)
            self.testcase_count += 1
    except Exception as e:
        raise PJFBaseException(e.message if hasattr(e, "message") else str(e))
def mkdir_file(self):
    """Create the per-run result directory tree and return its root path.

    Layout: <log_file>/<timestamp+2 random digits>/{log,per,img,status}

    :return: path of the newly created per-run result directory
    """
    ini = U.ConfigIni()
    result_file = str(ini.get_ini('test_case', 'log_file'))
    # Timestamp plus two random digits so concurrent runs do not collide.
    result_file_every = result_file + '/' + \
        time.strftime("%Y-%m-%d_%H_%M_%S{}".format(random.randint(10, 99)),
                      time.localtime(time.time()))
    file_list = [
        result_file,
        result_file_every,
        result_file_every + '/log',
        result_file_every + '/per',
        result_file_every + '/img',
        result_file_every + '/status']
    # The loop creates result_file first (it heads file_list), so the old
    # separate pre-creation of result_file was redundant and is removed.
    for file_path in file_list:
        if not os.path.exists(file_path):
            os.mkdir(file_path)
    return result_file_every
def ignore(dest):
    """Append *dest* (normalised to forward slashes) to Git's ignore file
    unless it is already listed, creating the parent directory on demand."""
    try:
        with open(Git.ignore_file) as f:
            already_listed = dest in f.read().splitlines()
    except IOError:
        already_listed = False
    if already_listed:
        return
    try:
        parent_dir = os.path.dirname(Git.ignore_file)
        if not os.path.exists(parent_dir):
            os.mkdir(parent_dir)
        with open(Git.ignore_file, 'a') as f:
            f.write(dest.replace("\\", "/") + '\n')
    except IOError:
        error("Unable to write ignore file in \"%s\"" % os.path.join(getcwd(), Git.ignore_file), 1)
def unignore(dest):
    """Remove *dest* from Git's ignore file if present, rewriting the file
    (and creating its parent directory on demand)."""
    try:
        with open(Git.ignore_file) as f:
            lines = f.read().splitlines()
    except IOError:
        lines = []
    if dest not in lines:
        return
    lines.remove(dest)
    try:
        parent_dir = os.path.dirname(Git.ignore_file)
        if not os.path.exists(parent_dir):
            os.mkdir(parent_dir)
        with open(Git.ignore_file, 'w') as f:
            f.write('\n'.join(lines) + '\n')
    except IOError:
        error("Unable to write ignore file in \"%s\"" % os.path.join(getcwd(), Git.ignore_file), 1)
def get_filename(self):
    """Compute a fresh PNG target path next to the current view's file.

    A directory named after the file (sans extension) is created alongside
    it, and the first unused <name><i>.png inside it is chosen.

    :return: (absolute path, path relative to the file's directory)
    """
    filename = self.view.file_name()
    # Directory named after the current file, without its extension.
    dirname, _ = os.path.splitext(filename)
    fn_without_ext = os.path.basename(dirname)
    # lexists: treat a dangling symlink as existing so mkdir doesn't fail.
    if not os.path.lexists(dirname):
        os.mkdir(dirname)
    i = 0
    while True:
        # Idiom fix: os.path.join() wrapped around a single pre-joined
        # string was a no-op; build the relative path directly.
        rel_filename = "%s/%s%d.png" % (fn_without_ext, fn_without_ext, i)
        abs_filename = os.path.join(dirname, "%s%d.png" % (fn_without_ext, i))
        if not os.path.exists(abs_filename):
            break
        i += 1
    print("save file: " + abs_filename + "\nrel " + rel_filename)
    return abs_filename, rel_filename
def run_devpi_command(project, logger, params):
    """Run a devpi command, teeing its output to $dir_reports/devpi/<name>,
    and raise BuildFailedException on a non-zero exit code."""
    reports_dir = project.expand_path("$dir_reports/devpi")
    if not os.path.exists(reports_dir):
        os.mkdir(reports_dir)
    logger.debug("Executing devpi command %s", params)
    output_file_path = os.path.join(reports_dir, params[0].replace("/", ""))
    invocation = ["devpi"] + list(params)
    with open(output_file_path, "w") as output_file:
        return_code = subprocess.Popen(invocation,
                                       cwd=project.expand_path("$dir_dist"),
                                       stdout=output_file,
                                       stderr=output_file,
                                       shell=False).wait()
    if return_code != 0:
        raise BuildFailedException("Error while executing devpi command %s, see %s for details" % (params, output_file_path))
def run_setup_commands(project, logger, commands):
    """Run each setup.py command via the current interpreter, teeing output
    to $dir_reports/distutils/<command>, raising BuildFailedException on a
    non-zero exit code."""
    reports_dir = project.expand_path("$dir_reports/distutils")
    if not os.path.exists(reports_dir):
        os.mkdir(reports_dir)
    setup_script = project.expand_path("$dir_dist/setup.py")
    for command in commands:
        logger.debug("Executing distutils command %s", command)
        output_file_path = os.path.join(reports_dir, command.replace("/", ""))
        with open(output_file_path, "w") as output_file:
            invocation = [sys.executable, setup_script] + command.split()
            return_code = subprocess.Popen(invocation,
                                           cwd=project.expand_path("$dir_dist"),
                                           stdout=output_file,
                                           stderr=output_file,
                                           shell=False).wait()
        if return_code != 0:
            raise BuildFailedException("Error while executing setup command %s, see %s for details" % (command, output_file_path))
def plotGeneratedImages(epoch, example=100, dim=(10, 10), figsize=(10, 10)):
    """Sample *example* images from the generator and save them as a grid
    to generated_image/wgan_generated_img_epoch_<epoch>.png."""
    noise = np.random.normal(0, 1, size=(example, randomDim))
    images = generator.predict(noise).reshape(example, 28, 28)
    plt.figure(figsize=figsize)
    for idx in range(example):
        plt.subplot(dim[0], dim[1], idx + 1)
        plt.imshow(images[idx], interpolation='nearest', cmap='gray')
        # drop the x and y axis
        plt.axis('off')
    plt.tight_layout()
    if not os.path.exists('generated_image'):
        os.mkdir('generated_image')
    plt.savefig('generated_image/wgan_generated_img_epoch_%d.png' % epoch)
def setup_compiler_paths(self, clang_path):
    # Point $CC at the wrapper matching self.mode and set up per-mode cache
    # directories (ccache / clang-hash) under the temp directory.
    if "ccache" in self.mode.value:
        cache_dir = os.path.join(self.tmp_directory.path, "ccache")
        os.mkdir(cache_dir)
        os.environ["CCACHE_DIR"] = cache_dir
    if "clang-hash" in self.mode.value:
        cache_dir = os.path.join(self.tmp_directory.path, "clang-hash-cache")
        os.mkdir(cache_dir)
        os.environ["CLANG_HASH_CACHE"] = cache_dir
    if self.mode.value == "normal":
        CC = os.path.join(clang_path, "build/wrappers/clang-normal")
    elif self.mode.value == "clang-hash":
        # NOTE(review): the 'clang-hash-stop' assignment below is commented
        # out and the 'clang-hash-collect' elif is disabled, so the
        # "clang-hash" mode currently falls through to the *collect* wrapper
        # assignment -- confirm this is intentional and not a leftover edit.
        # CC = os.path.join(clang_path, "build/wrappers/clang-hash-stop")
    #elif self.mode.value == "clang-hash-collect":
        CC = os.path.join(clang_path, "build/wrappers/clang-hash-collect")
    elif self.mode.value == "ccache-clang-hash":
        CC = os.path.join(clang_path, "build/wrappers/clang-ccache-hash-stop")
    elif self.mode.value == "ccache":
        CC = os.path.join(clang_path, "build/wrappers/clang-ccache")
    else:
        raise RuntimeError("Not a valid mode")
    # Export the chosen wrapper for child build processes and keep a copy.
    os.environ['CC'] = CC
    self.CC = CC
def __init__(self):
    """Initialise broker clients and zeroed CNY/BTC balance counters, and
    make sure the output directory exists."""
    self.clients = {
        # "HaobtcCNY": haobtccny.PrivateHaobtcCNY(config.HAOBTC_API_KEY, config.HAOBTC_SECRET_TOKEN),
        "BrokerCNY": brokercny.PrivateBrokerCNY(),
    }
    self.cny_balance = 0
    self.btc_balance = 0
    self.cny_frozen = 0
    self.btc_frozen = 0
    self.cny_total = 0
    self.btc_total = 0
    # Bug fix: the bare 'except: pass' silently swallowed every error,
    # including a missing self.out_dir attribute. Only the expected
    # "directory already exists" OSError is ignored now.
    # NOTE(review): self.out_dir is presumably a class attribute defined
    # elsewhere in this class -- confirm.
    try:
        os.mkdir(self.out_dir)
    except OSError:
        pass