The following 48 code examples, extracted from open-source Python projects, illustrate how to use os.path.isdir().
def handle_template(self, template, subdir):
    """
    Determine where the app or project templates are located.

    Uses django.__path__[0] as the default because the directory into
    which Django was installed is not known in advance. Raises
    CommandError when the template reference cannot be resolved.
    """
    if template is None:
        return path.join(django.__path__[0], 'conf', subdir)

    if template.startswith('file://'):
        template = template[7:]
    expanded = path.normpath(path.expanduser(template))
    if path.isdir(expanded):
        return expanded
    if self.is_url(template):
        # downloads the file and returns the path
        absolute = self.download(template)
    else:
        absolute = path.abspath(expanded)
    if path.exists(absolute):
        return self.extract(absolute)

    raise CommandError("couldn't handle %s template %s." %
                       (self.app_or_project, template))
def save(self, filepath=None, copy=False):
    """
    Save every decoded NNTPContent attachment.

    Content is written into the directory given by `filepath`; when no
    filepath is specified, each attachment falls back to its own
    work_dir. Returns False as soon as anything fails, True otherwise.
    """
    if filepath and not isdir(filepath):
        # NOTE(review): mkdir here is presumably a project wrapper that
        # returns a bool (os.mkdir returns None) -- verify.
        if not mkdir(filepath):
            # We could not create the target directory
            return False
    # all() short-circuits on the first failed save, like the
    # original early-return loop
    return all(
        attachment.save(filepath=filepath, copy=copy)
        for attachment in self.decoded
    )
def dirsize(src):
    """
    Return the total size in bytes of all files directly inside `src`.

    Returns 0 when `src` is not a directory, and None when the size
    cannot be calculated (e.g. an I/O error mid-scan).
    """
    if not isdir(src):
        # Nothing to measure
        return 0
    try:
        # pushd is presumably a project context manager that chdir()s
        # into src for the duration of the block -- verify.
        with pushd(src, create_if_missing=False):
            total = 0
            for entry in listdir('.'):
                if isfile(entry):
                    total += getsize(entry)
    except (OSError, IOError):
        return None
    return total
def write(self, out_path, context):
    """
    Render this page's Jinja template with `context` and write the
    result to `out_path`, creating the output directory if needed.
    """
    out_dir = dirname(out_path)
    if not isdir(out_dir):
        # mkdir_p is presumably a project mkdir -p helper -- verify
        mkdir_p(out_dir, self.setup.mkdir_perms, exist_ok=True)
    self.setup.log.info("format: %s (%s) -> %s" % (
        self.file_name, self.file_path, out_path))
    # Variables every rendered page receives automatically
    auto_vars = dict(
        page=self.file_path,
        root=self.relative_root_path,
    )
    context = context.new_child(auto_vars)
    template = self.setup.jinja.get_template(self.file_path)
    self.setup.template_writer.write(template, context, out_path)
def enumerate_backups_entities():
    """Yield the entity object stored with each available backup."""
    if not isdir(Backuper.backups_dir):
        return
    # Each backup lives in its own subdirectory
    for directory in listdir(Backuper.backups_dir):
        entity_file = path.join(Backuper.backups_dir, directory, 'entity.tlo')
        # Only directories containing an entity file are valid backups
        if not isfile(entity_file):
            continue
        with open(entity_file, 'rb') as file:
            with BinaryReader(stream=file) as reader:
                try:
                    yield reader.tgread_object()
                except TypeNotFoundError:
                    # Old user, scheme got updated, don't care.
                    pass

#endregion

#region Backup exists and deletion
def find_rain(src, paths=None):
    """
    Resolve a Rain source reference to a concrete .rn file path.

    Absolute references search from the filesystem root; relative
    (non-dot) references search get_paths() plus any extra `paths`.
    Matches, in order: src + '.rn', src itself (when it already ends
    in .rn), or src as a package directory containing _pkg.rn.
    Returns None when nothing matches.
    """
    # Hardening: avoid the shared mutable-default-argument pitfall;
    # None means "no extra search paths" (behavior unchanged).
    if paths is None:
        paths = []
    if src[0] == '/':
        paths = ['']
    elif src[0] != '.':
        paths = get_paths() + paths
    for base in paths:
        candidate = join(base, src)
        if isfile(candidate + '.rn'):
            return candidate + '.rn'
        elif isfile(candidate) and src.endswith('.rn'):
            return candidate
        elif isdir(candidate) and isfile(join(candidate, '_pkg.rn')):
            return join(candidate, '_pkg.rn')

# find any file from a string
def getAllProgs():
    """
    Build a nested dict describing the Gentoo portage tree.

    Maps main-category -> sub-category -> list of package names, based
    on the directory layout under /usr/portage. Category directories
    are expected to be named "<main>-<sub>".
    """
    portage = "/usr/portage"
    categories = [name for name in listdir(portage)
                  if isdir(portage + "/" + name)]
    result = {}
    # Compile once instead of matching the same pattern twice per loop
    pattern = re.compile("(?P<main>.+)\-(?P<sub>.+)")
    for category in categories:
        found = pattern.match(category)
        if not found:
            continue
        main = found.group("main")
        sub = found.group("sub")
        if not result.get(main):
            # First sub-category seen for this main category
            result[main] = {sub: []}
        else:
            result[main][sub] = []
        cat_dir = "/usr/portage/{}-{}".format(main, sub)
        # Packages are the subdirectories of the category directory
        result[main][sub].extend(
            name for name in listdir(cat_dir)
            if isdir("{}/{}".format(cat_dir, name)))
    return result
def show_selected_files(self):
    """
    Update the status bar with a summary of the current selection:
    directory count, file count, and cumulative file size. Falls back
    to the default status bar refresh when nothing is selected.
    """
    selected = self.pane.get_selected_files()
    dir_folders = 0       # number of selected directories
    dir_files = 0         # number of selected regular files
    dir_filesize = 0      # cumulative size of selected files, in bytes
    if selected:
        for f in selected:
            if path.isdir(f):
                dir_folders += 1
            else:
                dir_files += 1
                # Only plain files contribute to the size total
                dir_filesize += stat(f).st_size
        # ByteConverter presumably renders a human-readable size -- verify
        bc = ByteConverter(dir_filesize)
        # Thousands-separated counts for display
        dir_foldersK = str("{0:,}".format(dir_folders))  # old use str(dir_folders)
        dir_filesK = str("{0:,}".format(dir_files))  # for ' ' instead of ',' .replace(',', ' ')
        statusbar = "Selected* "
        # Just.Fd / Just.Fl / Just.Sz are presumably fixed column
        # widths for right-justified alignment -- verify
        statusbar += "Dirs: " + dir_foldersK.rjust(Just.Fd, ' ') + " "
        statusbar += "Files: " + dir_filesK.rjust(Just.Fl, ' ') + " "
        statusbar += "? Size: " + str(bc.calc()).rjust(Just.Sz, ' ')
        show_status_message(statusbar)
    else:
        StatusBarExtended.refresh(self)
def new():
    """Handle the project-creation form: validate on POST, render on GET."""
    form = ProjectForm(request.form)
    if request.method == 'POST' and form.validate():
        user_repo_path = join('repos', form.name.data)
        if os.path.isdir(user_repo_path):
            # A repository directory with that name already exists
            flash(_('This project name already exists'), 'error')
        else:
            project = Project(form.name.data, current_user)
            db.session.add(project)
            db.session.commit()
            flash(_('Project created successfuly!'), 'info')
            return redirect(url_for('branches.view',
                                    project=form.name.data,
                                    branch='master',
                                    filename='index'))
    return render_template('new.html', form=form)
def makeavgdir(self):
    """
    Create the "avg" working directory inside the dataset path and its
    processed_images / aligned_images / correlation_images / results
    sub-directories, recording each path in self.subfolders.
    """
    self.avgpath = join(self.path, "avg")
    if not isdir(self.avgpath):
        mkdir(self.avgpath)
    for folder in ("processed_images", "aligned_images",
                   "correlation_images", "results"):
        target = join(self.avgpath, folder)
        self.subfolders[folder] = target
        if not isdir(target):
            mkdir(target)

# image operations
def makeavgdir(self):
    """
    Create the "avg" working directory inside the dataset path and its
    processed_images / aligned_images / correlation_images / results /
    template_rot sub-directories, recording each in self.subfolders.
    """
    self.avgpath = join(self.path, "avg")
    if not isdir(self.avgpath):
        mkdir(self.avgpath)
    for folder in ("processed_images", "aligned_images",
                   "correlation_images", "results", "template_rot"):
        target = join(self.avgpath, folder)
        self.subfolders[folder] = target
        if not isdir(target):
            mkdir(target)
def makeavgdir(self):
    """
    Create the "avg" working directory inside the dataset path.

    Note: despite the broader layout used by sibling classes, this
    variant only creates the aligned_rgb_images / results /
    avg_transition sub-directories, recorded in self.subfolders.
    """
    self.avgpath = join(self.path, "avg")
    if not isdir(self.avgpath):
        mkdir(self.avgpath)
    for folder in ("aligned_rgb_images", "results", "avg_transition"):
        target = join(self.avgpath, folder)
        self.subfolders[folder] = target
        if not isdir(target):
            mkdir(target)
def makeavgdir(self):
    """
    Create the "avg" working directory inside the dataset path.

    Only the aligned_rgb_images / results / avg_transition
    sub-directories are created here; each is recorded in
    self.subfolders keyed by its name.
    """
    self.avgpath = join(self.path, "avg")
    if not isdir(self.avgpath):
        mkdir(self.avgpath)
    for folder in ("aligned_rgb_images", "results", "avg_transition"):
        target = join(self.avgpath, folder)
        self.subfolders[folder] = target
        if not isdir(target):
            mkdir(target)
def download_model(lang, paths):
    """
    Ensure the pocketsphinx model archive for `lang` is downloaded and
    extracted under <user_config>/model/<lang>.
    """
    model_folder = join(paths.user_config, 'model')
    lang_folder = join(model_folder, lang)
    for folder in (model_folder, lang_folder):
        if not isdir(folder):
            mkdir(folder)
    file_name = paths.model_dir + '.tar.gz'
    if not isfile(file_name):
        # Only download when the archive is not already cached
        import urllib.request
        import shutil
        url = ('https://github.com/MatthewScholefield/pocketsphinx-models'
               '/raw/master/' + lang + '.tar.gz')
        with urllib.request.urlopen(url) as response, \
                open(file_name, 'wb') as file:
            shutil.copyfileobj(response, file)
    import tarfile
    # Extraction always runs, even when the archive was cached
    with tarfile.open(file_name) as tar:
        tar.extractall(path=lang_folder)
def save(self, saver):
    """
    Save a model checkpoint.

    Writes 'model.max.ckpt' directly into the directory named by
    self.opt['model_file'] using the supplied TF saver and the
    current session.
    """
    # The old per-name log_root directory logic was a large block of
    # commented-out dead code and has been removed.
    ckpt_path = join(self.opt['model_file'], 'model.max.ckpt')
    print('saving path ' + ckpt_path)
    saver.save(self.sess, ckpt_path)
def rev_parse_manifest_path(self, cwd):
    """
    Search parent directories for package.json, starting at `cwd`.

    A directory qualifies only when it holds both a package.json file
    and a node_modules/.bin/ directory. Returns that manifest path, or
    None once the filesystem root is reached.
    """
    manifest = path.normpath(path.join(cwd, 'package.json'))
    bin_dir = path.join(cwd, 'node_modules/.bin/')
    if path.isfile(manifest) and path.isdir(bin_dir):
        return manifest
    parent = path.normpath(path.join(cwd, '../'))
    if parent in (cwd, '/'):
        # Hit the root without finding a qualifying manifest
        return None
    return self.rev_parse_manifest_path(parent)
def generate_instance_deployment_directory(username):
    """
    Create (if needed) and return the deployment directory for a user.

    The directory name is the raw username, or its salted md5 hex
    digest when problem-directory obfuscation is enabled in
    deploy_config.
    """
    directory = username
    if deploy_config.obfuscate_problem_directories:
        salted = (username + deploy_config.deploy_secret).encode()
        directory = md5(salted).hexdigest()
    root_dir = deploy_config.problem_directory_root
    if not isdir(root_dir):
        os.makedirs(root_dir)
        # make the root not world readable
        os.chmod(root_dir, 0o751)
    path = join(root_dir, directory)
    if not isdir(path):
        os.makedirs(path)
    return path
def clean_list(self):
    """
    Validate the configured backup items.

    Absolute paths of existing files/directories from
    self.backup_itens are appended to self.backup_list; invalid
    entries are logged and skipped. Raises BaseException when
    <backup_itens> is empty.
    """
    # Case <backup_itens> is empty
    if self.backup_itens == []:
        msg = "After version 0.0.4 <backup_itens> cannot be empty"
        self.log.update_log(msg)
        # BaseException kept: callers may already catch it.
        raise BaseException(msg) from None
    # Add items
    for item in self.backup_itens:
        full_path = path.abspath(item)
        if path.isfile(full_path) or path.isdir(full_path):
            self.backup_list.append(full_path)
        else:
            # BUG FIX: was a bare `log.update_log(...)` which raised
            # NameError at runtime; use the instance logger as the
            # rest of the method does.
            self.log.update_log(
                "Invalid item. It'll be wiped from backup list: <%s>" % item,
                'INFO')
def process_list(self):
    """
    Dispatch every entry of self.backup_list to process_item().

    Directories are copied into a per-folder destination below
    <target_folder>/<sub_folder_name>; plain files go straight into
    that default destination.
    """
    default_dest = path.abspath(path.join(self.target_folder,
                                          self.sub_folder_name))
    for item in self.backup_list:
        source = path.abspath(item)
        if path.isdir(item):
            # NOTE(review): splitting on '\\' assumes Windows-style
            # paths -- verify on other platforms.
            folder = source.split('\\')[-1]
            self.log.update_log('Processing directory: %s' % item, 'INFO')
            self.process_item(source, path.join(default_dest, folder))
        else:
            self.log.update_log('Processing file: %s' % item, 'INFO')
            self.process_item(source, default_dest)
def set_directory(self, directory):
    """Set the directory where the downloaded file will be placed.

    Guard clauses raise OSError with a specific errno when the path is
    missing (ENOENT), not a directory (ENOTDIR), or not writable and
    searchable (EACCES).
    """
    if not path.exists(directory):
        raise OSError(errno.ENOENT,
                      "You see no directory there.",
                      directory)
    if not path.isdir(directory):
        raise OSError(errno.ENOTDIR,
                      "You cannot put a file into "
                      "something which is not a directory.",
                      directory)
    if not os.access(directory, os.X_OK | os.W_OK):
        raise OSError(errno.EACCES,
                      "This directory is too hard to write in to.",
                      directory)
    self.destDir = directory
def run(self):
    """
    Run the normal install step, then fetch and unpack the Stanford
    segmenter and POS-tagger archives into their package directories.
    """
    install.run(self)
    sys.path.reverse()

    def _install_archive(pkg_dir, subdir, archive, zip_name):
        # Download and unpack one Stanford archive unless it is
        # already installed under pkg_dir/subdir. (Factored out of two
        # previously duplicated copy-pasted sections.)
        if isdir(join(pkg_dir, subdir)):
            return
        print('Start downloading %s.zip...' % archive)
        urlretrieve('http://nlp.stanford.edu/software/%s.zip' % archive,
                    zip_name, report)
        with zipfile.ZipFile(zip_name, 'r') as z:
            z.extractall(pkg_dir)
        rename(join(pkg_dir, archive), join(pkg_dir, subdir))
        unlink(zip_name)

    # ----------- install segmenter ------------
    import stanford_segmenter
    _install_archive(stanford_segmenter.__path__[0], 'seg',
                     'stanford-segmenter-2015-12-09', 'seg.zip')

    # ----------- install postagger ------------
    import stanford_postagger
    _install_archive(stanford_postagger.__path__[0], 'pos',
                     'stanford-postagger-full-2015-12-09', 'pos.zip')
def touch(self, path, size=None, random=False, perm=None, time=None):
    """
    Create a file at `path`, or refresh its modified time if it exists.

    If size is specified and the file does not exist yet, a file of
    that size is created -- sparse by default (seek + single byte), or
    filled with urandom() content when random=True (much slower, but
    needed by certain tests). If the file already exists, size is
    ignored for safety. `perm`, when given, is applied with chmod();
    `time` is passed through to utime(). Returns True.
    """
    path = abspath(path)
    if not isdir(dirname(path)):
        # mkdir is presumably the bool-returning project wrapper
        # defined in this module. BUG FIX: 0700 is a Python-3 syntax
        # error; 0o700 is valid on Python 2.6+ as well.
        mkdir(dirname(path), 0o700)
    if not exists(path):
        size = strsize_to_bytes(size)
        if not random:
            # Sparse file: seek to the end and write one byte.
            with open(path, "wb") as f:
                if isinstance(size, int) and size > 0:
                    f.seek(size - 1)
                    # BUG FIX: write bytes, not str, to a binary
                    # stream (b"\0" also works on Python 2.6+).
                    f.write(b"\0")
        else:
            # fill our file with randomly generated content
            with open(path, 'wb') as f:
                f.write(urandom(size))
    # Update the access/modified times
    utime(path, time)
    if perm is not None:
        # Adjust permissions
        chmod(path, perm)
    return True
def test_rar_single_file(self):
    """
    Test that we can rar content
    """
    # Working directory for the codec
    work_dir = join(self.tmp_dir, 'CodecRar_Test.rar', 'work')
    cr = CodecRar(work_dir=work_dir)

    # Source directory to be filled with temporary content; it must
    # not exist beforehand
    source_dir = join(
        self.tmp_dir, 'CodecRar_Test.rar.single', 'my_source'
    )
    assert isdir(source_dir) is False

    # Create dummy files and queue each one for encoding
    for i in range(0, 10):
        tmp_file = join(source_dir, 'DSC_IMG%.3d.jpeg' % i)
        self.touch(tmp_file, size='120K', random=True)
        cr.add(tmp_file)

    # Compress everything into a single archive
    content = cr.encode()
    assert isinstance(content, sortedset)
    assert len(content) == 1
    assert isinstance(content[0], NNTPBinaryContent)
    # Encoded content is attached by default
    assert content[0].is_attached() is True
def test_rar_multi_files(self):
    """
    Test that we can rar content into multiple files
    """
    # Working directory for the codec; volume_size forces the archive
    # to be split across several parts
    work_dir = join(self.tmp_dir, 'CodecRar_Test.rar.multi', 'work')
    cr = CodecRar(work_dir=work_dir, volume_size='100K')

    # Source directory to be filled with temporary content; it must
    # not exist beforehand
    source_dir = join(self.tmp_dir, 'CodecRar_Test.rar', 'my_source')
    assert isdir(source_dir) is False

    # Create dummy files and queue each one for encoding
    for i in range(0, 10):
        tmp_file = join(source_dir, 'DSC_IMG%.3d.jpeg' % i)
        self.touch(tmp_file, size='100K', random=True)
        cr.add(tmp_file)

    # Compress everything; expect 11 archive volumes
    content = cr.encode()
    assert isinstance(content, sortedset)
    assert len(content) == 11
    for c in content:
        assert isinstance(c, NNTPBinaryContent)
        # Encoded content is attached by default
        assert c.is_attached() is True
def test_7z_single_file(self):
    """
    Test that we can compress content
    """
    # Working directory for the codec
    work_dir = join(self.tmp_dir, 'Codec7Zip_Test.rar', 'work')
    cr = Codec7Zip(work_dir=work_dir)

    # Source directory to be filled with temporary content; it must
    # not exist beforehand
    source_dir = join(
        self.tmp_dir, 'Codec7Zip_Test.7z.single', 'my_source'
    )
    assert isdir(source_dir) is False

    # Create dummy files and queue each one for encoding
    for i in range(0, 10):
        tmp_file = join(source_dir, 'DSC_IMG%.3d.jpeg' % i)
        self.touch(tmp_file, size='120K', random=True)
        cr.add(tmp_file)

    # Compress everything into a single archive
    content = cr.encode()
    assert isinstance(content, sortedset)
    assert len(content) == 1
    assert isinstance(content[0], NNTPBinaryContent)
    # Encoded content is attached by default
    assert content[0].is_attached() is True
def test_par_single_file(self):
    """
    Test that we can par content
    """
    # Working directory for the codec
    work_dir = join(self.tmp_dir, 'CodecPar_Test.par', 'work')
    cr = CodecPar(work_dir=work_dir)

    # Source directory for the dummy content; it must not exist
    # beforehand
    source_dir = join(
        self.tmp_dir, 'CodecPar_Test.par.single', 'my_source'
    )
    assert isdir(source_dir) is False

    # One dummy archive to generate parity for
    tmp_file = join(source_dir, 'dummy.rar')
    self.touch(tmp_file, size='1M', random=True)
    cr.add(tmp_file)

    # Generate the parity content; expect two output files
    content = cr.encode()
    assert isinstance(content, sortedset)
    assert len(content) == 2
    for c in content:
        assert isinstance(c, NNTPBinaryContent)
        # Content must be attached
        assert c.is_attached() is True
def mkdir(name, perm=0775):
    """
    A more contained wrapper to directory management

    Attempts to create `name` (and ancestors, via makedirs) with the
    given permissions, retrying up to 3 times to cope with races;
    returns True on success (or if the directory already exists) and
    False after exhausting all attempts.

    NOTE(review): Python 2 only -- octal literal 0775, `except
    OSError, e` syntax, and indexing the exception as e[0].
    """
    # Up to 3 tries guards against create/delete races with other
    # processes
    attempt = 3
    if isdir(name):
        return True
    while attempt > 0:
        try:
            makedirs(name, perm)
            logger.debug('Created directory: %s' % name)
            return True
        except OSError, e:
            if e[0] == errno.EEXIST:
                # directory exists; this is okay
                return isdir(name)
            logger.debug('Created directory %s exception: %s' % (
                name, e,
            ))
        # racing condition; just try again
        attempt -= 1
    # To many attempts... fail
    # ... fall through...
    return False
def pushd(newdir, create_if_missing=False, perm=0775):
    """
    A pushd/popd implementation, based on:
    http://stackoverflow.com/questions/6194499/pushd-through-os-system

    Usage:
        with pushd('somewhere'):
            print os.getcwd()   # somewhere
        print os.getcwd()       # back where you started

    NOTE(review): this generator is presumably decorated with
    @contextmanager at its (not visible) definition site -- it is used
    as `with pushd(...)` elsewhere in this module. The 0775 octal
    literal marks this as Python 2 code.
    """
    # Remember where we were so we can "popd" back afterwards
    prevdir = getcwd()
    if not isdir(newdir) and create_if_missing:
        # Don't bother checking the success or not
        # we'll find out soon enough with chdir()
        mkdir(newdir, perm)
    chdir(newdir)
    try:
        yield
    finally:
        # popd(): fall back to the previous directory even on error
        chdir(prevdir)
def metadata_isdir(name):
    """Is the named metadata a directory? (like ``os.path.isdir()``)"""
    # NOTE(review): declaration-only stub -- the docstring is the
    # entire body; presumably part of a provider interface that
    # concrete providers override.
def resource_isdir(resource_name):
    """Is the named resource a directory? (like ``os.path.isdir()``)"""
    # NOTE(review): declaration-only stub -- the docstring is the
    # entire body; presumably part of a provider interface that
    # concrete providers override.
def _isdir(self, path): return os.path.isdir(path)
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists.

    Uses EAFP (create first, then check) so a concurrent creator
    between the old isdir() test and makedirs() no longer raises.
    """
    dirname = os.path.dirname(path)
    try:
        os.makedirs(dirname)
    except OSError:
        # Already exists (another process may have just created it);
        # anything else -- e.g. permission denied -- is re-raised.
        if not os.path.isdir(dirname):
            raise
def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()

    Recursively creates the parent directories of `path`; raises
    IOError when the platform has no os.mkdir support.
    """
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    parent, leaf = split(path)
    if parent and leaf and not isdir(parent):
        # Build missing ancestors first, then this directory itself
        _bypass_ensure_directory(parent)
        mkdir(parent, 0o755)
def determine(argv=None):
    """
    Parse command-line options and work out the site directories.

    src/dist are auto-detected relative to the current working
    directory unless overridden with flags; raises SetupError when
    neither detection nor flags yield them. Returns a Setup instance.
    """
    argv = argv or sys.argv  # use command-line flags if nothing else passed
    parser = ArgumentParser(prog='pynini',
                            description='Static site processor')
    parser.add_argument('--verbosity', '-v', action='count',
                        help='increase log verbosity', default=0)
    parser.add_argument('--dist', metavar='dist_dir',
                        help='specify dist directory (disable auto detect)',
                        default=None)
    parser.add_argument('--src', '-s', metavar='src_dir',
                        help='specify src directory (disable auto detect)',
                        default=None)
    parser.add_argument('--pages', metavar='pages_dir',
                        help='specify pages directory (disable auto detect)',
                        default=None)
    parser.add_argument('--layouts', metavar='layouts_dir',
                        help='specify layouts directory (disable auto detect)',
                        default=None)
    parser.add_argument('--partials', metavar='partials_dir',
                        help='specify partials directory (disable auto detect)',
                        default=None)
    args = parser.parse_args(argv[1:])

    # Each of these may be None when not supplied on the command line
    dist_dir = args.dist
    src_dir = args.src
    operation_dir = getcwd()
    if isdir(join(operation_dir, 'src')):
        # Run from the project root: src/ and dist/ live below us
        src_dir = src_dir or join(operation_dir, 'src')
        dist_dir = dist_dir or join(operation_dir, 'dist')
    elif 'src' == basename(operation_dir):
        # we're inside the src dir
        src_dir = src_dir or operation_dir
        dist_dir = dist_dir or join(dirname(operation_dir), 'dist')
    elif not src_dir or not dist_dir:
        raise SetupError('Could not determine src_dir, dist_dir')
    return Setup(operation_dir, dist_dir, src_dir,
                 pages_dir=args.pages,
                 layouts_dir=args.layouts,
                 partials_dir=args.partials,
                 verbosity=args.verbosity,
                 template_loader=None,
                 template_writer=None)
def exists_backup(entity_id):
    """Return True when a backup directory exists for `entity_id`."""
    backup_path = path.join(Backuper.backups_dir, str(entity_id))
    return isdir(backup_path)
def distributions_from_metadata(path):
    """
    Yield a Distribution built from a metadata file or directory.

    Empty metadata directories are skipped entirely; otherwise the
    metadata provider is chosen by whether `path` is a directory or a
    plain file.
    """
    root = os.path.dirname(path)
    if os.path.isdir(path):
        if not os.listdir(path):
            # empty metadata dir; skip
            return
        metadata = PathMetadata(root, path)
    else:
        metadata = FileMetadata(path)
    entry = os.path.basename(path)
    yield Distribution.from_location(
        root, entry, metadata, precedence=DEVELOP_DIST,
    )