The following code examples, extracted from open-source Python projects, illustrate how to use os.path.abspath().
def handle_template(self, template, subdir):
    """Determine where the app or project templates are.

    ``django.__path__[0]`` is used as the default because we don't know
    into which directory Django has been installed.
    """
    if template is None:
        return path.join(django.__path__[0], 'conf', subdir)
    if template.startswith('file://'):
        template = template[7:]
    expanded = path.normpath(path.expanduser(template))
    if path.isdir(expanded):
        return expanded
    if self.is_url(template):
        # downloads the file and returns the path
        absolute = self.download(template)
    else:
        absolute = path.abspath(expanded)
    if path.exists(absolute):
        return self.extract(absolute)
    raise CommandError("couldn't handle %s template %s." %
                       (self.app_or_project, template))
def script_dir(pyobject, follow_symlinks=True):
    """Get current script's directory.

    Args:
        pyobject (Any): Any Python object defined in the script.
        follow_symlinks (Optional[bool]): Resolve symlinks or not.
            Defaults to True.

    Returns:
        str: Current script's directory.
    """
    if getattr(sys, 'frozen', False):
        # Bundled executables (py2exe, PyInstaller, cx_Freeze) have no
        # source file on disk; use the executable's own location.
        location = abspath(sys.executable)
    else:
        location = inspect.getabsfile(pyobject)
    if follow_symlinks:
        location = realpath(location)
    return dirname(location)
def connectionMade(self):
    """Open the destination file for the incoming transfer.

    Resumes (append mode) when requested and a partial file exists;
    otherwise truncates, unless a file is in the way and overwrite is
    disabled, in which case an EEXIST OSError is raised.
    """
    dst = path.abspath(path.join(self.destDir, self.filename))
    already_there = path.exists(dst)
    resuming = self.resume and already_there
    if resuming:
        # Told to resume and a file exists - pick up where it left off.
        self.file = open(dst, 'ab')
        log.msg("Attempting to resume %s - starting from %d bytes" %
                (self.file, self.file.tell()))
    elif self.overwrite or not already_there:
        self.file = open(dst, 'wb')
    else:
        raise OSError(errno.EEXIST,
                      "There's a file in the way. "
                      "Perhaps that's why you cannot open it.",
                      dst)
def buildPlaylistFile(body):
    '''Write the playlist to a new local m3u8 file and exit.'''
    title = 'SmoothStreamsTV.m3u8'
    # Open for writing (created if it does not exist), write <body>, save.
    with open(title, 'w+') as f:
        f.write(body)
        f.close()
    # Confirm the file handle really closed before reporting success.
    if f.closed:
        colourPrint('yellow', '\nPlaylist built successfully, located at: ')
        colourPrint('underline', path.abspath(title))
        exit(0)
    else:
        raise FileNotFoundError
def modified_results(tool):
    """Diff the locally modified example result files using *tool*."""
    workdir = dirname(dirname(abspath(__file__)))
    exeresult = run_executable(
        'git', ['-C', workdir, 'ls-files', '-z', '-m', 'tests/examples/*/result_*'])
    ignoreexts = bytestr('.cfg .cfg_diff .conf .pro .pro_diff .txt').split()
    result_filenames = []
    for entry in exeresult.stdout.split(b'\x00'):
        if not entry:
            continue
        filename = join(bytestr(workdir), entry)
        _, ext = os.path.splitext(filename)
        # Skip extensionless files, ignored extensions, and files that
        # no longer exist on disk.
        if not ext or ext in ignoreexts or not os.path.exists(filename):
            continue
        result_filenames.append(filename)
    diff_for_files(tool, result_filenames)
def main():
    """Parse the command line and launch the 3-way file comparison."""
    parser = argparse.ArgumentParser(description='Open a diff tool with three files')
    parser.add_argument('filename', nargs='*', help='input filename')
    # The info and debug options were inspired by rsync.
    parser.add_argument('--tool', help='path of diff tool')
    parser.add_argument('--results', action='store_true', help='show results diffs')
    args = parser.parse_args()
    tool = args.tool
    if tool is None:
        # Fall back to the environment variable when --tool is omitted.
        tool = os.environ.get(DIFF3_VARNAME)
        if tool is None:
            parser.error(
                'Please specify the 3-way file comparison tool with --tool or set %s'
                % DIFF3_VARNAME)
    if args.results:
        return modified_results(tool)
    if not args.filename:
        parser.error('Please specify at least one source file')
    diff_for_files(tool, [os.path.abspath(f) for f in args.filename])
    return 0
def clean_list(self):
    '''Validate <backup_itens> and build the list of items to back up.

    Invalid entries (neither a file nor a directory) are logged and
    dropped from the backup list.

    Raises:
        BaseException: if <backup_itens> is empty (disallowed since 0.0.4).
    '''
    # Case <backup_itens> is empty
    if self.backup_itens == []:
        msg = "After version 0.0.4 <backup_itens> cannot be empty"
        self.log.update_log(msg)
        raise BaseException(msg) from None
    # Add items
    for item in self.backup_itens:
        full_path = path.abspath(item)
        if path.isfile(full_path) or path.isdir(full_path):
            self.backup_list.append(full_path)
        else:
            # BUG FIX: this branch previously called a bare
            # `log.update_log(...)`, referencing an undefined module-level
            # name instead of the instance logger used everywhere else.
            self.log.update_log(
                "Invalid item. It'll be wiped from backup list: <%s>" % item,
                'INFO')
def process_list(self):
    '''Process every item from <backup_list> into the destination folder.

    Directories keep their own name under the destination; files go
    straight into the default destination.
    '''
    default_dest = path.abspath(path.join(self.target_folder, self.sub_folder_name))
    for item in self.backup_list:
        if path.isdir(item):
            # BUG FIX: the folder name was previously extracted with
            # split('\\'), which only works with Windows separators;
            # path.basename handles the platform's separator portably.
            folder = path.basename(path.abspath(item))
            dest = path.join(default_dest, folder)
            self.log.update_log('Processing directory: %s' % item, 'INFO')
            self.process_item(path.abspath(item), dest)
        else:
            self.log.update_log('Processing file: %s' % item, 'INFO')
            self.process_item(path.abspath(item), default_dest)
def __init__(self, path=None, expanduser=False):
    """ Initialize and return a local Path instance.

    Path can be relative to the current directory.
    If path is None it defaults to the current working directory.
    If expanduser is True, tilde-expansion is performed.
    Note that Path instances always carry an absolute path.
    Note also that passing in a local path object will simply return
    the exact same path object. Use new() to get a new copy.
    """
    if path is None:
        strpath = py.error.checked_call(os.getcwd)
    elif isinstance(path, common.PathBase):
        # Another Path instance: share its string form.
        strpath = path.strpath
    elif isinstance(path, py.builtin._basestring):
        strpath = abspath(os.path.expanduser(path) if expanduser else path)
    else:
        raise ValueError("can only pass None, Path instances "
                         "or non-empty strings to LocalPath")
    self.strpath = strpath
def get_host_file(_path=None):
    """
    If ``_path`` is passed in, return that (makes it easier for the caller
    if it can pass a ``None``) otherwise look into the most common locations
    for a host file and if found, return that.

    Returns None when no hosts file can be located.
    """
    if _path:
        return _path
    # if no path is passed onto us try and look in the cwd for a hosts file
    # Consistency fix: use the `path` alias throughout instead of mixing
    # `os.path.isfile` with `path.isfile`/`path.abspath`.
    if path.isfile('hosts'):
        logger.info(
            'found and loaded the hosts file from the current working directory: %s',
            os.getcwd()
        )
        return path.abspath('hosts')
    # if that is not the case, try for /etc/ansible/hosts
    if path.isfile('/etc/ansible/hosts'):
        return '/etc/ansible/hosts'
    logger.warning('unable to find an Ansible hosts file to work with')
    logger.warning('tried locations: %s, %s', os.getcwd(), '/etc/ansible/hosts')
def dirname_is_existing_dir(path: str) -> str:
    """Return *path* unchanged if its parent directory exists.

    >>> import tempfile
    >>> with tempfile.TemporaryDirectory() as dir:
    ...     dirname_is_existing_dir(dir) == dir
    True
    >>> dirname_is_existing_dir('/non/existing/dir')
    Traceback (most recent call last):
    argparse.ArgumentTypeError: Dirname of path is not an existing directory.
    """
    parent = dirname(abspath(path))
    if not isdir(parent):
        raise ArgumentTypeError("Dirname of path is not an existing directory.")
    return path
def __load_modules(module_name):
    """Recursively import every sub-package found on disk under *module_name*.

    A directory is treated as a sub-package when it contains an
    ``__init__.py``; discovered sub-packages are recursed into first, then
    imported.
    """
    import os
    # FIX: `dirname` is used below but was missing from the local import
    # list, silently relying on a module-level import being present.
    from os.path import join, abspath, isdir, exists, dirname
    rootdir = dirname(dirname(__file__))
    search = join(abspath(rootdir), module_name.replace('.', os.sep))
    modules = []
    for entry in os.listdir(search):
        subpath = join(search, entry)
        if isdir(subpath) and exists(join(subpath, '__init__.py')):
            submodule_name = module_name + '.' + entry
            __load_modules(submodule_name)
            modules.append(submodule_name)
    # load the modules
    for module_name_to_import in modules:
        __import__(module_name_to_import)
def get_instance_path(pid, n="", public=True):
    """
    Gets the path to a particular instance of a problem.

    Args:
        pid: the problem id
        n: the instance number, defaults to base of instances
        public: whether to append the 'public' subdirectory
    Returns:
        The path to the particular instance.
    """
    generator_path = get_generator_path(pid)
    name = api.problem.get_problem(pid)["name"]
    parts = [path.dirname(generator_path), "instances", name, str(n)]
    if public:
        parts.append("public")
    return path.abspath(path.join(*parts))
def test_discover_with_init_module_that_raises_SkipTest_on_import(self):
    # Virtual package layout: a package whose __init__ raises SkipTest
    # when imported during discovery.
    vfs = {abspath('/foo'): ['my_package'],
           abspath('/foo/my_package'): ['__init__.py', 'test_module.py']}
    self.setup_import_issue_package_tests(vfs)
    import_calls = []
    def _get_module_from_name(name):
        # Record the attempted import, then simulate the package's
        # __init__ skipping itself.
        import_calls.append(name)
        raise unittest.SkipTest('skipperoo')
    loader = unittest.TestLoader()
    loader._get_module_from_name = _get_module_from_name
    suite = loader.discover(abspath('/foo'))
    self.assertIn(abspath('/foo'), sys.path)
    # The skipped package is surfaced as exactly one runnable test case.
    self.assertEqual(suite.countTestCases(), 1)
    result = unittest.TestResult()
    suite.run(result)
    self.assertEqual(len(result.skipped), 1)
    self.assertEqual(result.testsRun, 1)
    # Only the package itself was imported (not its test module).
    self.assertEqual(import_calls, ['my_package'])
    # Check picklability
    for proto in range(pickle.HIGHEST_PROTOCOL + 1):
        pickle.loads(pickle.dumps(suite, proto))
def test_module_symlink_ok(self):
    # A module reachable both directly and through a symlink must not be
    # treated as a clash once realpath resolves both to the same file.
    full_path = self.setup_module_clash()
    original_realpath = os.path.realpath
    mod_dir = os.path.abspath('bar')
    expected_dir = os.path.abspath('foo')
    def cleanup():
        # Restore the patched os.path.realpath no matter what happens.
        os.path.realpath = original_realpath
    self.addCleanup(cleanup)
    def realpath(path):
        # Pretend bar/foo.py is a symlink to foo/foo.py.
        if path == os.path.join(mod_dir, 'foo.py'):
            return os.path.join(expected_dir, 'foo.py')
        return path
    os.path.realpath = realpath
    loader = unittest.TestLoader()
    # Discovery should succeed without raising a module-clash error.
    loader.discover(start_dir='foo', pattern='foo.py')
def test_discovery_from_dotted_path(self):
    # discover() must accept a dotted module name and resolve it to the
    # package's directory before searching for tests.
    loader = unittest2.TestLoader()
    tests = [self]
    expectedPath = os.path.abspath(os.path.dirname(unittest2.test.__file__))
    self.wasRun = False
    def _find_tests(start_dir, pattern, namespace=None):
        # Stub: verify the resolved start directory, return canned tests.
        self.wasRun = True
        self.assertEqual(start_dir, expectedPath)
        return tests
    loader._find_tests = _find_tests
    suite = loader.discover('unittest2.test')
    self.assertTrue(self.wasRun)
    self.assertEqual(suite._tests, tests)

# https://bitbucket.org/pypy/pypy/issue/1259/builtin-module-__file__-attribute-shows
def writeJSONData(settings, data):
    """
    Output the data to a file in JSON pretty format.

    Args:
        settings: configuration dict; 'data-directory' names the output dir
        data: the data to save to the file
    Raises:
        SystemExit: via sys.exit(1) on file error
    """
    # FIX: the local was renamed from `file` (shadowed the builtin) and the
    # bogus `outfile.close` (missing parentheses, a no-op) was removed --
    # the `with` block already closes the file.
    target = path.join(path.abspath(settings['data-directory']), 'sunrise_data.json')
    logging.debug("writing results to %s", target)
    try:
        with open(target, 'w') as outfile:
            outfile.write(json.dumps(data, indent=4, sort_keys=True))
    except IOError as e:
        logging.warning('error: %s', e.strerror)
        sys.exit(1)
def __init__(self, filelocation, versioning=True):
    """Initialize a new FileReference instance.

    Args:
        filelocation: A string of the filepath.
        versioning: Boolean if versioning is enabled or not. (Defaults true)
    """
    # DOC FIX: the previous docstring documented a non-existent
    # `filecontentinmem` parameter and a ValueError that was never raised;
    # the dead trailing `return` statement was also removed.
    logger = logging.getLogger('quit.core.FileReference')
    logger.debug('Create an instance of FileReference')
    self.content = None
    self.path = abspath(filelocation)
    self.modified = False
def get_abspath(url):
    """Normalize *url*: prepend an http scheme when none is present and
    resolve relative ``file://`` URLs against this package's parent dir."""
    if url and '://' not in url:
        url = 'http://%s' % url
    if url and url.startswith('file:///'):
        # already have an abspath
        pass
    elif url and url.startswith('file://'):
        parent = p.dirname(p.dirname(__file__))
        resolved = p.abspath(p.join(parent, url[7:]))
        url = 'file://%s' % resolved
    return decode(url)

# https://trac.edgewall.org/ticket/2066#comment:1
# http://stackoverflow.com/a/22675049/408556
def main(args=None):
    '''Main routine.

    Args:
        args: argument list to parse; defaults to ``sys.argv[1:]``.
    '''
    if args is None:
        args = sys.argv[1:]
    parser = make_argument_parser()
    # BUG FIX: parse_args() was called without the explicit *args* list,
    # so arguments passed to main() were silently ignored in favour of
    # sys.argv.
    args = parser.parse_args(args)
    source_dir = path.abspath(args.source)
    out_dir = path.abspath(args.out_path)
    if not path.isdir(out_dir):
        raise ValueError('No output directory found (%s)' % out_dir)
    load_niftis(source_dir, out_dir, args.name, patterns=args.patterns)
def get_app_template_dir(app_name):
    """Get the template directory for an application.

    We do not use django.db.models.get_app, because this will fail if an
    app does not have any models.

    Returns a full path, or None if the app was not found.
    """
    from django.conf import settings
    from importlib import import_module
    if app_name in _cache:
        return _cache[app_name]
    template_dir = None
    for app in settings.INSTALLED_APPS:
        if app.split('.')[-1] != app_name:
            continue
        # Do not hide import errors; these should never happen at this
        # point anyway
        mod = import_module(app)
        template_dir = join(abspath(dirname(mod.__file__)), 'templates')
        break
    _cache[app_name] = template_dir
    return template_dir
def test_main():
    """ Basic functional test """
    assert markov_novel
    workdir = 'tmp'
    os.makedirs(workdir)
    os.chdir(workdir)
    # Get raw text as string.
    from os.path import dirname, abspath
    source = os.path.join(
        dirname(dirname(abspath(__file__))), 'tests/futuristmanifest.txt')
    with open(source) as f:
        text = f.read()
    # Build the model.
    text_model = markovify.Text(text)
    novel = markov_novel.Novel(text_model, chapter_count=1)
    novel.write(novel_title='my-novel', filetype='md')
    assert os.path.exists(os.path.join(os.getcwd(), 'my-novel.md'))
    os.chdir(os.pardir)
    shutil.rmtree('tmp', ignore_errors=True)
def __init__(self, env):
    """Set up the gather viewer with embedded renderers for the two
    marker-ball models (green and red).

    Args:
        env: the environment being visualized.
    """
    self.env = env
    super(GatherViewer, self).__init__()

    def load_ball(xml_name):
        # One MjModel plus an embedded renderer per ball colour; this
        # replaces the previously duplicated green/red setup code.
        model = MjModel(osp.abspath(osp.join(MODEL_DIR, xml_name)))
        renderer = EmbeddedViewer()
        renderer.set_model(model)
        return model, renderer

    self.green_ball_model, self.green_ball_renderer = load_ball('green_ball.xml')
    self.red_ball_model, self.red_ball_renderer = load_ball('red_ball.xml')
def send2trash(path):
    """Move *path* to the Windows Recycle Bin via SHFileOperationW.

    Raises:
        OSError: if the shell operation reports a non-zero result code.
    """
    if not isinstance(path, text_type):
        # Decode byte paths using the ANSI code page ('mbcs').
        path = text_type(path, 'mbcs')
    if not op.isabs(path):
        # The shell operation needs an absolute path.
        path = op.abspath(path)
    fileop = SHFILEOPSTRUCTW()
    fileop.hwnd = 0
    fileop.wFunc = FO_DELETE
    # NOTE(review): pFrom must be double-NUL-terminated per the Win32 API;
    # this appends one NUL and presumably relies on the ctypes wide-string
    # conversion for the terminating one -- confirm.
    fileop.pFrom = LPCWSTR(path + '\0')
    fileop.pTo = None
    # FOF_ALLOWUNDO requests recycle-bin deletion rather than permanent
    # removal; the remaining flags suppress confirmation dialogs and UI.
    fileop.fFlags = FOF_ALLOWUNDO | FOF_NOCONFIRMATION | FOF_NOERRORUI | FOF_SILENT
    fileop.fAnyOperationsAborted = 0
    fileop.hNameMappings = 0
    fileop.lpszProgressTitle = None
    result = SHFileOperationW(byref(fileop))
    if result:
        msg = "Couldn't perform operation. Error code: %d" % result
        raise OSError(msg)
def meta_autodetect_platform(cls):
    """
    Dark magic to autodetect the platform for built-in shellcodes.

    User-defined shellcodes must define *arch* and *os*.
    """
    # Local aliases for os.path helpers (split/splitext appear unused).
    abspath = path.abspath
    join = path.join
    split = path.split
    splitext = path.splitext
    sep = path.sep
    module = cls.__module__
    if module != '__main__':
        # Imported module: derive the two fields from the dotted module
        # path -- presumably <base_package>.<arch>.<os>.<file>; bail out
        # for modules outside base_package or for the base file itself.
        tokens = cls.__module__.split('.')
        if len(tokens) < 2 or tokens[0] != base_package or \
                tokens[1] == base_file:
            return
        # Pad with 'any' so a two-token path still yields both fields.
        tokens.insert(-1, 'any')
        tokens = tokens[1:3]
    else:
        # Run as a script: recover the same two components from the file's
        # location on disk relative to base_dir.
        module = abspath(sys.modules[module].__file__)
        if not module.startswith(base_dir):
            return
        tokens = module.split(sep)
        tokens = tokens[len(base_dir.split(sep)):-1]
        while len(tokens) < 2:
            tokens.append('any')
    cls.arch, cls.os = tokens
def touch(self, path, size=None, random=False, perm=None, time=None):
    """Simplify the dynamic creation of files or the updating of their
    modified time.

    If a size is specified, then a file of that size will be created on
    the disk. If the file already exists, then the size= attribute is
    ignored (for safety reasons).

    If random is set to true, then the file created is actually created
    using tons of randomly generated content. This is MUCH slower but
    necessary for certain tests.

    Returns True on success.
    """
    path = abspath(path)
    if not isdir(dirname(path)):
        # FIX: `0700` is Python-2-only octal syntax (a SyntaxError on
        # Python 3); `0o700` works on both.
        mkdir(dirname(path), 0o700)
    if not exists(path):
        size = strsize_to_bytes(size)
        if not random:
            # Sparse file: seek to the end and write a single byte.
            # FIX: write bytes (b"\0") so the binary-mode write works on
            # Python 3 too, and close the handle via a context manager.
            with open(path, "wb") as f:
                if isinstance(size, int) and size > 0:
                    f.seek(size - 1)
                    f.write(b"\0")
        else:
            # fill our file with randomly generated content
            with open(path, 'wb') as f:
                # Fill our file with garbage
                f.write(urandom(size))
    # Update our path's modification time
    utime(path, time)
    if perm is not None:
        # Adjust permissions
        chmod(path, perm)
    # Return True
    return True
def _samefile(src, dst):
    """Return True when *src* and *dst* refer to the same file."""
    # Macintosh, Unix: prefer the platform's samefile() when available.
    samefile = getattr(os.path, 'samefile', None)
    if samefile is not None:
        try:
            return samefile(src, dst)
        except OSError:
            return False
    # All other platforms: compare normalized absolute pathnames.
    src_norm = os.path.normcase(os.path.abspath(src))
    dst_norm = os.path.normcase(os.path.abspath(dst))
    return src_norm == dst_norm
def _destinsrc(src, dst):
    """Return True when *dst* lies inside the tree rooted at *src*."""
    # Compare absolute paths, each with a guaranteed trailing separator,
    # so that '/a/bc' is not mistaken for a child of '/a/b'.
    norm_src = abspath(src)
    norm_dst = abspath(dst)
    sep = os.path.sep
    if not norm_src.endswith(sep):
        norm_src += sep
    if not norm_dst.endswith(sep):
        norm_dst += sep
    return norm_dst.startswith(norm_src)
def remove_watcher(self, filename):
    """Stop and discard the watcher (and its thread) registered for
    *filename*, if any. Unknown filenames are ignored."""
    key = path.abspath(filename)
    # Idiom fix: membership test directly on the dict instead of .keys().
    if key in self.watchers:
        self.watchers[key].stop()
        self.watchers.pop(key)
        self.threads.pop(key)
def __init__(self, filename, counter):
    # Watch *filename* for appended lines: if the file already exists,
    # open it and remember its current size so only new content is read.
    self.filename = path.abspath(filename)
    self.queue = Queue()
    self.check_chain = CheckerChain(self.queue, counter)
    self.observer = Observer()
    # File handle and read offset; populated only when the file exists.
    self.fd = None
    self.offset = 0
    if path.isfile(self.filename):
        # Start tailing from the current end of the file.
        self.fd = open(self.filename)
        self.offset = path.getsize(self.filename)
def on_deleted(self, event):
    """Close the tailed file handle when the watched path is deleted."""
    if path.abspath(event.src_path) != self.filename:
        return
    self.fd.close()
def on_moved(self, event):
    """Handle a rename involving the watched file.

    Moving the watched file away closes its descriptor; a file moved
    into place is reopened and tailed from its current size.
    """
    target = self.filename
    if path.abspath(event.src_path) == target:
        self.fd.close()
    if path.abspath(event.dest_path) == target and path.isfile(target):
        self.fd = open(target)
        self.offset = path.getsize(target)
def on_modified(self, event):
    """Queue every newly appended line and advance the read offset."""
    if path.abspath(event.src_path) != self.filename:
        return
    self.fd.seek(self.offset, 0)
    for raw in self.fd:
        self.queue.put(raw.rstrip('\n'))
    self.offset = self.fd.tell()
def root(*args):
    """
    Get the absolute path of the given path relative to the project root.
    """
    project_root = abspath(dirname(__file__))
    return join(project_root, *args)
def get_default_file(self, media_type, ext='.png'):
    """Absolute path of the default placeholder file for *media_type*."""
    directory = self.directories[media_type]
    return path.abspath(path.join(directory, 'default' + ext))

#endregion

#region HTML file paths
def get_html_path(self, date):
    """Retrieves the output file for the backup with the given name, in the
    given date. An example might be 'backups/exported/year/MM/dd.html'"""
    relative = path.join(self.base_dir,
                         str(date.year),
                         str(date.month),
                         '{}.html'.format(date.day))
    return path.abspath(relative)
def get_propic_path(self, entity, allow_multiple=False):
    """Gets the profile picture full path for the given entity.

    If allow_multiple is given, a more unique ID will be given to the
    files (photo.photo_id); otherwise a more generic ID is used
    (entity.id). Returns None when no picture name is available.
    """
    name = self.get_propic_name(entity, allow_multiple=allow_multiple)
    if not name:
        return None
    return path.abspath(path.join(self.directories['propics'], name))
def get_msg_media_path(self, msg):
    """Return the absolute output path for a message's media, or None for
    media that is neither a photo nor a recognised document type."""
    result = None
    if isinstance(msg.media, MessageMediaPhoto):
        result = path.join(self.directories['photos'], '{}{}'
                           .format(msg.media.photo.id, get_extension(msg.media)))
    if isinstance(msg.media, MessageMediaDocument):
        # Map the first recognised document attribute to its bucket,
        # in the same priority order as before.
        buckets = ((DocumentAttributeAnimated, 'gifs'),
                   (DocumentAttributeAudio, 'audios'),
                   (DocumentAttributeVideo, 'videos'),
                   (DocumentAttributeSticker, 'stickers'),
                   (DocumentAttributeFilename, 'documents'))
        media_type = None
        for attr in msg.media.document.attributes:
            for attr_cls, bucket in buckets:
                if isinstance(attr, attr_cls):
                    media_type = bucket
                    break
            if media_type:
                break
        if not media_type:
            return None
        result = path.join(self.directories[media_type], '{}{}'
                           .format(msg.media.document.id, get_extension(msg.media)))
    if result:
        return path.abspath(result)

#endregion
def load_ciede2000_data():
    """Load the CIEDE2000 reference table shipped next to this module.

    note: ciede_test_data.txt contains several intermediate quantities.
    """
    fields = [('pair', int), ('1', int), ('L1', float), ('a1', float),
              ('b1', float), ('a1_prime', float), ('C1_prime', float),
              ('h1_prime', float), ('hbar_prime', float), ('G', float),
              ('T', float), ('SL', float), ('SC', float), ('SH', float),
              ('RT', float), ('dE', float), ('2', int), ('L2', float),
              ('a2', float), ('b2', float), ('a2_prime', float),
              ('C2_prime', float), ('h2_prime', float)]
    data_path = pjoin(dirname(abspath(__file__)), 'ciede2000_test_data.txt')
    return np.loadtxt(data_path, dtype=fields)
def here():
    """Absolute directory containing this module (unicode-safe name)."""
    module_file = unifilename(__file__)
    return dirname(abspath(module_file))
def main():
    """Delete every generated 'result_*' directory under tests/examples."""
    exdir = join(dirname(dirname(abspath(__file__))), 'tests', 'examples')
    print(exdir)
    # Loop variable renamed from `path` to avoid shadowing the os.path
    # helpers imported at module level.
    for dirpath, dirs, files in os.walk(exdir):
        if not basename(dirpath).startswith('result_'):
            continue
        for name in files:
            filename = os.path.join(dirpath, name)
            print("Deleting %s" % filename)
            os.remove(filename)
        print("Deleting %s" % dirpath)
        os.rmdir(dirpath)
def get_output_dir(imdb, weights_filename):
    """Return the directory where experimental artifacts are placed.

    If the directory does not exist, it is created.

    A canonical path is built using the name from an imdb and a network
    (if not None).
    """
    if weights_filename is None:
        weights_filename = 'default'
    base = osp.abspath(osp.join(__C.ROOT_DIR, 'output', __C.EXP_DIR, imdb.name))
    outdir = osp.join(base, weights_filename)
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    return outdir
def get_output_tb_dir(imdb, weights_filename):
    """Return the directory where tensorflow summaries are placed.

    If the directory does not exist, it is created.

    A canonical path is built using the name from an imdb and a network
    (if not None).
    """
    if weights_filename is None:
        weights_filename = 'default'
    base = osp.abspath(osp.join(__C.ROOT_DIR, 'tensorboard', __C.EXP_DIR, imdb.name))
    outdir = osp.join(base, weights_filename)
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    return outdir