The following 48 code examples, extracted from open-source Python projects, illustrate how to use os.path.realpath().
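Before the extracted examples, here is a minimal self-contained sketch of the two patterns that recur throughout them: resolving a symbolic link to its canonical target, and locating the directory of the running module. The scratch files below are created only for illustration, and the symlink step assumes a platform that permits unprivileged symlink creation (POSIX, or suitably configured Windows).

import os
import tempfile
from os.path import dirname, join, realpath

# Scratch directory and files, created only for this demonstration.
base = tempfile.mkdtemp()
target = join(base, 'target.txt')
link = join(base, 'link.txt')
open(target, 'w').close()
os.symlink(target, link)  # assumes the platform allows symlink creation

# realpath() follows the symlink back to the canonical path of its target.
assert realpath(link) == realpath(target)

# The idiom that dominates the examples below: the directory holding the
# currently executing module, with symlinks and '..' components resolved.
this_dir = dirname(realpath(__file__))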
def __init__(self, msShowTimeBetweenSlides=1500):
    # initialize tkinter super class
    Tk.__init__(self)
    # time each slide will be shown
    self.showTime = msShowTimeBetweenSlides
    # look for images in the Resources folder next to this module
    chapter_folder = path.realpath(path.dirname(__file__))
    resources_folder = path.join(chapter_folder, 'Resources')
    listOfSlides = [slide for slide in listdir(resources_folder)
                    if slide.endswith('gif') or slide.endswith('jpg')]
    # endlessly read in the slides so we can show them on the tkinter Label
    chdir(resources_folder)
    self.iterableCycle = cycle((ImageTk.PhotoImage(file=slide), slide)
                               for slide in listOfSlides)
    # create tkinter Label widget which can also display images
    self.slidesLabel = Label(self)
    # pack the Label widget
    self.slidesLabel.pack()
def script_dir(pyobject, follow_symlinks=True):
    """Get current script's directory

    Args:
        pyobject (Any): Any Python object in the script
        follow_symlinks (Optional[bool]): Follow symlinks or not. Defaults to True.

    Returns:
        str: Current script's directory
    """
    if getattr(sys, 'frozen', False):  # py2exe, PyInstaller, cx_Freeze
        path = abspath(sys.executable)
    else:
        path = inspect.getabsfile(pyobject)
    if follow_symlinks:
        path = realpath(path)
    return dirname(path)
def test_toolchain_standard_not_implemented(self):
    spec = Spec()

    with self.assertRaises(NotImplementedError):
        self.toolchain(spec)

    with self.assertRaises(NotImplementedError):
        self.toolchain.assemble(spec)

    with self.assertRaises(NotImplementedError):
        self.toolchain.link(spec)

    # Check that the build_dir is set on the spec based on tempfile
    self.assertTrue(spec['build_dir'].startswith(
        realpath(tempfile.gettempdir())))
    # Also that it got deleted properly.
    self.assertFalse(exists(spec['build_dir']))
def Unmount(self):
    sel = self['list'].getCurrent()
    if sel:
        mountp = sel[3]
        device = sel[4]
        system('umount ' + mountp)
        try:
            mounts = open("/proc/mounts")
        except IOError:
            return -1
        mountcheck = mounts.readlines()
        mounts.close()
        for line in mountcheck:
            parts = line.strip().split(" ")
            if path.realpath(parts[0]).startswith(device):
                self.session.open(MessageBox,
                                  _("Can't unmount partition, make sure it is not being used for swap or record/timeshift paths"),
                                  MessageBox.TYPE_INFO)
        self.updateList()
def __init__(self, msShowTimeBetweenSlides=1500):
    # initialize tkinter super class
    Tk.__init__(self)
    # time each slide will be shown
    self.showTime = msShowTimeBetweenSlides
    # look for images in the Resources folder next to this module
    chapter_folder = path.realpath(path.dirname(__file__))
    resources_folder = path.join(chapter_folder, 'Resources')
    listOfSlides = [slide for slide in listdir(resources_folder)
                    if slide.endswith('gif')]
    # endlessly read in the slides so we can show them on the tkinter Label
    chdir(resources_folder)
    self.iterableCycle = cycle((PhotoImage(file=slide), slide)
                               for slide in listOfSlides)
    # create tkinter Label widget which can also display images
    self.slidesLabel = Label(self)
    # pack the Label widget
    self.slidesLabel.pack()
def __init__(self, msShowTimeBetweenSlides=1500):
    # initialize tkinter super class
    Tk.__init__(self)
    # time each slide will be shown
    self.showTime = msShowTimeBetweenSlides
    # look for images in the Resources folder next to this module
    # try: .jpeg
    chapter_folder = path.realpath(path.dirname(__file__))
    resources_folder = path.join(chapter_folder, 'Resources')
    listOfSlides = [slide for slide in listdir(resources_folder)
                    if slide.endswith('gif') or slide.endswith('jpg')]
    # endlessly read in the slides so we can show them on the tkinter Label
    chdir(resources_folder)
    self.iterableCycle = cycle((PhotoImage(file=slide), slide)
                               for slide in listOfSlides)
    # create tkinter Label widget which can also display images
    self.slidesLabel = Label(self)
    # pack the Label widget
    self.slidesLabel.pack()
def compress_images(target_images):
    current = 0
    total_number = len(target_images)
    total_time = 0
    for image_file in target_images:
        current += 1
        write_log('Start compressing image: {}'.format(realpath(image_file)))
        if os.path.exists(image_file):
            time_start = timeit.default_timer()
            tinify_image(image_file)
            time_diff = round(timeit.default_timer() - time_start, 2)
            total_time += time_diff
            write_log('Compression done takes {} seconds! ({}/{})\n'.format(
                time_diff, current, total_number))
        else:
            write_log('Ignored: target image does not exist! ({}/{})\n'.format(
                current, total_number))
    if total_time > 0:
        write_log('Totally takes {} seconds to complete!'.format(total_time))
def handle_add_local_playlist():
    directory_path = input("Enter a directory path: ")
    if not os.path.isdir(os.path.expanduser(directory_path)):
        print("Invalid directory")
        return
    rec_choice = input("Include subdirectories (Y/N)? ").lower()
    recursive = rec_choice == "y"
    name = input("What do you want to call the playlist? ")
    real_path = realpath(directory_path)
    db = sqlite3.connect(offline_api._db_path)
    try:
        with db:
            db.execute("INSERT INTO playlists(playlistId, name) VALUES(?, ?)",
                       [real_path, name])
            add_directory(real_path, real_path, recursive, db)
    finally:
        db.close()
def _xxx_iter(self, subset):
    data_dir = op.join(op.dirname(op.realpath(__file__)), 'data')
    data_csv = op.join(data_dir, 'voxceleb1.csv')
    data = pd.read_csv(data_csv, index_col=['segment'])
    data = data.groupby('verification').get_group(subset)

    for uri, rows in data.groupby('uri'):
        annotation = Annotation(uri=uri)
        for row in rows.itertuples():
            segment = Segment(row.start, row.end)
            annotation[segment] = row.speaker
        annotated = annotation.get_timeline()

        current_file = {
            'uri': uri,
            'database': 'VoxCeleb',
            'annotation': annotation,
            'annotated': annotated,
        }

        yield current_file
def trn_iter(self):
    data_dir = op.join(op.dirname(op.realpath(__file__)), 'data')
    data_csv = op.join(data_dir, 'voxceleb1.csv')
    data = pd.read_csv(data_csv, index_col=['segment'])
    data = data.groupby('identification').get_group('trn')

    for uri, datum in data.iterrows():
        annotation = Annotation(uri=uri)
        segment = Segment(0., datum.end - datum.start)
        annotation[segment] = datum.speaker
        annotated = annotation.get_timeline()

        current_file = {
            'uri': uri,
            'database': 'VoxCeleb',
            'annotation': annotation,
            'annotated': annotated,
        }

        yield current_file
def _xxx_try_iter(self, subset):
    data_dir = op.join(op.dirname(op.realpath(__file__)), 'data')
    data_csv = op.join(data_dir, 'voxceleb1.csv')
    data = pd.read_csv(data_csv, index_col=['segment'])
    data = data.groupby('identification').get_group(subset)

    for uri, trial in data.iterrows():
        reference = trial.speaker
        segment = Segment(0., trial.end - trial.start)

        current_trial = {
            'database': 'VoxCeleb',
            'uri': uri,
            'try_with': Timeline(uri=uri, segments=[segment]),
            'reference': reference,
        }

        yield current_trial
def trn_iter(self):
    data_dir = op.join(op.dirname(op.realpath(__file__)), 'data')
    data_csv = op.join(data_dir, 'voxceleb1.csv')
    data = pd.read_csv(data_csv, index_col=['segment'])
    data = data.groupby('identification').get_group('trn')

    for uri, rows in data.groupby('uri'):
        annotation = Annotation(uri=uri)
        for row in rows.itertuples():
            segment = Segment(row.start, row.end)
            annotation[segment] = row.speaker
        annotated = annotation.get_timeline()

        current_file = {
            'uri': uri,
            'database': 'VoxCeleb',
            'annotation': annotation,
            'annotated': annotated,
        }

        yield current_file
def _check_if_pyc(fname):
    """Locate the module with imp.find_module() and return its type
    (source .py vs compiled .pyc), open file object and full path"""
    from imp import find_module
    from os.path import realpath, dirname, basename, splitext

    # Normalize the file-path for find_module()
    filepath = realpath(fname)
    dirpath = dirname(filepath)
    module_name = splitext(basename(filepath))[0]

    # Validate and fetch
    try:
        fileobj, fullpath, (_, _, pytype) = find_module(module_name, [dirpath])
    except ImportError:
        raise IOError("Cannot find config file. "
                      "Path may be incorrect! : {0}".format(filepath))
    return pytype, fileobj, fullpath
def test_toolchain_standard_build_dir_remapped(self):
    """
    This can be caused either by relative paths or by symlinks; the
    manually specified build_dir is remapped to its real location.
    """
    fake = mkdtemp(self)
    real = mkdtemp(self)
    real_base = basename(real)
    spec = Spec()
    spec['build_dir'] = join(fake, pardir, real_base)

    with pretty_logging(stream=StringIO()) as s:
        with self.assertRaises(NotImplementedError):
            self.toolchain(spec)

    self.assertIn('realpath of build_dir resolved to', s.getvalue())
    self.assertEqual(spec['build_dir'], real)
def check_atlas_name(atlas_name=None):
    "Validates the atlas name and returns its location"

    if atlas_name in [None, 'None', '']:
        atlas_name = 'fsaverage'

    atlas_name = atlas_name.lower()
    if atlas_name in ['glasser2016']:
        this_dir = os.path.dirname(os.path.realpath(__file__))
        atlas_path = os.path.realpath(pjoin(this_dir, 'atlases', 'glasser2016',
                                            'fsaverage_annot_figshare3498446'))
    elif atlas_name in ['fsaverage']:
        this_dir = os.path.dirname(os.path.realpath(__file__))
        atlas_path = os.path.realpath(pjoin(this_dir, 'atlases', 'fsaverage'))
    else:
        raise NotImplementedError('Requested atlas is not implemented or unreadable.')

    return atlas_path, atlas_name
def make_job(subject_id_list, freesurfer_dir, base_feature, weight_method,
             num_bins, edge_range, summary_stat, atlas, fwhm,
             out_proc_dir, job_dir, job_name, num_procs):
    "Creates graynet job for running on HPC"

    str_list_weight_method = ' '.join(weight_method)

    job_file = pjoin(job_dir, '{}.{}.job'.format(job_name, job_type))
    job_log = pjoin(job_dir, '{}.{}.log'.format(job_name, job_type))
    if pexists(job_file):
        os.remove(job_file)
    with open(job_file, 'w') as jf:
        jf.write('#!/bin/bash\n')
        jf.write(specify_hpc_resources(mem, queue, num_procs, job_dir, job_log))
        jf.write(make_cli_call(cli_name, realpath(subject_id_list), base_feature,
                               realpath(freesurfer_dir), str_list_weight_method,
                               num_bins, edge_range, summary_stat,
                               atlas, fwhm, realpath(out_proc_dir), num_procs))

    st = os.stat(job_file)
    os.chmod(job_file, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

    return job_file
def setup():
    """
    Gathers all configs
    """
    global CONFIG, BING_KEY, GENIUS_KEY, config_path, LOG_FILENAME, LOG_LINE_SEPERATOR

    LOG_FILENAME = 'musicrepair_log.txt'
    LOG_LINE_SEPERATOR = '........................\n'

    CONFIG = configparser.ConfigParser()
    config_path = realpath(__file__).replace(basename(__file__), '')
    config_path = config_path + 'config.ini'

    CONFIG.read(config_path)

    GENIUS_KEY = CONFIG['keys']['genius_key']
    BING_KEY = CONFIG['keys']['bing_key']

    if GENIUS_KEY == '<insert genius key here>':
        log.log_error('Warning, you are missing the Genius key. Add it using --config')
    if BING_KEY == '<insert bing key here>':
        log.log_error('Warning, you are missing the Bing key. Add it using --config')
def setup():
    """
    Gathers all configs
    """
    global CONFIG, BING_KEY, GENIUS_KEY, config_path, LOG_FILENAME, LOG_LINE_SEPERATOR

    LOG_FILENAME = 'musicrepair_log.txt'
    LOG_LINE_SEPERATOR = '........................\n'

    CONFIG = configparser.ConfigParser()
    config_path = realpath(__file__).replace(basename(__file__), '')
    config_path = config_path + 'config.ini'

    CONFIG.read(config_path)

    GENIUS_KEY = CONFIG['keys']['genius_key']
def setup():
    """
    Gathers all configs
    """
    global CONFIG, BING_KEY, GENIUS_KEY, config_path, LOG_FILENAME, LOG_LINE_SEPERATOR

    LOG_FILENAME = 'musicrepair_log.txt'
    LOG_LINE_SEPERATOR = '........................\n'

    CONFIG = configparser.ConfigParser()
    config_path = realpath(__file__).replace(basename(__file__), '')
    config_path = config_path + 'config.ini'

    CONFIG.read(config_path)

    BING_KEY = CONFIG['keys']['bing_key']
def setUpClass(cls):
    """
    Set up an HTTP server to serve the XML files. Set the correct port in
    the IGD.xml URLBase element.
    """
    # Have to chdir here because the py2 SimpleHTTPServer doesn't allow us
    # to change its working directory like the py3 one does.
    os.chdir(path.join(path.dirname(path.realpath(__file__)), 'xml'))
    cls.httpd = sockserver.TCPServer(('127.0.0.1', 0),
                                     httpserver.SimpleHTTPRequestHandler)
    cls.httpd_thread = threading.Thread(target=cls.httpd.serve_forever)
    cls.httpd_thread.daemon = True
    cls.httpd_thread.start()
    cls.httpd_port = cls.httpd.server_address[1]

    with open('upnp/IGD.xml', 'w') as out_f:
        with open('upnp/IGD.xml.templ') as in_f:
            out_f.write(in_f.read().format(port=cls.httpd_port))
def _collect_halo_data_locations(self):
    # The halos are listed in order in the file.
    with open("%s.txt" % self.basename, 'r') as fh:
        lines = fh.readlines()
    locations = []
    realpath = path.realpath("%s.txt" % self.basename)
    for line in lines:
        line = line.split()
        # Prepend the hdf5 file names with the full path.
        temp = []
        for item in line[1:]:
            # This assumes that the .txt is in the same place as
            # the h5 files, which is a good one I think.
            item = item.split("/")
            temp.append(path.join(path.dirname(realpath), item[-1]))
        locations.append(temp)
    return locations
def resolve(self):
    combined = path.realpath(path.join(self.current_dir, self.str_path))

    # matching ../variables/palette to ../variables/palette.scss
    for ext in self.valid_extensions:
        file_path = combined + '.' + ext
        if path.isfile(file_path):
            return file_path

    # matching ../variables/palette to ../variables/_palette.scss
    pathname, filename = path.split(self.str_path)
    combined = path.realpath(path.join(self.current_dir, pathname, '_' + filename))
    for ext in self.valid_extensions:
        file_path = combined + '.' + ext
        if path.isfile(file_path):
            return file_path

    return ''
def teapot():
    """
    Generate the Utah teapot as 32 cubic bezier patches. This teapot has a
    rim, but no bottom. It is also self-intersecting, making it unsuitable
    for perfect-match multipatch modeling.

    The data is picked from http://www.holmes3d.net/graphics/teapot/

    :return: The Utah teapot
    :rtype: List of Surface
    """
    path = join(dirname(realpath(__file__)), 'templates', 'teapot.bpt')
    with open(path) as f:
        results = []
        numb_patches = int(f.readline())
        for i in range(numb_patches):
            p = np.fromstring(f.readline(), dtype=np.uint8, count=2, sep=' ')
            basis1 = BSplineBasis(p[0]+1)
            basis2 = BSplineBasis(p[1]+1)
            ncp = basis1.num_functions() * basis2.num_functions()
            cp = [np.fromstring(f.readline(), dtype=np.float, count=3, sep=' ')
                  for j in range(ncp)]
            results.append(Surface(basis1, basis2, cp))
    return results
def test_load_arff():
    arff_path = realpath(pjoin(dirname(__file__), '../example_datasets/iris.arff'))
    mld = MLDataset(arff_path=arff_path)

    if mld.num_samples != 150:
        raise ValueError('number of samples mismatch')

    if mld.num_features != 4:
        raise ValueError('number of features mismatch')

    if mld.num_classes != 3:
        raise ValueError('number of classes mismatch')

    if len(mld.feature_names) != 4:
        raise ValueError('length of feature names do not match number of features')

    # print(mld)
def _fetch_manifest_complete(self, consumer):
    with open(self._manifest_filename, 'r') as f:
        manifest = json.load(f)

    consumers = []
    for shard in manifest['shards']:
        escaped_filename = urllib.quote(shard['download_uri'], safe='')
        shard_ndn_name = Name(SUBSCRIPTIONS_SOMA).append(
            'shard').append(escaped_filename)
        shard_filename = path.realpath(
            path.join(self._store_dir, 'shard', escaped_filename))
        self._shard_entries.append(
            {'manifest_path': shard['path'], 'cache_path': shard_filename})
        consumer = FileConsumer(
            shard_ndn_name, shard_filename, face=self._face)
        consumers.append(consumer)
        logger.info("Starting consumer: %s", (consumer, ))

    parallel_consumer = Batch(consumers, 'Consumers')
    parallel_consumer.connect('complete', self._on_shards_complete)
    parallel_consumer.start()
def test_read_plink():
    datafiles = join(dirname(realpath(__file__)), 'data_files')
    file_prefix = join(datafiles, 'data')

    (bim, fam, bed) = read_plink(file_prefix)

    assert_array_equal(
        bim.query("chrom=='1' and pos==72515")['snp'], ['rs4030300'])
    assert_array_equal(bim.query("chrom=='1'").shape, [10, 7])
    assert_array_equal(
        fam.query("fid=='Sample_2' and iid=='Sample_2'")["trait"], ['-9'])
    assert_array_equal(bed, array([[2, 2, 1], [2, 1, 2], [nan, nan, nan],
                                   [nan, nan, 1], [2, 2, 2], [2, 2, 2],
                                   [2, 1, 0], [2, 2, 2], [1, 2, 2],
                                   [2, 1, 2]]))
def main(root_script):
    parser = ArgumentParser()
    parser.add_argument('--devel', '-d', action='store_true', default=False,
                        help='Run in developer mode (affects GUI behavior)')
    parser.add_argument('db', nargs='?', default=DEFAULT_DB,
                        help='Path to the database file (sieben.db by default)')
    args = parser.parse_args()
    app = QApplication(sys.argv)
    root = dirname(realpath(root_script))
    if args.devel:
        w = loadUi(join(root, 'ui', 'main-devel.ui'), SiebenAppDevelopment(args.db))
    else:
        w = loadUi(join(root, 'ui', 'main.ui'), SiebenApp(args.db))
    w.use_dot = not args.devel
    w.about = loadUi(join(root, 'ui', 'about.ui'))
    w.setup()
    w.showMaximized()
    sys.exit(app.exec_())
def __init__(self, server_instance, full_name):
    super(Emotes, self).__init__(server_instance, full_name)
    this_dir = dirname(realpath(__file__))
    self.emotes_path = join(this_dir, 'emotesdb')
    self.infodb_path = join(this_dir, 'emote_info_db')
    self.tagdb_path = join(this_dir, 'emote_tag_db')
    self.custom_emote_filename = 'ponybot.json'
    self.tag_list = {}
    self.emote_list = {}
    self.raw_emote_list = []
    self.is_running = True
    self.build_dir(join(self.emotes_path, "tmp"))
    self.blacklist = {}
    self.allow_nsfw = False
    self.config_path = join(self.local_data_dir, "emotes.json")
    self.load_blacklist()
    asyncio.ensure_future(self.build_emote_db())
def _safe_realpath(path):
    try:
        return realpath(path)
    except OSError:
        return path
def load_config():
    config_filenames = (realpath('config.json'),
                        expanduser('~/.gimel/config.json'))
    for config_filename in config_filenames:
        name, content = _load_config(config_filename)
        if content:
            break
    return name, content
async def run_with_reloader(loop, coroutine, cleanup=None, *args, **kwargs):
    """ Run coroutine with reloader """

    clear_screen()
    print("Running in debug mode with live reloading")
    print(" (don't forget to disable it for production)")

    # Create watcher
    handler = Handler(loop)
    watcher = Observer()

    # Setup
    path = realpath(os.getcwd())
    watcher.schedule(handler, path=path, recursive=True)
    watcher.start()

    print(" (watching {})".format(path))

    # Run watcher and coroutine together
    done, pending = await asyncio.wait([coroutine, handler.changed],
                                       return_when=asyncio.FIRST_COMPLETED)

    # Cleanup
    cleanup and cleanup()
    watcher.stop()

    for fut in done:
        # If change event, then reload
        if isinstance(fut.result(), Event):
            print("Reloading...")
            reload()
def calc_tree(parents: Set[int], obj):
    """
    Calculates a json object representing all callable attributes of an object

    Each key represents another attribute and each value is either:
     - Another dictionary with sub-attributes
     - A string containing the function definition

    Args:
        parents: Set of all memory ids of parent objects
                 This prevents infinite recursion
        obj: Python object to recursively look for callable attributes in

    Returns:
        dict or str: dict of all callable attributes or a string to
                     indicate there are none
    """
    if id(obj) in parents:
        return '...'

    try:
        mod = object.__getattribute__(obj, '__module__')
        base = mod.split('.')[0]
        if base != 'mycroft':
            if not check_output(['find', dirname(dirname(realpath(__file__))),
                                 '-name', base + '.py']):
                return get_info(obj)
        else:
            log.debug(mod, base, obj)
    except AttributeError:
        return get_info(obj)

    obj_cls = obj if isclass(obj) else type(obj)
    for cls, handler in [
        (GroupRunner, tree_group_runner),
        (GroupPlugin, tree_group_plugin),
        (OptionPlugin, tree_option_plugin),
        (dict, tree_dict),
        (object, tree_default)
    ]:
        if issubclass(obj_cls, cls):
            return handler(parents | {id(obj)}, obj)
    raise ValueError
def current_directory(full=False):
    """Return the name of the directory containing this plugin"""
    from os.path import dirname, realpath, split
    if full:
        return dirname(realpath(__file__))
    else:
        return split(dirname(realpath(__file__)))[1]