The following 36 code examples, extracted from open-source Python projects, illustrate how to use appdirs.user_cache_dir().
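Before the project examples, a minimal sketch of the call itself (the "myapp"/"myorg" names are illustrative placeholders, not taken from any project below): appdirs.user_cache_dir(appname, appauthor) only computes a platform-appropriate per-user cache path; creating the directory, as most of the examples below do, is left to the caller.

import os

import appdirs

# Compute a per-user cache path, e.g. ~/.cache/myapp on Linux or
# ~/Library/Caches/myapp on macOS. "myapp"/"myorg" are placeholder names.
cache_dir = appdirs.user_cache_dir("myapp", "myorg")

# appdirs only builds the path string; the directory must be created by the caller.
os.makedirs(cache_dir, exist_ok=True)
print(cache_dir)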
def get_cache_dir(subdir=None):
    """
    Function for getting cache directory to store reused files like kernels,
    or scratch space for autotuning, etc.
    """
    cache_dir = os.environ.get("NEON_CACHE_DIR")
    if cache_dir is None:
        cache_dir = appdirs.user_cache_dir("neon", "neon")

    if subdir:
        subdir = subdir if isinstance(subdir, list) else [subdir]
        cache_dir = os.path.join(cache_dir, *subdir)

    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    return cache_dir
def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    return (
        os.environ.get('PYTHON_EGG_CACHE')
        or appdirs.user_cache_dir(appname='Python-Eggs')
    )
def get_api_cache_folder():
    # NOTE: Using app name for author too
    cache = appdirs.user_cache_dir(APP_NAME, APP_NAME)
    if not os.path.isdir(cache):
        os.makedirs(cache)
    return cache
def download_url(url, user_cache_dir, use_cache=True):
    filename = hashlib.sha256(url.encode()).hexdigest()
    filename_bin = os.path.join(user_cache_dir, filename)

    if use_cache and os.path.exists(filename_bin):
        return filename_bin

    print('download firmware from', url)
    print('save as', filename_bin)

    urlretrieve(url, filename_bin, reporthook=download_url_reporthook)

    return filename_bin
def __call__(self, **kwargs):
    user_cache_dir = appdirs.user_cache_dir('bcf')
    repos = Github_Repos(user_cache_dir)
    # search = kwargs.get('prefix', None)
    firmwares = repos.get_firmware_list()
    if self._find_bin:
        firmwares += glob.glob('*.bin')
    return firmwares
def parse_args():
    parser = argparse.ArgumentParser(description="set (near-realtime) picture of Earth as your desktop background",
                                     epilog="http://labs.boramalper.org/himawaripy")

    parser.add_argument("--version", action="version", version="%(prog)s {}.{}.{}".format(*HIMAWARIPY_VERSION))

    group = parser.add_mutually_exclusive_group()

    group.add_argument("--auto-offset", action="store_true", dest="auto_offset", default=False,
                       help="determine offset automatically")

    group.add_argument("-o", "--offset", type=int, dest="offset", default=10,
                       help="UTC time offset in hours, must be less than or equal to +10")

    parser.add_argument("-l", "--level", type=int, choices=[4, 8, 16, 20], dest="level", default=4,
                        help="increases the quality (and the size) of each tile. possible values are 4, 8, 16, 20")

    parser.add_argument("-d", "--deadline", type=int, dest="deadline", default=6,
                        help="deadline in minutes to download all the tiles, set 0 to cancel")

    parser.add_argument("--save-battery", action="store_true", dest="save_battery", default=False,
                        help="stop refreshing on battery")

    parser.add_argument("--output-dir", type=str, dest="output_dir",
                        help="directory to save the temporary background image",
                        default=appdirs.user_cache_dir(appname="himawaripy", appauthor=False))

    args = parser.parse_args()

    if not -12 <= args.offset <= 10:
        sys.exit("OFFSET has to be between -12 and +10!\n")

    if not args.deadline >= 0:
        sys.exit("DEADLINE has to be greater than (or equal to if you want to disable) zero!\n")

    return args
def configure_logging():
    cache_dir = user_cache_dir(appname='spoppy')

    LOG_FILE_NAME = os.path.join(
        cache_dir,
        'spoppy.log'
    )
    LOG_LEVEL = getattr(
        logging,
        os.getenv('SPOPPY_LOG_LEVEL', ''),
        logging.INFO
    )

    if not os.path.isdir(cache_dir):
        os.makedirs(cache_dir)

    logger = logging.getLogger('spoppy')
    logger.setLevel(LOG_LEVEL)

    handler = RotatingFileHandler(
        LOG_FILE_NAME,
        maxBytes=1024 * 1024 * 10,
        backupCount=10,
    )
    handler.setLevel(LOG_LEVEL)

    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    )
    handler.setFormatter(formatter)

    logger.addHandler(handler)

    logger.debug('Logger set up')
def __init__(self, username, password, player):
    if not os.path.isdir(self.user_cache_dir):
        os.makedirs(self.user_cache_dir)
    self.player = player
    self.username = username
    self.password = password
    self._spotipy_token = None
    self._pyspotify_session = None
    self._pyspotify_session_loop = None
    self.service_stop_event = threading.Event()
    self.services = [
        DBusListener(self, self.service_stop_event),
        ResizeChecker(self, self.service_stop_event)
    ]
    self._spotipy_client = Spotify()
    try:
        import alsaaudio  # noqa
        self._sink_klass = spotify.AlsaSink
    except ImportError:
        try:
            import pyaudio  # noqa
            self._sink_klass = spotify.PortAudioSink
        except ImportError:
            raise AudioError(
                'Neither AlsaAudio nor PortAudio is installed. '
                'Please install either of these!'
            )
def get_spotipy_oauth(self):
    client_id = 'ce333851d4db4ba1b6ccf9eaa52345fc'
    client_secret = '549ec6a308cc4836b7144fc42277a6b2'
    redirect_uri = 'http://localhost:8157/'
    cache_location = os.path.join(
        self.user_cache_dir,
        'spotipy_token.cache'
    )
    try:
        # Clean up tokens pre 2.2.1
        # TODO remove soon
        with open(cache_location, 'r') as f:
            contents = f.read()
        data = json.loads(contents)
        if 'scope' in data and data['scope'] is None:
            del data['scope']
            with open(cache_location, 'w') as f:
                f.write(json.dumps(data))
    except IOError:
        pass
    except ValueError:
        logger.warning(
            'ValueError while getting token info', exc_info=True
        )
    return oauth2.SpotifyOAuth(
        client_id,
        client_secret,
        redirect_uri,
        scope=SPOFITY_WEB_API_SCOPE,
        cache_path=cache_location
    )
def fm_index_path(genome):
    """
    Returns a path for cached reference peptides, for the given genome.
    """
    cache_dir = user_cache_dir('vaxrank')
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    return os.path.join(cache_dir, '%s_%d_%d.fm' % (
        genome.species.latin_name,
        genome.release,
        2 if six.PY2 else 3))
def user_cache_dir(appname=None):
    return os.path.join(os.getcwd(), 'tiles')
def parse_args():
    cache_dir = user_cache_dir(appname='route-plotter')
    ap = ArgumentParser(description=__doc__,
                        formatter_class=ArgumentDefaultsHelpFormatter)
    ap.add_argument('--color-tiles', action='store_true',
                    help='Use full-color map tiles.')
    ap.add_argument('--scale', type=float, default=1,
                    help='Figure size scale fraction.')
    ap.add_argument('--zoom', type=int, default=None,
                    help='Map zoom level.')
    ap.add_argument('--tile-cache', type=str, default=cache_dir,
                    help='Directory for storing cached map tiles.')
    ap.add_argument('--save', type=str, help='Save animation to disk.')
    ap.add_argument('--max-margin', type=float, default=0.05,
                    help='Keep up to this fraction of width/height as margin.')
    ap.add_argument('--max-loop-gap', type=float, default=200,
                    help='Maximum gap between start/end points, in meters.')
    ap.add_argument('--max-start-dist', type=float, default=200,
                    help=('Maximum distance from route start to the '
                          'mean starting location, in meters.'))
    ap.add_argument('--num-frames', type=int, default=500,
                    help='Number of frames to animate.')
    ap.add_argument('--fps', type=float, default=10,
                    help='Frames per second.')
    ap.add_argument('--bitrate', type=int, help='Bitrate when saving animation.')
    ap.add_argument('--line-width', type=float, default=2.5, help='Line width.')
    ap.add_argument('--tail-color', type=str, default='blue',
                    help='Color of trailing line.')
    ap.add_argument('--tail-alpha', type=float, default=0.2,
                    help='Opacity of trailing line.')
    ap.add_argument('--head-color', type=str, default='red', help='Head color.')
    ap.add_argument('--head-alpha', type=float, default=1, help='Head opacity.')
    ap.add_argument('--head-fade', type=float, default=90,
                    help=('Number of seconds (in route time) over which the head'
                          ' of the line fades out.'))
    ap.add_argument('route', type=str, nargs='+', help='Route file(s) to use.')
    args = ap.parse_args()
    if len(args.route) == 1:
        args.route = glob.glob(args.route[0])
    args.route = map(open, args.route)
    return args
def setup_h2m_structs_pyclibrary():
    cache_dir = os.path.join(user_cache_dir('kotori'), 'lst')
    if not os.path.isdir(cache_dir):
        os.makedirs(cache_dir)
    lib_dir = os.path.join(os.path.dirname(__file__), 'cpp')
    library = LibraryAdapter(u'h2m_structs.h', u'h2m_structs.so',
                             include_path=lib_dir, library_path=lib_dir,
                             cache_path=cache_dir)
    struct_registry = StructRegistryByID(library)
    return struct_registry
def setup_h2m_structs_cffi():
    cache_dir = os.path.join(user_cache_dir('kotori'), 'lst')
    if not os.path.isdir(cache_dir):
        os.makedirs(cache_dir)
    lib_dir = os.path.join(os.path.dirname(__file__), 'cpp')
    library = LibraryAdapterCFFI(u'h2m_structs.h', u'h2m_structs.so',
                                 include_path=lib_dir, library_path=lib_dir,
                                 cache_path=cache_dir)
    struct_registry = StructRegistryByID(library)
    return struct_registry
def from_header(cls, include_path=None, header_files=None):
    cache_dir = user_cache_dir('lst', 'kotori')
    if not os.path.isdir(cache_dir):
        os.makedirs(cache_dir)
    library = LibraryAdapter(
        header_files, cls.compile(include_path, header_files),
        include_path=include_path, library_path=include_path,
        cache_path=cache_dir)
    return library
def _get_default_cache_path():
    path = appdirs.user_cache_dir('zeep', False)
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise
    return os.path.join(path, 'cache.db')
def get_cache_dir(module_name: Optional[str]) -> str:
    cache_dir = appdirs.user_cache_dir("activitywatch")
    return os.path.join(cache_dir, module_name) if module_name else cache_dir
def run(self, cache=True):
    """Run application."""
    self._query()

    # configure `requests` cache
    if cache:
        cache_dir = appdirs.user_cache_dir('craigslist')
        os.makedirs(cache_dir, exist_ok=True)
        requests_cache.install_cache(
            cache_name=os.path.join(cache_dir, 'craigslist'),
            expire_after=timedelta(hours=0.5))

    print('Running query...\n')

    # record the start time
    start = time.time()

    self.prices = self._getprices()

    # determine elapsed time of queries
    self.duration = time.time() - start

    # remove expired cache entries
    if cache:
        requests_cache.core.remove_expired_responses()

    # print statistics (if any price data exists)
    if self.prices:
        self._print()
    else:
        print('Nothing found for that search.')
def main():
    generators = {"grid": grid_drawer.TracksDrawer(),
                  "calendar": calendar_drawer.TracksDrawer(),
                  "heatmap": heatmap_drawer.TracksDrawer(),
                  "circular": circular_drawer.TracksDrawer()}

    args_parser = argparse.ArgumentParser()
    args_parser.add_argument('--gpx-dir', dest='gpx_dir', metavar='DIR', type=str, default='.',
                             help='Directory containing GPX files (default: current directory).')
    args_parser.add_argument('--output', metavar='FILE', type=str, default='poster.svg',
                             help='Name of generated SVG image file (default: "poster.svg").')
    args_parser.add_argument('--year', metavar='YEAR', type=str, default='all',
                             help='Filter tracks by year; "NUM", "NUM-NUM", "all" (default: all years)')
    args_parser.add_argument('--title', metavar='TITLE', type=str, default="My Tracks",
                             help='Title to display (default: "My Tracks").')
    args_parser.add_argument('--athlete', metavar='NAME', type=str, default="John Doe",
                             help='Athlete name to display (default: "John Doe").')
    args_parser.add_argument('--special', metavar='FILE', action='append', default=[],
                             help='Mark track file from the GPX directory as special; use multiple times to mark '
                                  'multiple tracks.')
    args_parser.add_argument('--type', metavar='TYPE', default='grid', choices=generators.keys(),
                             help='Type of poster to create (default: "grid", available: "{}").'.format(
                                 '", "'.join(generators.keys())))
    args_parser.add_argument('--background-color', dest='background_color', metavar='COLOR', type=str,
                             default='#222222', help='Background color of poster (default: "#222222").')
    args_parser.add_argument('--track-color', dest='track_color', metavar='COLOR', type=str, default='#4DD2FF',
                             help='Color of tracks (default: "#4DD2FF").')
    args_parser.add_argument('--text-color', dest='text_color', metavar='COLOR', type=str, default='#FFFFFF',
                             help='Color of text (default: "#FFFFFF").')
    args_parser.add_argument('--special-color', dest='special_color', metavar='COLOR', default='#FFFF00',
                             help='Special track color (default: "#FFFF00").')
    args_parser.add_argument('--units', dest='units', metavar='UNITS', type=str, choices=['metric', 'imperial'],
                             default='metric', help='Distance units; "metric", "imperial" (default: "metric").')
    args_parser.add_argument('--clear-cache', dest='clear_cache', action='store_true',
                             help='Clear the track cache.')

    args = args_parser.parse_args()

    loader = track_loader.TrackLoader()
    loader.cache_dir = os.path.join(appdirs.user_cache_dir(__app_name__, __app_author__), "tracks")
    if not loader.year_range.parse(args.year):
        raise Exception('Bad year range: {}.'.format(args.year))
    loader.special_file_names = args.special
    if args.clear_cache:
        loader.clear_cache()

    tracks = loader.load_tracks(args.gpx_dir)
    if not tracks:
        raise Exception('No tracks found.')

    print("Creating poster of type '{}' and storing it in file '{}'...".format(args.type, args.output))
    p = poster.Poster(generators[args.type])
    p.athlete = args.athlete
    p.title = args.title
    p.colors = {'background': args.background_color, 'track': args.track_color,
                'special': args.special_color, 'text': args.text_color}
    p.units = args.units
    p.tracks = tracks
    p.draw(args.output)
def setup_sync_dir(self):
    self.cache_dir = appdirs.user_cache_dir("studip", "fknorr")
    self.create_path(self.cache_dir)
    history_file_name = os.path.join(appdirs.user_cache_dir("studip", "fknorr"), "history")
    history = []
    try:
        with open(history_file_name, "r", encoding="utf-8") as file:
            history = list(filter(None, file.read().splitlines()))
    except Exception:
        pass

    skipped_history = 0
    if "sync_dir" in self.command_line:
        sync_dir = self.command_line["sync_dir"]
    else:
        if history and os.path.isdir(history[0]):
            sync_dir = history[0]
            print("Using last sync directory {} ...".format(sync_dir))
        else:
            skipped_history = 1
            default_dir = "~/StudIP"
            for entry in history[1:]:
                skipped_history += 1
                if os.path.isdir(entry):
                    default_dir = entry

            sync_dir = input("Sync directory [{}]: ".format(default_dir))
            if not sync_dir:
                sync_dir = default_dir

    sync_dir = os.path.abspath(os.path.expanduser(sync_dir))

    history = history[skipped_history:]
    while sync_dir in history:
        history.remove(sync_dir)
    history.insert(0, sync_dir)
    self.sync_dir = sync_dir

    try:
        with open(history_file_name, "w", encoding="utf-8") as file:
            file.write("\n".join(history) + "\n")
    except Exception as e:
        self.print_io_error("Unable to write to", history_file_name, e)
        raise ApplicationExit()

    self.dot_dir = os.path.join(self.sync_dir, ".studip")
    self.create_path(self.dot_dir)

    self.config_file_name = os.path.join(self.dot_dir, "studip.conf")
    self.db_file_name = os.path.join(self.dot_dir, "cache.sqlite")