Python schedule module: every() usage examples

The following 50 code examples, extracted from open-source Python projects, illustrate how to use schedule.every().
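
Before the project excerpts, here is the core pattern they all share — a minimal sketch written for this page, not taken from any project below: schedule.every(N).<unit> builds a job, .do() attaches the callable, and a loop calls schedule.run_pending() to fire whatever is due.

import time
import schedule

def job():
    print("working...")

schedule.every(10).minutes.do(job)        # every 10 minutes
schedule.every().day.at("10:30").do(job)  # daily at 10:30

while True:
    schedule.run_pending()  # run any job whose next-run time has passed
    time.sleep(1)           # tick rate: nothing fires more often than this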

Project: core | Author: IntelligentTrading
def handle(self, *args, **options):
        logger.info("Getting ready to trawl Poloniex...")
        schedule.every(1).minutes.do(pull_poloniex_data)

        # @Alex
        # run resampling for all periods and calculate indicator values
        for hor_period in PERIODS_LIST:
            schedule.every(hor_period / time_speed).minutes.do(_resample_then_metrics, {'period': hor_period})


        keep_going = True
        while keep_going:
            try:
                schedule.run_pending()
                time.sleep(1)
            except Exception as e:
                logger.debug(str(e))
                logger.info("Poloniex Trawl shut down.")
                keep_going = False
Project: StockTalk3 | Author: xenu256
def mine(self, query, minePeriod, requestFrequency, analyzeFrequency, requestAmount = 50, similarityCutoff = 90):
        try:
            self.query = query
            self.cutoff = similarityCutoff
            self.amount = requestAmount

            startStr = strftime("[%Y-%m-%d %H:%M:%S]", localtime())
            schedule.every(requestFrequency).seconds.do(self.requestTweets)
            schedule.every(analyzeFrequency).seconds.do(self.analyzeGroup)

            end = time()+minePeriod
            while time() <= end:
                schedule.run_pending()

            endStr = strftime("[%Y-%m-%d %H:%M:%S]", localtime())
            print("Mine complete from\n" + startStr +" - " + endStr +"\n")
        except Exception as e:
            print(e)
Project: Red_Star | Author: medeor413
def activate(self):
        self.run_timer = True
        try:
            with open(self.plugin_config.motd_file, "r", encoding="utf8") as f:
                self.motds = json.load(f)
                schedule.every().day.at("00:00").do(self._display_motd)
                asyncio.ensure_future(self._run_motd())
        except FileNotFoundError:
            with open(self.plugin_config.motd_file, "w", encoding="utf8") as f:
                self.motds = {}
                f.write("{}")
        except json.decoder.JSONDecodeError:
            self.logger.exception(f"Could not decode {self.plugin_config.motd_file}! ", exc_info=True)
        # This is stupid
        self.valid_months = {
            "January", "February", "March", "April", "May", "June", "July",
            "August", "September", "October", "November", "December", "Any"
        }
        self.valid_days = {
            "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday", "Any",
            "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16",
            "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "30", "31"
        }
Project: sensu_drive | Author: ilavender
def handle(self, *args, **kwargs):

        logger.info("%s - starting jobs schedule" % (__name__))

        try:
            '''            
            schedule.every().hour.do(job_update_entities)
            schedule.every().hour.do(job_update_clients)
            schedule.every().hour.do(job_update_checks)
            schedule.every().hour.do(job_update_trends)
            #schedule.every(10).minutes.do(job_update_events)
            '''

            schedule.every(settings.CACHE_ENTITY_TTL).seconds.do(job_update_entities)
            schedule.every(settings.CACHE_CLIENT_TTL).seconds.do(job_update_clients)
            schedule.every(settings.CACHE_TRENDS_TTL).seconds.do(job_update_trends)


            while True:
                schedule.run_pending()
                sleep(1)

        except KeyboardInterrupt:
            logger.info("%s - user signal exit!" % (__name__))
            exit(0)
Project: xiaodi | Author: shenaishiren
def auto_delete_delivery():
    def delete_job():
        from xiaodi.api.mysql import session_scope, Task

        now = datetime.utcnow()
        for deadline, id_ in TaskDepot.scan_delete_task():
            deadline, id_ = tranform_utctime(deadline), int(id_)
            if now < deadline:
                break
            with session_scope() as session:
                delivery = session.query(Task).filter(Task.id == id_).first()
                user = gen_temp_object(id=delivery.puser_id)
                DeliveryManager.delete(user, delivery, session=session)

        LOG.info('auto delete task finished')

    schedule.every(5).minutes.do(delete_job)
Project: xiaodi | Author: shenaishiren
def auto_finish_delivery():
    def finish_job():
        from xiaodi.api.mysql import session_scope, Task

        now = datetime.utcnow()
        for id_, deadline in TaskDepot.scan_finish_task():
            deadline, id_ = tranform_utctime(deadline), int(id_)
            if now < deadline:
                break
            with session_scope() as session:
                delivery = session.query(Task).filter(Task.id == id_).first()
                user = gen_temp_object(id=delivery.puser_id)
                DeliveryManager.finish(user, delivery, session=session)

        LOG.info('auto finish task finished')

    schedule.every(30).minutes.do(finish_job)
Project: pyEfi | Author: lucid281
def madness(self):
        def fast():
            self.pyEfi.redisDb.incr('pyefi:eye:fast')

        def pollRateTest():
            self.pollRateRedis('pyefi:eye:fast')

        schedule.every(1).seconds.do(pollRateTest)
        schedule.every(0.2).seconds.do(fast)

        while 1:
            schedule.run_pending()

            # This is the scheduler tick rate,
            # nothing will poll faster than this.
            sleep(0.005)
Project: fanbot | Author: rpalo
def main(greeting=True):

    bot = Fanbot.create_from_modules(secrets, compliments)

    if greeting:
        bot.hello_world()
    try:
        # Initially hardcoding a schedule.  Could be swapped out later
        # PLACE YOUR CUSTOM SCHEDULES HERE.  SEE SCHEDULE MODULE DOCUMENTATION.
        schedule.every(30).minutes.do(bot.respond_to_tweets)
        schedule.every().day.at("10:30").do(bot.post_compliment)
        while True:
            schedule.run_pending()
            # You can tune the sleep length too.  Should be roughly the same
            # as the most frequently scheduled job above
            time.sleep(30*60) # seconds

    except KeyboardInterrupt:
        if greeting:
            bot.goodbye()
Project: thunderbolt100k | Author: cuyu
def polling():
    for m in sys.modules['thunderbolt100k.widgets'].modules:
        if m.endswith('__init__.py'):
            continue

        widget_name = os.path.basename(m).replace('.py', '')
        widget = getattr(sys.modules['thunderbolt100k.widgets'], widget_name)
        if constants.CONFIG.get('{0}_INTERVAL'.format(widget_name.upper())):
            interval = int(constants.CONFIG.get('{0}_INTERVAL'.format(widget_name.upper())))
        else:
            interval = 5  # Default is 5 min if not specified
        schedule.every(interval).minutes.do(widget_main, widget)
        widget_main(widget)  # Run the function instantly and then schedule
    while True:
        schedule.run_pending()
        time.sleep(60)
Project: ebay | Author: fgscivittaro
def dynamically_scrape_data(filename, link_list, interval, num_retries = 10):
    """
    Repeatedly runs the scraper every time the specified interval has passed
    and continuously appends the data to a file.
    """

    def job():
        scrape_all_data_from_all_featured_products(filename,
                                                   link_list,
                                                   num_retries)
    job()
    schedule.every(interval).minutes.do(job)

    while True:
        schedule.run_pending()
        time.sleep(30)

    print "Dynamic scraping finished"
Project: ebay | Author: fgscivittaro
def clean_links_and_dynamically_scrape(filename, interval, num_retries = 10):
    """
    Repeatedly updates the link list and runs the scraper every time the
    specified interval has passed and continuously appends the data to a file.
    """

    def job():
        clean_links_and_scrape(filename, num_retries)

    job()
    schedule.every(interval).minutes.do(job)

    while True:
        schedule.run_pending()
        time.sleep(30)

    print "Dynamic scraping finished"
Project: hapi | Author: mayaculpa
def prepare_jobs(self, jobs):
        suffixed_names = {
            'week': 'weekly',
            'day': 'daily',
            'hour': 'hourly',
            'minute': 'minutes',
            'second': 'seconds',
        }
        for job in jobs:
            if not job.enabled:
                continue

            interval_name = job.time_unit.lower()
            if job.interval > 0:  # an interval of 0 or less makes no sense (0 minutes? 0 seconds?)
                plural_interval_name = interval_name + 's'
                d = getattr(schedule.every(job.interval), plural_interval_name)
                d.do(self.run_job, job)
                Log.info("  Loading %s job: %s.", suffixed_names[interval_name], job.name)
            elif interval_name == 'day':
                schedule.every().day.at(job.at_time).do(self.run_job, job)
                Log.info("  Loading time-based job: " + job.name)
            else:
                d = getattr(schedule.every(), interval_name)
                d.do(self.run_job, job)
                Log.info("  Loading %s job: %s", interval_name, job.name)
Project: PiAlarm | Author: KyleKing
def display_weather(self):
        if not self._scheduled:
            # Start a fresh thread for weather updates
            cg.send('Starting update_weather()')
            self.update_weather()
            self._checkSchedule = True
            self._scheduled = True
            schedule.every(5).minutes.do(self.update_weather)
            cg.thread(self.run_sched)  # Start a separate thread
        elif self._checkSchedule:
            cg.send('Error: update_weather() is already running')
        elif not self._checkSchedule:
            # Allow the weather updates to continue
            cg.send('Toggling weather updates back on')
            self._checkSchedule = True
            cg.thread(self.run_sched)  # Start a separate thread
        else:
            cg.send('No appropriate display_weather() action...')
Project: health | Author: seecloud
def main():
    config.process_args("HEALTH",
                        default_config_path=cfg.DEFAULT_CONF_PATH,
                        defaults=cfg.DEFAULT,
                        validation_schema=cfg.SCHEMA)
    # Init Elastic index in backend

    for src in CONF["sources"]:
        es.ensure_index_exists(CONF["backend"]["elastic"], src["region"])

    # Setup periodic job that does aggregation magic
    run_every_min = CONF["config"]["run_every_minutes"]
    schedule.every(run_every_min).minutes.do(job)

    job()

    while True:
        schedule.run_pending()
        time.sleep(1)
Project: loggingnight | Author: kdknigga
def enable_housekeeping(run_interval=3600):
    cease_continuous_run = threading.Event()

    class ScheduleThread(threading.Thread):
        @staticmethod
        def run():
            while not cease_continuous_run.is_set():
                schedule.run_pending()
                time.sleep(run_interval)

    continuous_thread = ScheduleThread()
    continuous_thread.start()

    schedule.every(6).hours.do(LoggingNight.garbage_collect_cache)
Project: microurl | Author: francium
def start(job):
    schedule.every(30).minutes.do(job)
    threading.Thread(target=worker).start()
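
The worker target passed to the thread is not part of the excerpt; a sketch of the conventional companion loop, assuming that is what microurl's worker does:

def worker():
    while True:
        schedule.run_pending()
        time.sleep(1)  # tick once per second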
Project: pgbadger-rds-cron | Author: CanopyTax
def build_schedule():
    print('Starting sqlcron. Current time: {}'
          .format(str(datetime.now())))
    interval = int(os.getenv('INTERVAL', '1'))
    unit = os.getenv('UNIT', 'day')
    time_of_day = os.getenv('TIME')

    evaluation_string = 'schedule.every(interval).' + unit
    if time_of_day:
        evaluation_string += '.at(time_of_day)'

    evaluation_string += '.do(run)'
    eval(evaluation_string)
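
The eval() builds the call as a string; the same dynamic dispatch can be done with getattr, as the hapi example earlier on this page does. An equivalent sketch:

# same behavior as the eval() above: look up the unit property by name
job = getattr(schedule.every(interval), unit)
if time_of_day:
    job = job.at(time_of_day)
job.do(run)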
Project: mccelections | Author: mcclatchy
def download_elections():
    call_command('download_elections')

## check if there's an Election today; if so, start checking every minute whether to set live and start import
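
The excerpt stops at that comment; a hedged sketch of the wiring it describes, where check_election_today and the Election model query are assumptions, not project code:

def check_election_today():
    # hypothetical: if an Election falls on today, start minute-level polling
    if Election.objects.filter(date=datetime.date.today()).exists():
        schedule.every(1).minutes.do(download_elections)

schedule.every().day.at("00:00").do(check_election_today)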
Project: xiaodi | Author: shenaishiren
def start_update_engagement():
    def update_job():
        with DBSession() as session:
            for engagement in session.query(Engagement).all():
                en = json.loads(engagement.engagement)
                en.append(0)
                engagement.engagement = json.dumps(en)
                session.add(engagement)
                session.commit()

    schedule.every().day.at('00:00').do(update_job)
Project: argosd | Author: danielkoster
def deferred(self):
        """Called from the thread, schedules pending tasks."""
        schedule.every(10).minutes.do(self._add_rssfeedparsertask)
        schedule.every().minute.do(self._add_episodedownloadtask)

        # Add the RSSFeedParserTask immediately so we don't waste 10 minutes
        self._add_rssfeedparsertask()

        while not self._stop_event.is_set():
            schedule.run_pending()
            sleep(1)
Project: availability | Author: seecloud
def main(period=None):
    """Constantly check services availability.

    This runs infinite availability check with given period.
    :param period: period in seconds
    """
    if not config.get_config().get("regions"):
        LOG.error("No regions configured. Quitting.")
        return 1

    period = period or config.get_config().get("period", 60)

    if SERVICE_CONN_TIMEOUT + SERVICE_READ_TIMEOUT > period:
        LOG.error("Period can not be lesser than timeout, "
                  "otherwise threads could crowd round.")
        return 1

    backend = config.get_config().get("backend")
    if backend["type"] != "elastic":
        LOG.error("Unexpected backend: %(type)s" % backend)
        return 1

    if not storage.get_elasticsearch(check_availability=True):
        LOG.error("Failed to set up Elasticsearch")
        return 1

    LOG.info("Start watching with period %s seconds" % period)
    schedule.every(period).seconds.do(watch_services)

    while True:
        time.sleep(1)
        schedule.run_pending()
Project: nyt-nj-campfin | Author: newsdev
def main():
    '''with open('log.csv', 'r+') as logfile:
    get_filing_list(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5], sys.argv[6].lower() == 'true', sys.argv[7].lower() == 'true', None)'''
    # run the scraping job every 6 hours
    # TODO: move scraping interval to environment var
    schedule.every(6).hours.do(get_filing_list, sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5], sys.argv[6].lower() == 'true', sys.argv[7].lower() == 'true', None)
    while True:
        schedule.run_pending()
        time.sleep(1)
Project: Chaos | Author: Chaosthebot
def schedule_jobs(api, api_twitter):
    schedule.every(settings.PULL_REQUEST_POLLING_INTERVAL_SECONDS).seconds.do(
            lambda: poll_pull_requests(api, api_twitter))
    schedule.every(settings.ISSUE_COMMENT_POLLING_INTERVAL_SECONDS).seconds.do(
            lambda: poll_read_issue_comments(api))
    schedule.every(settings.ISSUE_CLOSE_STALE_INTERVAL_SECONDS).seconds.do(
            lambda: poll_issue_close_stale(api))

    # Call manually the first time, so that we are guaranteed this will run
    # at least once in the interval...
    poll_issue_close_stale(api)
Project: Python-Programming-with-Raspberry-Pi | Author: PacktPublishing
def init_schedule(self):
        # set the schedule
        schedule.every().day.at(self.start_time).do(self.on)
        schedule.every().day.at(self.stop_time).do(self.off)
Project: spoon | Author: Jiramew
def refresher_run(url=None, fetcher=None, database=None, checker=None):
    refresher = Refresher(url_prefix=url, fetcher=fetcher, database=database, checker=checker)
    schedule.every(5).minutes.do(refresher.main)
    while True:
        schedule.run_pending()
        time.sleep(5)
Project: aldente | Author: euniceylee
def job():
    trello = TrelloApi('API-KEY-HERE')

    # get cards from trello
    cards = trello.lists.get_card('LIST-ID-HERE')

    # open premade card_names main file in append mode (a+) to keep track of card names
    card_names = open('./card_names.txt', 'a+')

    # open premade tasks file in append mode (a+) to use for printer
    tasks = open('./tasks.txt', 'a+')

    # iterate over the cards, checking that each card id is NOT in the card_names file
    for obj in cards:
        # if the id is already in the card_names file, do nothing
        if obj['id'] in open('card_names.txt').read():
            print('already in the master list')
        else:  # otherwise record the id and queue the card name for printing
            card_names.write(obj['id'] + '\n')
            tasks.write('\n\n\n\n\n' + obj['name'] + '\n\n\n\n\n:)')
            print("yo i'm printing")

    # print the tasks file if there's content
    if os.stat('tasks.txt').st_size > 0:
        call(['lpr', '-o', 'fit-to-page', 'tasks.txt'])

    # clear the tasks file
    f = open('tasks.txt', 'r+')
    f.truncate()

# schedule job to run the code every 2 seconds
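
The snippet is truncated at that comment; the scheduling it describes would presumably be:

schedule.every(2).seconds.do(job)

while True:
    schedule.run_pending()
    time.sleep(1)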
Project: cachet-url-monitor | Author: mtakaki
def start(self):
        """Sets up the schedule based on the configuration file."""
        schedule.every(self.configuration.data['frequency']).seconds.do(self.execute)
Project: CSCareerQMegathreadSearch | Author: logicx24
def backgroundThread():
    schedule.every().day.at("12:00").do(update)
    #schedule.every(5).minutes.do(update)
    kill_update = threading.Event()

    class SearchUpdateThread(threading.Thread):
        def run(self):
            while not kill_update.is_set():
                schedule.run_pending()
                time.sleep(1*60*60) #Every hour. 

    searchThread = SearchUpdateThread()
    searchThread.setDaemon(True)
    searchThread.start()
Project: discloud | Author: mbouchenoire
async def send_home_forecast(self) -> None:
        def __periodically_send_morning_forecast__() -> None:
            __send_home_forecast__("sending periodic morning weather forecast...",
                                   lambda weathers: list(filter(lambda w: w.date == datetime.date.today(), weathers)))

        def __periodically_send_evening_forecast__() -> None:
            __send_home_forecast__("sending periodic evening weather forecast...",
                                   lambda weathers: list(filter(lambda w: w.date != datetime.date.today(), weathers)))

        def __send_home_forecast__(logging_header: str, weathers_predicate) -> None:
            logging.debug(logging_header)

            forecast = self._weather_service.get_forecast(self._home_settings.full_name, self._measurement_system)

            forecast.weathers = weathers_predicate(forecast.weathers)

            for channel in self._discord_client.get_all_channels():
                if self.__should_send_forecast__(channel):
                    try:
                        asyncio.get_event_loop().run_until_complete(
                            SendForecastDiscordCommand(self._discord_client, channel, forecast).execute())
                    except discord.HTTPException:
                        msg_template = "failed to send home weather forecast (@{}) to channel {}.{}"
                        msg = msg_template.format(self._home_settings.full_name, channel.server.name, channel.name)
                        logging.exception(msg)

        if self._home_settings.morning_forecast_time is not None:
            schedule.every().day.at(self._home_settings.morning_forecast_time)\
                .do(__periodically_send_morning_forecast__)

        if self._home_settings.evening_forecast_time is not None:
            schedule.every().day.at(self._home_settings.evening_forecast_time)\
                .do(__periodically_send_evening_forecast__)

        while True:
            schedule.run_pending()
            await asyncio.sleep(1)
Project: botcycle | Author: D2KLab
def update():
    global bike_info
    try:
        bike_info = update_data(to_update)

    except Exception as e:
        print('something bad happened: ' + str(e))

# schedule execution of update every minute
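
The excerpt is cut off at that comment; the scheduling it describes would presumably be:

schedule.every(1).minutes.do(update)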
Project: crickbuzz_cricket_score | Author: LinuxTerminali
def schedule_match():
    try:
        schedule.every(30).seconds.do(refresh_Scorecard).tag('score_updates', 'task')
        while 1:
            schedule.run_pending()
            time.sleep(1)
    except KeyboardInterrupt:
        quit()

# method used to cancel the schedule match
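
The cancel method itself is not shown; since the job above is tagged, it most likely clears by tag — a sketch (the function name is an assumption):

def cancel_schedule_match():
    schedule.clear('score_updates')  # removes only the jobs carrying this tag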
Project: thermostat_ita | Author: jpnos26
def reloadSchedule():
    with scheduleLock:
        schedule.clear()

        activeSched = None

        with thermostatLock:
            thermoSched = JsonStore( "thermostat_schedule.json" )
            if holdControl.state != "down":
                if heatControl.state == "down":
                    activeSched = thermoSched[ "heat" ]  
                    log( LOG_LEVEL_INFO, CHILD_DEVICE_SCHEDULER, MSG_SUBTYPE_CUSTOM + "/load", "heat" )
            if useTestSchedule: 
                activeSched = getTestSchedule()
                log( LOG_LEVEL_INFO, CHILD_DEVICE_SCHEDULER, MSG_SUBTYPE_CUSTOM + "/load", "test" )
                print "Using Test Schedule!!!"

        if activeSched != None:
            for day, entries in activeSched.iteritems():
                for i, entry in enumerate( entries ):
                    getattr( schedule.every(), day ).at( entry[ 0 ] ).do( setScheduledTemp, entry[ 1 ] )
                    log( LOG_LEVEL_DEBUG, CHILD_DEVICE_SCHEDULER, MSG_SUBTYPE_TEXT, "Set " + day + ", at: " + entry[ 0 ] + " = " + str( entry[ 1 ] ) + scaleUnits )


##############################################################################
#                                                                            #
#       Web Server Interface                                                 #
#                                                                            #
##############################################################################

Project: BYR_BBS_Spider | Author: Dogless-plus
def with_heartbeat(fn):
    # timedtask wrapper
    def call_func(*args):
        loop = 0
        fn(*args)
        schedule.every(HEARTBEAT).seconds.do(fn, *args)
        while 1:
            print("#" * 15, "loop:%s" % loop, "#" * 15)
            schedule.run_pending()
            sleep(HEARTBEAT)
            loop += 1

    return call_func
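
A usage sketch for the decorator above (the crawl function and its argument are hypothetical): once called, the wrapped function runs immediately, is rescheduled every HEARTBEAT seconds, and never returns.

@with_heartbeat
def crawl(board):
    print("crawling", board)

crawl("python")  # runs once now, then every HEARTBEAT seconds, forever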
Project: BYR_BBS_Spider | Author: Dogless-plus
def with_heartbeat_30s(fn):
    # timedtask wrapper
    def call_func(*args):
        HEARTBEAT = 30
        loop = 0
        fn(*args)
        schedule.every(HEARTBEAT).seconds.do(fn, *args)
        while 1:
            print("#" * 15, "loop:%s" % loop, "#" * 15)
            schedule.run_pending()
            sleep(HEARTBEAT)
            loop += 1

    return call_func
Project: BYR_BBS_Spider | Author: Dogless-plus
def with_heartbeat_1d(fn):
    # timedtask wrapper
    def call_func(*args):
        HEARTBEAT = 30
        loop = 0
        fn(*args)
        schedule.every().day.at("10:30").do(fn, *args)
        while 1:
            print("#" * 15, "loop:%s" % loop, "#" * 15)
            schedule.run_pending()
            sleep(HEARTBEAT)
            loop += 1

    return call_func
Project: trax | Author: EliotBerriot
def handle(self, *args, **options):
        schedule.every(1).hours.do(tasks.kill_obsolete_timers)
        schedule.every(5).seconds.do(tasks.send_reminders)

        self.stdout.write(self.style.SUCCESS('Starting job runner...'))
        while True:
            time.sleep(1)
            try:
                schedule.run_pending()
            except:
                traceback.print_exc()
Project: RaspberryPiThermostat | Author: scottpav
def reloadSchedule():
    with scheduleLock:
        schedule.clear()

        activeSched = None

        with thermostatLock:
            thermoSched = JsonStore( "thermostat_schedule.json" )

            if holdControl.state != "down":
                if heatControl.state == "down":
                    activeSched = thermoSched[ "heat" ]  
                    log( LOG_LEVEL_INFO, CHILD_DEVICE_SCHEDULER, MSG_SUBTYPE_CUSTOM + "/load", "heat" )
                elif coolControl.state == "down":
                    activeSched = thermoSched[ "cool" ]  
                    log( LOG_LEVEL_INFO, CHILD_DEVICE_SCHEDULER, MSG_SUBTYPE_CUSTOM + "/load", "cool" )

                if useTestSchedule: 
                    activeSched = getTestSchedule()
                    log( LOG_LEVEL_INFO, CHILD_DEVICE_SCHEDULER, MSG_SUBTYPE_CUSTOM + "/load", "test" )
                    print "Using Test Schedule!!!"

        if activeSched != None:
            for day, entries in activeSched.iteritems():
                for i, entry in enumerate( entries ):
                    getattr( schedule.every(), day ).at( entry[ 0 ] ).do( setScheduledTemp, entry[ 1 ] )
                    log( LOG_LEVEL_DEBUG, CHILD_DEVICE_SCHEDULER, MSG_SUBTYPE_TEXT, "Set " + day + ", at: " + entry[ 0 ] + " = " + str( entry[ 1 ] ) + scaleUnits )


##############################################################################
#                                                                            #
#       Web Server Interface                                                 #
#                                                                            #
##############################################################################
Project: robin-folios | Author: jshoe
def schedule_actions():
    # Example:     nohup python MyScheduledProgram.py &> ~/Desktop/output.log
    # ps auxw to see running ones
    print(datetime.datetime.now())
    print("Starting to run")
    times = ['6:07', '06:24']

    # Buy today's positions
    for set_time in times:
        schedule.every().monday.at(set_time).do(action)
        schedule.every().tuesday.at(set_time).do(action)
        schedule.every().wednesday.at(set_time).do(action)
        schedule.every().thursday.at(set_time).do(action)
        schedule.every().friday.at(set_time).do(action)

    # Sell yesterday's positions
    set_time = '06:01'
    schedule.every().monday.at(set_time).do(sell_scheduled)
    schedule.every().tuesday.at(set_time).do(sell_scheduled)
    schedule.every().wednesday.at(set_time).do(sell_scheduled)
    schedule.every().thursday.at(set_time).do(sell_scheduled)
    schedule.every().friday.at(set_time).do(sell_scheduled)

    while True:
        schedule.run_pending()
        sys.stdout.flush()
        time.sleep(1) # Check every 1 second
Project: xspider | Author: zym1115718204
def run(self):
        schedule.every(1).seconds.do(self.run_threaded, self.run_generator_dispatch)
        schedule.every(1).seconds.do(self.run_threaded, self.run_processor_dispatch)
        schedule.every(1).seconds.do(self.run_threaded, self.run_query_project_status)

        while True:
            schedule.run_pending()
            time.sleep(1)
Project: happyfridays | Author: jessicaraepetersen
def job():
    db.session.query(UserAlbum).delete()

# schedule.every(1).minutes.do(job)
Project: IBTrader | Author: quocble
def schedule_function(self, func, time):
        print("Scheduled function " + func.__name__ + " at " + time)
        schedule.every().day.at(time).do(self.run_on_trading_day, func)
Project: kibitzr | Author: kibitzr
def schedule_checks(checkers):
    schedule.clear()
    for checker in checkers:
        conf = checker.conf
        period = conf["period"]
        logger.info(
            "Scheduling checks for %r every %r seconds",
            conf["name"],
            period,
        )
        schedule.every(period).seconds.do(checker.check)
Project: sh8email-django | Author: triplepy
def run(self):
        def delete_job():
            try:
                Mail.delete_one_day_ago()
            except:
                logger.exception("Exception raised in MailDeleteBatch#run()#delete_job()")
                raise

        schedule.every().hour.do(delete_job)

        while True:
            schedule.run_pending()
            time.sleep(1)
Project: ebay | Author: fgscivittaro
def write_new_file_and_scrape_all_data(filename, link_list, num_retries = 10):
    """
    Writes a new file, scrapes data from every product link in a list, and
    appends each product's data to the previously created file.
    """

    open_new_file(filename)
    scrape_all_data_from_all_featured_products(filename, link_list, num_retries)
Project: ebay | Author: fgscivittaro
def write_new_file_and_dynamically_scrape_all_data(filename,
                                                   link_list,
                                                   interval,
                                                   num_retries = 10):
    """
    Writes a new file and repeatedly runs the scraper every time the specified
    interval has passed and continuously appends the data to a file.
    """

    open_new_file(filename)
    dynamically_scrape_data(filename, link_list, interval, num_retries)
Project: ebay | Author: fgscivittaro
def write_new_file_update_links_and_dynamically_scrape(filename,
                                                       interval,
                                                       num_retries = 10):
    """
    Writes a new file and repeatedly updates the link list and runs the scraper
    every time the specified interval has passed and continuously appends the
    data to a file.
    """

    open_new_file(filename)
    clean_links_and_dynamically_scrape(filename, interval, num_retries)
Project: ebay | Author: fgscivittaro
def dynamically_scrape_and_append_sales_data(filename,
                                             interval,
                                             num_retries = 10):
    """
    Dynamically scrapes sales data and appends the data to a file by generating
    a list of links, checking it against an old list and only keeping new links,
    and scraping those links for sales data.
    """

    old_list = []

    def job():
        nonlocal old_list  # rebind the enclosing old_list, not a local shadow
        new_list = collect_all_featured_links()
        new_links = remove_old_links(old_list, new_list)
        bad_links = collect_bad_links(new_links)
        clean_links = remove_bad_links_from_link_list(bad_links, new_links)

        scrape_and_append_sales_data_from_featured_links(filename,
                                                         clean_links,
                                                         num_retries)

        old_list = new_list

    job()
    schedule.every(interval).hours.do(job)

    while True:
        schedule.run_pending()
        time.sleep(30)

    print("Dynamic scraping finished")  # unreachable: the loop above never exits
Project: ebay | Author: fgscivittaro
def scrape_combined_data_from_all_featured_products(data_filename,
                                                    sales_filename,
                                                    link_list,
                                                    num_retries = 10):
    """
    Scrapes all data from every featured product and appends that data to
    their respective files.
    """

    for url in link_list:
        scrape_and_append_combined_data(url,
                                        data_filename,
                                        sales_filename,
                                        num_retries)
Project: ebay | Author: fgscivittaro
def dynamically_scrape_combined_data(data_filename,
                                     sales_filename,
                                     interval,
                                     num_retries = 10):
    """
    Dynamically scrapes a continuously updated list of unique clean links and
    appends the data to their respective files.
    """

    old_list = []

    def job():
        nonlocal old_list  # rebind the enclosing old_list, not a local shadow
        new_list = collect_all_featured_links()
        new_links = remove_old_links(old_list, new_list)
        bad_links = collect_bad_links(new_links)
        clean_links = remove_bad_links_from_link_list(bad_links, new_links)

        scrape_combined_data_from_all_featured_products(data_filename,
                                                        sales_filename,
                                                        clean_links,
                                                        num_retries)

        old_list = new_list

    job()
    schedule.every(interval).hours.do(job)

    while True:
        schedule.run_pending()
        time.sleep(30)

    print("Dynamic scraping finished")  # unreachable: the loop above never exits
Project: agent | Author: upd89
def main():
    if not _config.is_registered():
        register()
    refreshinstalled()
    system_notify()
    schedule.every(2).hours.do(refreshinstalled)
    schedule.every(10).minutes.do(system_notify)
    schedule.every(30).seconds.do(do_update)

    while True:
        schedule.run_pending()
        sleep(5)