我们从 Python 开源项目中,提取了以下 32 个代码示例,用于说明如何使用 redis.get()。
def get_connection_amqp():
    """Build a blocking pika/AMQP connection from the harvest settings.

    Every setting falls back to its module-level default; a non-numeric
    port value in config silently falls back to PORT.
    """
    try:
        port = int(config.get('ckan.harvest.mq.port', PORT))
    except ValueError:
        port = PORT
    credentials = pika.PlainCredentials(
        config.get('ckan.harvest.mq.user_id', USERID),
        config.get('ckan.harvest.mq.password', PASSWORD))
    parameters = pika.ConnectionParameters(
        host=config.get('ckan.harvest.mq.hostname', HOSTNAME),
        port=port,
        virtual_host=config.get('ckan.harvest.mq.virtual_host', VIRTUAL_HOST),
        credentials=credentials,
        frame_max=10000)
    log.debug("pika connection using %s" % parameters.__dict__)
    return pika.BlockingConnection(parameters)
def gevent_queue(q, msg_queue):
    """Worker-process loop: wait for a wake-up message on *msg_queue*,
    then spawn GEVENT_NUM greenlets that verify proxies from queue *q*.

    Parameters:
        q         -- queue of proxy candidates to verify
        msg_queue -- inter-process control queue used to trigger a round

    Runs forever; errors in one round are logged and the loop continues.
    """
    while True:
        try:
            msg = msg_queue.get(block=True)
            log.debug("PID:%d gevent queue start---------------------->" % os.getpid())
            # The initial "OK" message fans out to the sibling processes so
            # that every test process starts a verification round.
            if TEST_PROCESS_NUM > 1 and msg == "OK":
                for _ in range(TEST_PROCESS_NUM - 1):
                    msg_queue.put(os.getpid())
                log.debug("PID:%d gevent queue call other processes----" % os.getpid())
            glist = [gevent.spawn(verify_ip_in_queues, q) for _ in range(GEVENT_NUM)]
            gevent.joinall(glist)
            # Drain any leftover wake-up messages so one round == one trigger.
            for _ in range(msg_queue.qsize()):
                msg_queue.get()
            log.debug("PID:%d gevent queue end<----------------------" % os.getpid())
        except Exception as e:
            # BUGFIX: use str(e) instead of e.message — .message was
            # deprecated in Python 2.6 and removed in Python 3.
            log.error("PID:%d gevent_queue error:%s" % (os.getpid(), str(e)))
def answer(m):
    """Reply to a user with their profile card (name, rank, message
    count, group status) followed by their registered sticker.

    Users in the Redis 'banlist' set are ignored.  If no sticker is
    registered (or sending fails), prompt the user to submit one.
    """
    banlist = redis.sismember('banlist', '{}'.format(m.from_user.id))
    # sismember returns a boolean — no need for the str() comparison.
    if not banlist:
        try:
            text = bot.get_chat_member(m.chat.id, m.from_user.id).status
            user_id = m.from_user.id  # renamed from `id`: don't shadow the builtin
            rank = redis.hget("user:rank", "{}".format(user_id))
            msgs = redis.get("{}".format(user_id))
            name = m.from_user.first_name
            user = m.from_user.username
            photo = redis.hget('stickers', user_id)
            bot.send_message(m.chat.id, "`Name` : *{}* \n `UserName` = *{}* \n `GlobalRank` : *{}* \n `Position In Group` : *{}* \n\n `Msgs` : *{}*".format(name, user, rank, text, msgs), parse_mode="Markdown")
            bot.send_sticker(m.chat.id, photo)
        except Exception:
            # Narrowed from a bare except; best-effort fallback is intended:
            # no sticker registered (or send failed) -> ask the user for one.
            bot.send_photo(m.chat.id, 'AgADBAADq6cxG3LsuA4NhfzrLPeDz-qCWBkABEgaS8eAZRQfsEkBAAEC', caption="Please Submit One Sticker For Your")
#################################################################################################################################################################################################
def gif(m):
    """Handle /imdb <title>: look the title up on OMDb, send the poster
    as a sticker and the movie metadata as a Markdown message.

    Users in the Redis 'banlist' set are ignored.
    NOTE(review): unknown titles would raise KeyError on the OMDb
    response — confirm whether that is handled by a caller.
    """
    banlist = redis.sismember('banlist', '{}'.format(m.from_user.id))
    if str(banlist) == 'False':
        text = m.text.replace("/imdb ", "")
        r = requests.get('http://www.omdbapi.com/?t={}'.format(text))
        json_data = r.json()
        Year = json_data['Year']
        Title = json_data['Title']
        Released = json_data['Released']
        Runtime = json_data['Runtime']
        Genre = json_data['Genre']
        Director = json_data['Director']
        Language = json_data['Language']
        Poster = json_data['Poster']
        urllib.urlretrieve("{}".format(Poster), "imdb.png")
        # BUGFIX: close the poster file deterministically — the original
        # passed a bare open() result and leaked the handle.
        with open('imdb.png') as poster_file:
            bot.send_sticker(m.chat.id, poster_file)
        bot.send_message(m.chat.id, "*Title* : ``` {}``` \n *Year* : ``` {}```\n *Published* : ``` {}``` \n *Runtime* : ``` {}``` \n *Genre* : ``` {}``` \n *Director* : ``` {}``` \n *Language* : ```{}```".format(Title,Year,Released,Runtime,Genre,Director,Language), parse_mode="Markdown")
#################################################################################################################################################################################################
def ip(m):
    """Geolocate an IP address / hostname via ip-api.com and reply with
    a map pin plus country, city, ISP and timezone details.

    Users in the Redis 'banlist' set are ignored.
    """
    banned = redis.sismember('banlist', '{}'.format(m.from_user.id))
    if str(banned) == 'False':
        target = m.text.split()[1]
        details = requests.get('http://ip-api.com/json/{}?fields=262143'.format(target)).json()
        bot.send_location(m.chat.id, details['lat'], details['lon'])
        bot.send_message(m.chat.id, "*Country* : ```{}``` \n *City* : ```{}``` \n *Isp* : ```{}``` \n *Timezone* : ```{}```".format(details['country'], details['city'], details['isp'], details['timezone']), parse_mode="Markdown")
#################################################################################################################################################################################################
def qq(q):
    """Inline-query handler: always offers Markdown/HTML echo articles
    and, when the query matches a GitHub username, adds a profile photo
    result with the account's public stats.
    """
    l = q.query
    markdown = types.InlineQueryResultArticle('1', 'Markdown', types.InputTextMessageContent('{}'.format(l), parse_mode='Markdown'), thumb_url='http://uupload.ir/files/cd0k_m.jpg', description='Send Text With Markdown Styles')
    html = types.InlineQueryResultArticle('2', 'HTML', types.InputTextMessageContent('{}'.format(l), parse_mode='HTML'), thumb_url='http://uupload.ir/files/dc49_h.jpg', description='Send Text With HTML Styles')
    r = requests.get('https://api.github.com/users/{}'.format(l))
    json_data = r.json()
    # 'avatar_url' is only present for a successful user lookup.
    if 'avatar_url' in json_data:
        url_html = json_data['html_url']
        name = json_data['name']
        blog = json_data['blog']
        location = json_data['location']
        bio = json_data['bio']
        public_repos = json_data['public_repos']
        followers = json_data['followers']
        following = json_data['following']
        avatar_url = json_data['avatar_url']
        # Dropped unused locals from the original (`typee`, `company`, and
        # the `avtar` result that was never passed to answer_inline_query).
        avatar = types.InlineQueryResultPhoto('3', '{}'.format(avatar_url), '{}'.format(avatar_url), description='avatar', caption='Name : {}\nUrl : {}\nBlog : {}\nLocation : {}\nBio : {}\n\nRepos : {}\nFollowers : {}\nFollowing : {}'.format(name, url_html, blog, location, bio, public_repos, followers, following))
        bot.answer_inline_query(q.id, [markdown, html, avatar], cache_time=1)
#################################################################################################################################################################################################
def gif(m):
    """Send the current currency / gold quotes (nalbandan.com exchange
    API) to the chat as a Markdown message."""
    rates = requests.get('http://exchange.nalbandan.com/api.php?action=json').json()

    def pair(key):
        # (display name, numeric value) for one quote entry
        return rates[key]['persian'], rates[key]['value']

    date = rates['dollar']['date']
    dollar, dollar1 = pair('dollar')
    dollar_rasmi, dollar_rasmi1 = pair('dollar_rasmi')
    euro, euro1 = pair('euro')
    gold_per_geram, gold_per_geram1 = pair('gold_per_geram')
    coin_new, coin_new1 = pair('coin_new')
    pond, pond1 = pair('pond')
    # NOTE(review): both `derham` and `coin_old` read the 'coin_old'
    # entry — looks like a copy/paste slip, kept to preserve behavior.
    derham, derham1 = pair('coin_old')
    coin_old, coin_old1 = pair('coin_old')
    bot.send_message(m.chat.id, "???? ??? ???? ???? ?? ????? : ``` {}``` \n ?? ??? ??? ??? : \n\n {} ?? ???? {} ???? \n\n {} ?? ???? {} ???? \n\n {} ?? ???? {} ???? \n\n {} ?? ???? {} ???? \n\n {} ?? ???? {} ???? \n\n {} ?? ???? {} ???? \n\n {} ?? ???? {} ???? \n\n {} ?? ???? {} ???? ".format(date,dollar,dollar1,dollar_rasmi,dollar_rasmi1,euro,euro1,gold_per_geram,gold_per_geram1,coin_new,coin_new1,pond,pond1,derham,derham1,coin_old,coin_old1), parse_mode="Markdown")
#################################################################################################################################################################################################
def finish_test(results, repo, ref):
    """Release the per-repo lock and post the final commit status.

    *results* is a list of (lock_token, config_ok, tests_ok) tuples, one
    per test run for this repo/ref; the overall status is the AND of the
    per-run flags.
    """
    base_repo = redis.get("source:" + repo).decode("utf-8")
    lock = redis.lock(base_repo)
    # Reattach the token acquired by the task that started the test.
    lock.local.token = results[0][0]
    print("releasing lock " + base_repo)
    try:
        lock.release()
    except redis.exceptions.LockError:
        print("lock already released")
    config_ok = all(result[1] for result in results)
    tests_ok = all(result[2] for result in results)
    if not config_ok:
        final_status(repo, ref, "error", "An error occurred while running the tests.")
    elif not tests_ok:
        final_status(repo, ref, "failure", "One or more tests failed.")
    else:
        final_status(repo, ref, "success", "All tests passed.")
def verify_ip_in_queues(q):
    """Greenlet worker: pull proxy candidates off queue *q* and verify them.

    For each item: test against the generic TEST_URL; on success either
    add the proxy to its `dest_cache` set or insert it into the DB, then
    (for fresh, non-DB entries) probe every DEST_URL target.  On failure,
    DB-sourced entries are deleted.  Per-item errors (including q.get
    timeouts) are logged and the loop continues.
    """
    r = redis.StrictRedis(REDIS_SERVER, REDIS_PORT, DB_FOR_IP)
    while True:
        try:
            item = q.get(timeout=QUEUE_TIMEOUT)
            log.debug("PID:%d verify_ip_in_queues dict infos:%s" % (os.getpid(), json.dumps(item)))
            ret, time = test_url(item["ip_port"], item["type"], r)
            if ret:
                # BUGFIX: `in` replaces dict.has_key(), which only exists
                # on Python 2.
                if "dest_cache" in item:
                    r.sadd(item["dest_cache"], item["ip_port"])
                else:
                    db_insert(item["ip_port"], item["type"], time, r)
                # Items re-checked from the DB skip the DEST_URL probes.
                if item["db_flag"]:
                    continue
                for i in range(len(DEST_URL)):
                    flag, time = test_dest_url(item["ip_port"], item["type"], DEST_URL[i], r)
                    if flag:
                        db_insert_dest(DEST_URL[i]["name"], item["ip_port"], item["type"], time, r)
            else:
                if item["db_flag"]:
                    log.debug("PID:%d queue ip delete:%s" % (os.getpid(), item["ip_port"]))
                    db_delete(item["ip_port"], r)
        except Exception as e:
            # BUGFIX: str(e) instead of e.message (Python-2-only attribute).
            log.error("PID:%d queue error:%s" % (os.getpid(), str(e)))
    return
def clac(m):
    """Reply to /get with the stored "help" text from Redis."""
    text = m.text.replace("/get", "")  # stripped command text (currently unused)
    link = redis.get("help")
    bot.send_message(m.chat.id, "{}".format(link), parse_mode="Markdown")
#################################################################################################################################################################################################
def ip(m):
    """Base64-encode the given text via a remote helper service and
    reply with the result as inline code.

    Users in the Redis 'banlist' set are ignored.
    """
    banned = redis.sismember('banlist', '{}'.format(m.from_user.id))
    if str(banned) == 'False':
        payload = m.text.split()[1]
        resp = requests.get('http://bot-negative23.rhcloud.com/s.php?text={}'.format(payload))
        code = resp.json()['base64']
        bot.send_message(m.chat.id, "`{}`".format(code), parse_mode="Markdown")
#################################################################################################################################################################################################
def get_connection():
    """Return a queue connection for the configured backend.

    Raises Exception for an unrecognized backend value.
    """
    backend = config.get('ckan.harvest.mq.type', MQ_TYPE)
    if backend == 'redis':
        return get_connection_redis()
    # "ampq" is accepted for compatibility with an old typo.
    elif backend in ('amqp', 'ampq'):
        return get_connection_amqp()
    raise Exception('not a valid queue type %s' % backend)
def get_connection_redis():
    """Return a StrictRedis client configured from the harvest settings."""
    import redis  # local import: only needed when the redis backend is used
    host = config.get('ckan.harvest.mq.hostname', HOSTNAME)
    port = int(config.get('ckan.harvest.mq.port', REDIS_PORT))
    db = int(config.get('ckan.harvest.mq.redis_db', REDIS_DB))
    return redis.StrictRedis(host=host, port=port, db=db)
def get_gather_queue_name():
    """Name of this site's gather queue."""
    site_id = config.get('ckan.site_id', 'default')
    return 'ckan.harvest.{0}.gather'.format(site_id)
def get_fetch_queue_name():
    """Name of this site's fetch queue."""
    site_id = config.get('ckan.site_id', 'default')
    return 'ckan.harvest.{0}.fetch'.format(site_id)
def get_fetch_routing_key():
    """Routing key under which pending harvest-object ids are tracked."""
    site_id = config.get('ckan.site_id', 'default')
    return 'ckanext-harvest:{0}:harvest_object_id'.format(site_id)
def purge_queues():
    """Empty both harvest queues (gather and fetch) on the active backend."""
    backend = config.get('ckan.harvest.mq.type', MQ_TYPE)
    connection = get_connection()
    if backend == 'redis':
        get_gather_consumer().queue_purge()
        log.info('Redis gather queue purged')
        get_fetch_consumer().queue_purge()
        log.info('Redis fetch queue purged')
    elif backend in ('amqp', 'ampq'):
        channel = connection.channel()
        for queue_name in (get_gather_queue_name(), get_fetch_queue_name()):
            channel.queue_purge(queue=queue_name)
            log.info('AMQP queue purged: %s', queue_name)
def resubmit_jobs():
    '''
    Examines the fetch and gather queues for items that are suspiciously
    old. These are removed from the queues and placed back on them afresh,
    to ensure the fetch & gather consumers are triggered to process them.

    Only applies to the redis backend.
    '''
    if config.get('ckan.harvest.mq.type') != 'redis':
        return
    redis = get_connection()

    # fetch queue: resubmit anything pending for more than 3 minutes
    harvest_object_pending = redis.keys(get_fetch_routing_key() + ':*')
    for key in harvest_object_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # BUGFIX: total_seconds() instead of .seconds — .seconds is only
        # the seconds *component* of the timedelta (wraps at one day), so
        # items stuck for over a day could be skipped forever.
        if (datetime.datetime.now() - date_of_key).total_seconds() > 180:
            redis.rpush(get_fetch_routing_key(),
                        json.dumps({'harvest_object_id': key.split(':')[-1]}))
            redis.delete(key)

    # gather queue: resubmit anything pending for more than 2 hours
    # NOTE(review): the original comment said "3 hours" but the threshold
    # is 7200 seconds (2 hours).
    harvest_jobs_pending = redis.keys(get_gather_routing_key() + ':*')
    for key in harvest_jobs_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        if (datetime.datetime.now() - date_of_key).total_seconds() > 7200:
            redis.rpush(get_gather_routing_key(),
                        json.dumps({'harvest_job_id': key.split(':')[-1]}))
            redis.delete(key)
def get_publisher(routing_key):
    """Create a publisher bound to *routing_key* on the configured backend.

    Falls through (returning None) for an unrecognized backend.
    """
    connection = get_connection()
    backend = config.get('ckan.harvest.mq.type', MQ_TYPE)
    if backend == 'redis':
        return RedisPublisher(connection, routing_key)
    if backend in ('amqp', 'ampq'):
        channel = connection.channel()
        channel.exchange_declare(exchange=EXCHANGE_NAME, durable=True)
        return Publisher(connection, channel, EXCHANGE_NAME,
                         routing_key=routing_key)
def get_consumer(queue_name, routing_key):
    """Create a consumer for *queue_name*/*routing_key*: an AMQP channel
    with the queue declared and bound, or a RedisConsumer.

    Falls through (returning None) for an unrecognized backend.
    """
    connection = get_connection()
    backend = config.get('ckan.harvest.mq.type', MQ_TYPE)
    if backend == 'redis':
        return RedisConsumer(connection, routing_key)
    if backend in ('amqp', 'ampq'):
        channel = connection.channel()
        channel.exchange_declare(exchange=EXCHANGE_NAME, durable=True)
        channel.queue_declare(queue=queue_name, durable=True)
        channel.queue_bind(queue=queue_name, exchange=EXCHANGE_NAME,
                           routing_key=routing_key)
        return channel
def get_key(key, callback):
    """Fetch *key* from the async Redis connection and invoke *callback*
    with the decoded value, or with the raw value when it cannot be
    decoded (e.g. a missing key yields None).

    The lookup runs as a background asyncio task; this function returns
    immediately.
    """
    async def wrapper():
        with await connection as redis:
            value = await redis.get(key)
            try:
                decoded = value.decode()
            except (AttributeError, UnicodeDecodeError):
                # value is None (missing key) or not valid UTF-8.
                # Narrowed from a bare except, and decode-before-callback
                # so that an error raised inside the callback itself is no
                # longer swallowed and retried with the raw value.
                callback(value)
            else:
                callback(decoded)
    asyncio.ensure_future(wrapper())
def get_key_blocking(key, default=None):
    """Synchronous lookup of *key*; returns the stored bytes, or the
    stringified *default* encoded to bytes when the key is absent/empty.

    NOTE(review): a None default comes back as b'None', not None —
    confirm callers expect that.
    """
    value = blocking_connection.get(key)
    if value:
        return value
    return str(default).encode()
def dump_all(redis=r):
    """Return every key in the Redis DB as a dict of {key: eval(value)}.

    Values that fail to evaluate are printed and skipped.

    WARNING: eval() on stored values executes arbitrary code — only use
    this against a fully trusted database.
    """
    pairs = {}
    for key in redis.keys('*'):
        # (dropped the original's unused `type = redis.type(key)` lookup,
        # which also shadowed the builtin)
        val = redis.get(key)
        try:
            pairs[key] = eval(val)
        except Exception as e:
            # BUGFIX: print-function form works on Python 2 and 3 with the
            # same space-separated output (the old `print a, b` statement
            # is a syntax error on Python 3).
            print("%s %s %s %s" % (pairs, key, val, e))
    return pairs
def load_code(repo, ref):
    """Clone/fetch *repo* at *ref* into repos/<base_repo>, deduplicating
    forks: git objects are stored once under the fork's source repo.

    The GitHub "source" lookup is cached in Redis under "source:<repo>"
    (the sentinel string "source" marks a repo that is not a fork).
    """
    print("loading code from " + repo)
    os.chdir(cwd)
    # Look up our original repo so that we only load objects once.
    base_repo = redis.get("source:" + repo)
    if base_repo is None:
        r = requests.get("https://api.github.com/repos/" + repo,
                         auth=(config["overall"]["github-username"],
                               github_personal_access_token))
        r = r.json()
        base_repo = "source"
        if "source" in r:
            base_repo = r["source"]["full_name"]
        redis.set("source:" + repo, base_repo)
    # BUGFIX: decode before comparing, and compare with == rather than
    # `is` — identity comparison against a string literal is interning-
    # dependent and never matches the bytes value read back from Redis,
    # so cached non-fork repos previously resolved to "repos/source".
    if type(base_repo) is bytes:
        base_repo = base_repo.decode("utf-8")
    if base_repo == "source":
        base_repo = repo
    print("Source repo of " + repo + " is " + base_repo)
    repo_path = "repos/" + base_repo
    github_base_url = "https://github.com/" + base_repo + ".git"
    github_head_url = "https://github.com/" + repo + ".git"
    print("waiting for repo lock")
    with redis.lock(base_repo, timeout=5 * 60, blocking_timeout=20 * 60):
        if not os.path.isdir(repo_path):
            os.makedirs(repo_path)
            git.clone(github_base_url, repo_path)
            # We must make .tmp after cloning because cloning will fail
            # when the directory isn't empty.
            os.makedirs(repo_path + "/.tmp")
        os.chdir(repo_path)
        git.fetch(github_head_url, ref)
    print("loaded", repo, ref)
def start_test(self, repo, ref):
    """Begin a Rosie test run for repo@ref: grab the per-repo lock,
    check out the ref and report a pending commit status.

    While another run holds the lock, this Celery task re-queues itself
    every 30 seconds, up to 25 tries.  Returns the lock token so a later
    task can release the lock.
    """
    base_repo = redis.get("source:" + repo).decode("utf-8")
    repo_lock = redis.lock(base_repo, timeout=60 * 60)
    log_key = "log:" + repo + "/" + ref
    log_url = "https://rosie-ci.ngrok.io/log/" + repo + "/" + ref
    print("grabbing lock " + base_repo)
    if not repo_lock.acquire(blocking=False):
        # Lock is busy: warn once near the retry limit, then requeue.
        if self.request.retries == 24:
            set_status(repo, ref, "error", log_url,
                       "Hit max retries. Please ping the owner.")
        raise self.retry(countdown=30, max_retries=25)
    print("Lock grabbed " + base_repo)
    redis.set("owner-" + base_repo, log_url)
    set_status(repo, ref, "pending", log_url, "Commencing Rosie test.")
    os.chdir(cwd + "/repos/" + base_repo)
    try:
        redis.append(log_key, git.checkout(ref))
    except sh.ErrorReturnCode_128 as e:
        print("error 128")
        redis.append(log_key, e.full_cmd + "\n" + e.stdout.decode('utf-8') +
                     "\n" + e.stderr.decode('utf-8'))
        final_status(repo, ref, "error", "Git error in Rosie.")
    except sh.ErrorReturnCode_1 as e:
        print("error 1")
        redis.append(log_key, e.full_cmd + "\n" + e.stdout.decode('utf-8') +
                     "\n" + e.stderr.decode('utf-8'))
        final_status(repo, ref, "error", "Git checkout error in Rosie.")
    print("test started " + log_url)
    return repo_lock.local.token.decode("utf-8")
def _get_travis_public_key():
    """Fetch Travis CI's webhook-signing public key (PEM string).

    Raises requests.Timeout / requests.HTTPError on failure.
    """
    resp = requests.get("https://api.travis-ci.org/config", timeout=10.0)
    resp.raise_for_status()
    cfg = resp.json()['config']
    return cfg['notifications']['webhook']['public_key']
def log(owner, repo, sha):
    """Serve the stored test log for owner/repo@sha as plain text
    (404 when no log exists)."""
    content = redis.get("log:" + owner + "/" + repo + "/" + sha)
    if not content:
        abort(404)
    return Response(content, mimetype='text/plain; charset=utf-8')
def test_dest_url(ip, is_http, dest_infos, redis=None):
    """Probe one destination site through proxy *ip*.

    Parameters:
        ip         -- "host:port" proxy address
        is_http    -- index into TYPES selecting the proxy scheme
        dest_infos -- dict with 'name', 'url', 'store_cookies',
                      'use_default_cookies' (and 'default_cookies')
        redis      -- optional Redis client used to persist per-(site, ip)
                      cookies between probes

    Returns (ok, elapsed_ms).  All request errors are logged and yield
    (False, 0).
    """
    name = dest_infos["name"]
    url = dest_infos["url"]
    store_cookies = dest_infos["store_cookies"]
    use_default_cookies = dest_infos["use_default_cookies"]
    pro = {TYPES[is_http]: ip}
    time = 0
    flag = False
    try:
        r = None
        cookie_old = None
        r_cookies_key = "%s:%s" % (name, ip)
        # `is not None` replaces the `!= None` comparisons (None idiom).
        if store_cookies and redis is not None:
            cookie_old = redis.get(r_cookies_key)
            if cookie_old is not None and cookie_old != "None" and cookie_old != "{}":
                log.debug("PID:%d IP:%s use old cookies:%s " % (os.getpid(), ip, cookie_old))
                cookies = cookiejar_from_dict(json.loads(cookie_old))
                r = requests.get(url, proxies=pro, cookies=cookies, timeout=SOKCET_TIMEOUT)
            else:
                if use_default_cookies:
                    rand_cookies = dest_infos["default_cookies"]
                    log.debug("PID:%d IP:%s use random cookies:%s " % (os.getpid(), ip, str(rand_cookies)))
                    cookie = cookiejar_from_dict(rand_cookies)
                    r = requests.get(url, proxies=pro, cookies=cookie, timeout=SOKCET_TIMEOUT)
                else:
                    r = requests.get(url, proxies=pro, timeout=SOKCET_TIMEOUT)
        else:
            if use_default_cookies:
                cookie = cookiejar_from_dict(dest_infos["default_cookies"])
                r = requests.get(url, proxies=pro, cookies=cookie, timeout=SOKCET_TIMEOUT)
            else:
                r = requests.get(url, proxies=pro, timeout=SOKCET_TIMEOUT)
        time += r.elapsed.microseconds / 1000
        log.debug("PID:%d dest url:%s proxy ip:%s result:%d time:%d type:%s" % (os.getpid(), url, ip, r.status_code, time, TYPES[is_http]))
        if r.ok:
            flag = True
            # Persist any new/changed cookies for the next probe of this
            # (site, proxy) pair.
            if store_cookies and redis is not None:
                if r.cookies is not None:
                    cookie = json.dumps(dict_from_cookiejar(r.cookies))
                    if cookie and cookie != "{}" and cookie_old != cookie:
                        log.debug("PID:%d IP:%s new cookies:%s old cookies:%s" % (os.getpid(), ip, cookie, cookie_old))
                        redis.set(r_cookies_key, cookie)
    except Exception as e:
        # BUGFIX: str(e) instead of e.message (Python-2-only attribute).
        log.debug("PID:%d error:%s" % (os.getpid(), str(e)))
    return flag, time
def test_url(ip, is_http, redis=None):
    """Check proxy *ip* against the generic TEST_URL.

    Parameters:
        ip      -- "host:port" proxy address
        is_http -- index into TYPES selecting the proxy scheme
        redis   -- optional Redis client used to persist per-IP cookies
                   between checks when STORE_COOKIE is enabled

    Returns (ok, elapsed_ms).  All request errors are logged and yield
    (False, 0).
    """
    pro = {TYPES[is_http]: ip}
    time = 0
    flag = False
    try:
        r = None
        cookie_old = None
        # `is not None` replaces the `!= None` comparisons (None idiom).
        if STORE_COOKIE and redis is not None:
            cookie_old = redis.get(ip)
            if cookie_old is not None and cookie_old != "None" and cookie_old != "{}":
                log.debug("PID:%d IP:%s use old cookies:%s " % (os.getpid(), ip, cookie_old))
                cookies = cookiejar_from_dict(json.loads(cookie_old))
                r = requests.get(TEST_URL, proxies=pro, cookies=cookies, timeout=SOKCET_TIMEOUT)
            else:
                if USE_DEFAULT_COOKIE:
                    rand_cookies = {"bid": random_str()}
                    log.debug("PID:%d IP:%s use random cookies:%s " % (os.getpid(), ip, str(rand_cookies)))
                    cookie = cookiejar_from_dict(rand_cookies)
                    r = requests.get(TEST_URL, proxies=pro, cookies=cookie, timeout=SOKCET_TIMEOUT)
                else:
                    r = requests.get(TEST_URL, proxies=pro, timeout=SOKCET_TIMEOUT)
        else:
            if USE_DEFAULT_COOKIE:
                cookie = cookiejar_from_dict({"bid": random_str()})
                r = requests.get(TEST_URL, proxies=pro, cookies=cookie, timeout=SOKCET_TIMEOUT)
            else:
                r = requests.get(TEST_URL, proxies=pro, timeout=SOKCET_TIMEOUT)
        time += r.elapsed.microseconds / 1000
        log.debug("PID:%d Test IP:%s result:%d time:%d type:%s" % (os.getpid(), ip, r.status_code, time, TYPES[is_http]))
        if r.ok:
            flag = True
            # Persist any new/changed cookies for the next check of this IP.
            if STORE_COOKIE and redis is not None:
                if r.cookies is not None:
                    cookie = json.dumps(dict_from_cookiejar(r.cookies))
                    if cookie and cookie != "{}" and cookie_old != cookie:
                        log.debug("PID:%d IP:%s new cookies:%s old cookies:%s" % (os.getpid(), ip, cookie, cookie_old))
                        redis.set(ip, cookie)
    except Exception as e:
        # BUGFIX: str(e) instead of e.message (Python-2-only attribute).
        log.debug("PID:%d error:%s" % (os.getpid(), str(e)))
    return flag, time
def query_text(query):
    """Answer an inline query with four articles: the user's own info,
    the current currency rates, a random joke, and the current date/time.
    """
    user = query.from_user.username
    name = query.from_user.first_name
    lname = query.from_user.last_name
    uid = query.from_user.id
    markup = types.InlineKeyboardMarkup()
    markup.add(types.InlineKeyboardButton('{}'.format(lname), url="https://telegram.me/{}".format(user)))
    thumb_url = 'http://uupload.ir/files/7d23_download.png'
    info = types.InlineQueryResultArticle('1', 'Your Info', types.InputTextMessageContent('` Username` : @{}\nYour ` First Name` : *{}*\n` Your LastName` : *{}*\n` Your ID` : *{}*'.format(user, name, lname, uid), parse_mode="Markdown"), reply_markup=markup, thumb_url=thumb_url)
    # Random joke pulled from a remote comma-separated dump.
    text = urllib.urlopen("http://vip.opload.ir/vipdl/94/11/amirhmz/joke.db").read()
    text1 = text.split(",")
    last = random.choice(text1)
    joke = types.InlineQueryResultArticle('4', 'Joke', types.InputTextMessageContent('{}'.format(last)), thumb_url='http://uupload.ir/files/sfxc_download.jpg')
    url = req.get('http://exchange.nalbandan.com/api.php?action=json')
    data = url.json()
    date = data['dollar']['date']
    dollar = data['dollar']['persian']
    dollar1 = data['dollar']['value']
    dollar_rasmi = data['dollar_rasmi']['persian']
    dollar_rasmi1 = data['dollar_rasmi']['value']
    euro = data['euro']['persian']
    euro1 = data['euro']['value']
    gold_per_geram = data['gold_per_geram']['persian']
    gold_per_geram1 = data['gold_per_geram']['value']
    coin_new = data['coin_new']['persian']
    coin_new1 = data['coin_new']['value']
    pond = data['pond']['persian']
    pond1 = data['pond']['value']
    # NOTE(review): `derham` reads the 'coin_old' entry, same as
    # `coin_old` — looks like a copy/paste slip; kept to preserve output.
    derham = data['coin_old']['persian']
    derham1 = data['coin_old']['value']
    coin_old = data['coin_old']['persian']
    coin_old1 = data['coin_old']['value']
    time_tmp = 'http://uupload.ir/files/66yl_download_(2).png'
    # BUGFIX(extraction): this string literal was split across two physical
    # lines in the corrupted source (a syntax error); rejoined into one
    # literal here — TODO confirm against the original formatting.
    dollar = types.InlineQueryResultArticle('3', 'Dollar', types.InputTextMessageContent("???? ??? ???? ???? ?? ????? : ``` {}``` \n ?? ??? ??? ??? : \n\n `{}` ?? ???? *{}* ???? \n\n `{}` ?? ???? *{}* ???? \n\n `{}` ?? ???? *{}* ???? \n\n `{}` ?? ???? *{}* ???? \n\n `{}` ?? ???? *{}* ???? \n\n `{}` ?? ???? *{}* ???? \n\n `{}` ?? ???? *{}* ???? \n\n `{}` ?? ???? *{}* ???? ".format(date, dollar, dollar1, dollar_rasmi, dollar_rasmi1, euro, euro1, gold_per_geram, gold_per_geram1, coin_new, coin_new1, pond, pond1, derham, derham1, coin_old, coin_old1), parse_mode='Markdown'), thumb_url=time_tmp)
    url = req.get('http://api.gpmod.ir/time/')
    data = url.json()
    FAdate = data['FAdate']
    FAtime = data['FAtime']
    ENdate = data['ENdate']
    ENtime = data['ENtime']
    time_tmp = 'http://uupload.ir/files/zneb_download_(1).png'
    timesend = types.InlineQueryResultArticle('2', 'Time', types.InputTextMessageContent('`{}` : *????* `{}` \n\n `{}` *Time* : `{}`'.format(FAdate, FAtime, ENdate, ENtime), parse_mode='Markdown'), thumb_url=time_tmp)
    bot.answer_inline_query(query.id, [info, dollar, joke, timesend], cache_time=5, switch_pm_text='Start bot')
#################################################################################################################################################################################################
# -*- coding: utf-8 -*-
def handle_text_message(event):
    """Planning-poker message handler for a LINE chat.

    The trigger word starts a new round (guarded by a Redis-backed
    mutex); a "#<round> <value>" message records a vote.  The first vote
    of a round waits VOTE_MUTEX_TIMEOUT, posts the tally and marks the
    round complete; later votes just increment the count.
    """
    text = event.message.text
    sourceId = getSourceId(event.source)
    matcher = re.match(r'^#(\d+) (.+)', text)
    if text == '???':
        # Start a new round, but only if no round is already in flight.
        poker_mutex = Mutex(redis, POKER_MUTEX_KEY_PREFIX + sourceId)
        poker_mutex.lock()
        if poker_mutex.is_lock():
            round_no = str(redis.incr(sourceId)).encode('utf-8')
            line_bot_api.reply_message(
                event.reply_token,
                generate_planning_poker_message(round_no))
            time.sleep(POKER_MUTEX_TIMEOUT)
            if poker_mutex.is_lock():
                poker_mutex.unlock()
    elif matcher is not None:
        number = matcher.group(1)
        value = matcher.group(2)
        current = redis.get(sourceId).encode('utf-8')
        vote_key = sourceId + number
        status = redis.hget(vote_key, 'status')
        if status is not None:
            # Round already tallied — tell the voter it is over.
            line_bot_api.reply_message(
                event.reply_token,
                TextMessage(text=MESSAGE_END_POKER.format(number)))
            return
        if number != current:
            line_bot_api.reply_message(
                event.reply_token,
                TextMessage(text=MESSAGE_INVALID_VOTE.format(number)))
            return
        poker_mutex = Mutex(redis, POKER_MUTEX_KEY_PREFIX + sourceId)
        vote_mutex = Mutex(redis, VOTE_MUTEX_KEY_PREFIX + sourceId)
        # Reverse-lookup of the hash field for this vote value.
        # NOTE(review): keys()[...] indexing requires Python 2 dict lists;
        # on Python 3 dict views are not indexable — confirm runtime.
        location = mapping.keys()[mapping.values().index(value)]
        vote_mutex.lock()
        if vote_mutex.is_lock():
            # First voter: wait out the voting window, then publish.
            time.sleep(VOTE_MUTEX_TIMEOUT)
            redis.hincrby(vote_key, location)
            line_bot_api.reply_message(
                event.reply_token,
                genenate_voting_result_message(vote_key))
            redis.hset(vote_key, 'status', 'complete')
            vote_mutex.unlock()
            # NOTE(review): release() here vs unlock() elsewhere — confirm
            # both exist on Mutex and do what is intended.
            poker_mutex.release()
        else:
            redis.hincrby(vote_key, location)
def travis():
    """Webhook endpoint for Travis CI build notifications.

    Verifies the payload signature against Travis's public key, then:
    on build start queues a code load and sets an upload lock; on
    completion queues the Rosie test; on cancellation/error reports an
    error status.  Returns a JSON acknowledgement.
    """
    signature = base64.b64decode(request.headers.get('Signature'))
    try:
        public_key = _get_travis_public_key()
    except requests.Timeout:
        print("Timed out when attempting to retrieve Travis CI public key")
        abort(500)
    except requests.RequestException as e:
        print("Failed to retrieve Travis CI public key")
        abort(500)
    try:
        check_authorized(signature, public_key, request.form["payload"])
    except SignatureError:
        abort(401)
    data = json.loads(request.form["payload"])
    repo = data["repository"]["owner_name"] + "/" + data["repository"]["name"]
    build_number = data["id"]
    sha = data["commit"]
    if data["type"] == "pull_request":
        sha = data["head_commit"]
    tag = None
    # `is not None` replaces `!= None` (identity is the idiom for None).
    if data["type"] == "push" and data["tag"] is not None:
        tag = data["tag"]
    print(data)
    key = sha
    if tag is not None:
        key = tag
    upload_lock = "upload-lock:" + sha
    if data["state"] in ("started", ):
        print("travis started", key)
        # Handle pulls differently.
        if data["pull_request"]:
            load_code.delay(repo, "pull/" + str(data["pull_request_number"]) + "/head")
        elif data["tag"]:
            load_code.delay(repo, "refs/tags/" + tag)
        else:
            load_code.delay(repo, "refs/heads/" + data["branch"])
        redis.setex(upload_lock, 20 * 60, "locked")
        set_status(repo, sha, "pending", data["build_url"], "Waiting on Travis to complete.")
    elif data["state"] in ("passed", "failed"):
        print("travis finished")
        key = repo + "/" + key
        set_status(repo, sha, "pending", "https://rosie-ci.ngrok.io/log/" + key, "Queueing Rosie test.")
        redis.delete(upload_lock)
        test_commit(repo, sha, tag)
    elif data["state"] in ("cancelled", ):
        # BUGFIX: was `data["state"] is ("cancelled", )` — an identity
        # comparison against a fresh tuple, which is always False, so
        # cancelled builds fell through and were never handled.
        print("travis cancelled")
        redis.delete(upload_lock)
        set_status(repo, sha, "error", data["build_url"], "Travis cancelled.")
    elif data["status"] is None:
        set_status(repo, sha, "error", data["build_url"], "Travis error.")
    else:
        print("unhandled state:", data["state"])
        print(data)
    return jsonify({'status': 'received'})