The following 50 code examples, extracted from Python open source projects, illustrate how to use threading.activeCount().
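Before the collected examples, a minimal sketch of the call itself may help: threading.activeCount() returns the number of Thread objects currently alive, including the main thread (on modern Python it is the legacy alias of threading.active_count() and may emit a DeprecationWarning). The worker function and the thread count below are illustrative only.

import threading
import time

def worker():
    # Keep the thread alive briefly so it is counted.
    time.sleep(0.5)

threads = [threading.Thread(target=worker) for _ in range(3)]
for t in threads:
    t.start()

# The main thread plus three workers are alive here, so this typically prints 4.
print("active threads:", threading.activeCount())

for t in threads:
    t.join()

# After join() only the main thread remains, so this typically prints 1.
print("active threads:", threading.activeCount())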
def run(self): """Wait & serve. Calls request_handler on every request.""" self.sock.listen(self.backlog) log("Starting Process") running = True while running: if not self.requests_left: # self.sock.shutdown(RDWR) here does NOT help with backlog running = False elif self.requests_left > 0: self.requests_left -= 1 if running: conn, addr = self.sock.accept() threadcount = _threading.activeCount() if threadcount < self.max_threads: log("Accepted connection, starting thread...") t = _threading.Thread(target=self.accept_handler, args=(conn, addr, True)) t.start() else: log("Accepted connection, running in main-thread...") self.accept_handler(conn, addr, False) log("Active Threads: %d" % _threading.activeCount()) self.sock.close() log("Ending Process")
def t_join(m_count):
    tmp_count = 0
    i = 0
    if I < m_count:
        count = len(ip_list) + 1
    else:
        count = m_count
    while True:
        time.sleep(4)
        ac_count = threading.activeCount()
        #print ac_count, count
        if ac_count < count and ac_count == tmp_count:
            i += 1
        else:
            i = 0
        tmp_count = ac_count
        #print ac_count, queue.qsize()
        if (queue.empty() and threading.activeCount() <= 1) or i > 5:
            break
def to_string(self):
    worker_rate = {}
    for w in self.workers:
        worker_rate[w] = w.get_rtt_rate()
    w_r = sorted(worker_rate.items(), key=operator.itemgetter(1))
    out_str = 'thread num:%d\r\n' % threading.activeCount()
    for w, r in w_r:
        out_str += "%s rtt:%d a:%d live:%d processed:%d" % \
            (w.ip, w.rtt, w.accept_task,
             (time.time() - w.ssl_sock.create_time), w.processed_tasks)
        if w.version == "2":
            out_str += " streams:%d ping_on_way:%d\r\n" % \
                (len(w.streams), w.ping_on_way)
        out_str += " Speed:"
        for speed in w.speed_history:
            out_str += "%d," % speed
        out_str += "\r\n"
    return out_str
def sysinfo(prefix, chan, params):
    seconds = time.time() - bot.start_time
    times = []
    days = seconds // 86400
    hours = seconds // 3600 % 24
    minutes = seconds // 60 % 60
    seconds = seconds % 60
    if days:
        times.append('%s days' % int(days))
    if hours:
        times.append('%s hours' % int(hours))
    if minutes:
        times.append('%s minutes' % int(minutes))
    if seconds:
        times.append('%s seconds' % int(seconds))
    bot.say(chan, 'Uptime: %s Threads: %s' % (
        ', '.join(times), threading.activeCount()
    ))
def test_scaledown(self):
    """
    test that a thread pool will scale down properly
    :return:
    """
    pool = self.ThreadPool(TestThread, initial_size=10, keywordarg="keywordstring")
    try:
        self.assertEqual(threading.activeCount(), 11)
        pool.scale_down(timeout=1)
        self.assertEqual(threading.activeCount(), 10)
        pool.scale_down(timeout=1)
        self.assertEqual(threading.activeCount(), 9)
        pool.scale_down(timeout=1)
        self.assertEqual(threading.activeCount(), 8)
        pool.scale_down(timeout=1)
        self.assertEqual(threading.activeCount(), 7)
        pool.scale_down(timeout=1)
    finally:
        pool.safe_terminate()
def list_threads(self, txt):
    cp_threads = 0
    http_threads = 0
    for thread in threading.enumerate():
        if thread.name.find("CP Server") == 0:
            cp_threads += 1
        if thread.name.find("HTTPServer") == 0:
            http_threads += 1
    self._logger.info("list_threads: {} - Number of Threads: {} (CP Server={}, HTTPServer={}".format(
        txt, threading.activeCount(), cp_threads, http_threads))
    for thread in threading.enumerate():
        if thread.name.find("CP Server") != 0 and thread.name.find("HTTPServer") != 0:
            self._logger.info("list_threads: {} - Thread {}".format(txt, thread.name))
    return

#################################################################
# Item Methods
#################################################################
def _load_dns_servers(self):
    print '[+] Initializing, validate DNS servers ...'
    self.dns_servers = []
    with open('dict/dns_servers.txt') as f:
        for line in f:
            server = line.strip()
            if not server:
                continue
            while True:
                if threading.activeCount() < 50:
                    t = threading.Thread(target=self._test_server, args=(server,))
                    t.start()
                    break
                else:
                    time.sleep(0.1)
    while threading.activeCount() > 2:
        time.sleep(0.1)
    self.dns_count = len(self.dns_servers)
    sys.stdout.write('\n')
    print '[+] Found %s available DNS servers in total' % self.dns_count
def _load_dns_servers(self):
    print '[+] Initializing, validate DNS servers ...'
    self.dns_servers = []
    with open('./subDomain/dict/dns_servers.txt') as f:
        for line in f:
            server = line.strip()
            if not server:
                continue
            while True:
                if threading.activeCount() < 50:
                    t = threading.Thread(target=self._test_server, args=(server,))
                    t.start()
                    break
                else:
                    time.sleep(0.1)
    while threading.activeCount() > 2:
        time.sleep(0.1)
    self.dns_count = len(self.dns_servers)
    sys.stdout.write('\n')
    print '[+] Found %s available DNS Servers in total' % self.dns_count
    if self.dns_count == 0:
        print '[ERROR] No DNS Servers available.'
        self.STOP_ME = True
        sys.exit(-1)
def check_bullet_alien_collisions(ai_settings, screen, stats, sb, ship,
                                  aliens, bullets):
    collisions = pygame.sprite.groupcollide(bullets, aliens, True, True)
    if collisions:
        for aliens in collisions.values():
            stats.score += ai_settings.alien_points * len(aliens)
            sb.prep_score()
        check_high_score(stats, sb)
    #lock.acquire()
    if len(aliens) == 0 and threading.activeCount() == 1:
        #print(threading.activeCount())
        #print(threading.currentThread().getName() + " start")
        bullets.empty()
        ai_settings.increase_speed()
        stats.level += 1
        sb.prep_level()
        #create_fleet(0, ai_settings, screen, ship, aliens)
        for i in range(4):
            t = threading.Thread(target=create_fleet,
                                 args=(i, ai_settings, screen, ship, aliens))
            t.start()
        #print(threading.currentThread().getName() + " end")
    #lock.release()
def scan(self, host, start, stop):
    self.port = start
    while self.port <= stop:
        while threading.activeCount() < MAX_THREADS:
            Scanner(host, self.port).start()
            self.port += 1
def run(self):
    try:
        url, filename = self.qu1.get()
        url = url + self.ad  # comment this line in case need to download whole web page instead of recipe ONLY...
        ul.urlretrieve(url, filename)
        global count
    except:
        print " RE-TRYING ",
        count = count - 1
        self.qu1.put((url, filename))
        self.run()
    finally:
        count = count + 1
        print str(count) + "(" + str(threading.activeCount()) + ")", filename
        self.qu1.task_done()
def run(self, *args):
    if args:
        self.entrances = args[0].split(',')
        for i in self.entrances:
            self.q.put('http://{}'.format(i))
    else:
        print '[+] Choose Entrances Domain ing ...'
        self.entrances = random.sample(self.hooks, self.thread_cnt)
        for i in self.entrances:
            if not port(i, 80):
                self.reSelect(i)
            else:
                self.q.put('http://{}'.format(i))
    print "[+] Use : {}".format('?'.join(self.entrances))
    for t in xrange(self.thread_cnt):
        t = threading.Thread(target=self.req)
        t.setDaemon(True)
        t.start()
    while True:
        if threading.activeCount() <= 1:
            break
        else:
            try:
                time.sleep(0.1)
            except KeyboardInterrupt:
                self.STOP_ME = True
                raise
def q_map(self):
    # Creates threads that call q_mapsingle() on each directory (replica)
    # returns a tuple (mapped_directories, failed_directories)
    self._mapped = []
    self._failed = []
    trs = []
    for md in self._map_dirs:
        num = threading.activeCount() - 1  # -1: one thread is the main thread
        while num == self._nthreads:
            time.sleep(0.3)
            num = threading.activeCount() - 1  # -1: one thread is the main thread
        if num < self._nthreads:
            trs.append(_Mapthread(self, md))
            trs[-1].start()
    # wait for threads to finish, save their response
    for t in trs:
        while t.isAlive():
            t.join(1.0)
        if t.error:
            self._failed.append((t.path, t.error))
        else:
            self._mapped.append(t.path)
    if not self._mapped:
        errstr = ""
        for m, er in self._failed:
            errstr += "%s -> %s\n" % (os.path.relpath(m), er)
        raise QMappingError("All %s directories failed to map!\n%s" % (
            len(self.get_mapdirs()), errstr))
    return (self._mapped, self._failed)
def test_old_threading_api(self):
    # Just a quick sanity check to make sure the old method names are
    # still present
    t = threading.Thread()
    t.isDaemon()
    t.setDaemon(True)
    t.getName()
    t.setName("name")
    t.isAlive()
    e = threading.Event()
    e.isSet()
    threading.activeCount()
def __init__(self, jvm_started=False, parse_datetime=False,
             minimum_heap_size='128m', maximum_heap_size='2048m'):
    """Initializes Duckling.
    """
    self.parse_datetime = parse_datetime
    self._is_loaded = False
    self._lock = threading.Lock()

    if not jvm_started:
        self._classpath = self._create_classpath()
        self._start_jvm(minimum_heap_size, maximum_heap_size)

    try:
        # make it thread-safe
        if threading.activeCount() > 1:
            if jpype.isThreadAttachedToJVM() is not 1:
                jpype.attachThreadToJVM()
        self._lock.acquire()

        self.clojure = jpype.JClass('clojure.java.api.Clojure')
        # require the duckling Clojure lib
        require = self.clojure.var("clojure.core", "require")
        require.invoke(self.clojure.read("duckling.core"))
    finally:
        self._lock.release()
def register_counters(cls, stats_mgr):
    stats_mgr.register_counter("sessions", CommandSession.get_session_count)
    stats_mgr.register_counter("gc.garbage", lambda: len(gc.garbage))
    stats_mgr.register_counter("active_threads", threading.activeCount)
    stats_mgr.register_counter("cpu_usage_permille",
                               lambda: round(cls._getCpuUsage() * 10))
def test_new_threadpool(self):
    """
    test that a thread pool starts up correctly, and safely terminate
    :return:
    """
    pool = self.ThreadPool(TestThread, initial_size=1, keywordarg="keywordstring")
    self.assertEqual(threading.activeCount(), 2)
    pool.safe_terminate()
def test_scaleup(self):
    """
    test that a thread pool will scale up properly
    :return:
    """
    pool = self.ThreadPool(TestThread, initial_size=1, keywordarg="keywordstring")
    try:
        self.assertEqual(threading.activeCount(), 2)
        pool.scale_up()
        self.assertEqual(threading.activeCount(), 3)
        pool.scale_up()
        self.assertEqual(threading.activeCount(), 4)
    finally:
        pool.safe_terminate()
def proxy_updator():
    global proxy_object
    while True:
        if proxy_object.get_proxy_num() < 10:
            print "?????", threading.activeCount()
            print "??????", proxy_object.get_proxy_num()
            proxy_object.add_more_proxyip()
        else:
            #print "????", proxy_object.get_proxy_num()
            sleep(1)
def StartServer(port=8000):
    '''Starts the server.'''
    global SERVER
    server_address = ('', port)
    httpd = BuiltinWebServer(server_address, WebServerRequestHandler)
    SERVER = httpd
    # We need the following two to be able to interrupt the accept
    # call, such that the server really terminates:
    SERVER.ourpid = os.getpid()
    signal.signal(signal.SIGUSR1, sigusr1handler)   # handle signal USR1
    while not(SERVER.raus):
        try:
            httpd.handle_request()
        except:
            Utils.Error("Exception in handle_request: ", prefix="Info: ")
            etype, value, tb = sys.exc_info()
            lines = traceback.format_exception(etype, value, tb)
            Utils.Error(string.join(lines), prefix="")
            pass
    Utils.Error("Waiting for threads to terminate...", prefix="Info: ")
    wait = TERMWAIT
    while threading.activeCount() > 1 and wait > 0:
        time.sleep(1)
        wait = wait - 1
    if httpd.restartcommand != '':
        # for freeing the port the new server will listen
        Utils.Error("Restarting...", prefix="Info: ")
        httpd.server_close()
        os.execl(httpd.restartcommand, httpd.restartcommand)
    Utils.Error("Terminating...", prefix="Info: ")

# finally, we add here a few more WebResponse classes of general interest
# a WebResponse with a cached result
def execThread(sshObj, cmd, result, ip):
    result[ip] = sshObj.cmd(cmd)
    #print "cmd theading count:%d" % threading.activeCount()

#def putThread(sshObj, file, remote_path='.', recursive=False, preserve_times=False):
def sshConn(ip, servers):
    check = False
    for i in range(10):
        try:
            servers[ip] = ssh.ssh(ip)
            check = True
            break
        except:
            time.sleep(1)
            print "connect server %s try again ..." % ip
    if not check:
        print "connect server %s failed!" % ip
    #print "ssh theading count:%d" % threading.activeCount()
def main_loop(self):
    rlist = []
    rlist.append(self.__pipe.inform)
    timeout = 10
    print "Total threads: {0}".format(threading.activeCount())
    try:
        while self.__running:
            readable, _, _ = select.select(rlist, [], [], timeout)
            if not readable:
                continue
            if self.__pipe.inform in readable:
                try:
                    message = self.__pipe.read(256)
                except OSError, exc:
                    logger.warn("[Error %d] appeared at reading pipe" % exc.errno)
                    continue
                if len(message) == 0:
                    continue
                pdu_id = message.split()[0].split('.')[-2]
                pdu_index = self.to_index(int(pdu_id))
                logger.info("Assign message to pdu {0}".format(pdu_id))
                self.__pdus[pdu_index].handle_message(message)
    except KeyboardInterrupt:
        logger.error("Break by user.")
    except Exception, ex:
        logger.error("{0}: {1}".format(sys._getframe().f_code.co_name, ex))
    finally:
        logger.info("vIPI Appliance service exits.")
        self.__pipe.close()
def new_order_notify(self, kexchange, type, maker_only=True, amount=None, price=None):
    order = super().new_order(kexchange, type, maker_only, amount, price)
    if order:
        # self.notify_msg(order['type'], order['price'])
        t = threading.Thread(target=self.notify_msg,
                             args=(order['type'], order['price'],))
        t.start()
        logging.info("current has %d threads" % (threading.activeCount() - 1))
def t_join(m_count):
    tmp_count = 0
    i = 0
    while True:
        time.sleep(1)
        ac_count = threading.activeCount()
        if ac_count < m_count and ac_count == tmp_count:
            i += 1
        else:
            i = 0
        tmp_count = ac_count
        #print ac_count, queue.qsize()
        if (queue.empty() and threading.activeCount() <= 1) or i > 5:
            break
def t_join(m_count):
    tmp_count = 0
    i = 0
    while True:
        time.sleep(2)
        ac_count = threading.activeCount()
        if ac_count < m_count and ac_count == tmp_count:
            i += 1
        else:
            i = 0
        tmp_count = ac_count
        #print ac_count, queue.qsize()
        if (queue.empty() and threading.activeCount() <= 1) or i > 5:
            break
def getActiveCount(self):
    return threading.activeCount() - 1
def get(self):
    if self.readBytes < self.totalBytes:
        shards = range(self.totalBytes)
        os.mkdir(self.tempdir)
        for i in range(0, self.totalBytes, self.readBytes):
            point = shards[i: i + self.readBytes]
            headers = {
                "Range": "bytes=%s-%s" % (point[0], point[-1])
            }
            filename = os.path.join(self.tempdir, str(point[-1]))
            thread = self.threadPool.get()
            t = thread(target=self.write_to_file, args=[filename, headers])
            t.start()
        while activeCount() > 1:
            time.sleep(1)
        results = sorted(map(int, os.listdir(self.tempdir)))
        with open(self.filename, "ab") as wfd:
            for result in results:
                f = os.path.join(self.tempdir, str(result))
                with open(f, "rb") as rfd:
                    content = True
                    while content:
                        content = rfd.read(1024)
                        wfd.write(content)
        self.delete_temp_dir()
    else:
        self.write_to_file(filename=self.filename, useThread=False)
def num_threads():
    '''add information about the number of threads currently running to the event'''
    return threading.activeCount()

# run factorial. libh_builder comes with some fields already populated
# (namely, "version", "num_threads", and "range")
def hack_subprocess():
    """subprocess functions may throw exceptions when used in multiple threads.

    See http://bugs.python.org/issue1731717 for more information.
    """
    global SUBPROCESS_CLEANUP_HACKED
    if not SUBPROCESS_CLEANUP_HACKED and threading.activeCount() != 1:
        # Only hack if there is ever multiple threads.
        # There is no point to leak with only one thread.
        subprocess._cleanup = lambda: None
        SUBPROCESS_CLEANUP_HACKED = True
def _map_len(self):
    return threading.activeCount()
def run(self):
    time.sleep(5)
    print "current has %d threads\r" % (threading.activeCount() - 1)
    print 'the arg thread is:%s\r' % self.arg
def run(self):
    time.sleep(5)
    print 'the arg thread is:%s\r' % self.arg
    print "current has %d threads" % (threading.activeCount() - 1)
def __init__(self, jars=[], jvm_started=False, mark_time_ranges=False, include_range=False):
    """Initializes SUTime.
    """
    self.mark_time_ranges = mark_time_ranges
    self.include_range = include_range
    self.jars = jars
    self._is_loaded = False
    self._lock = threading.Lock()

    if not jvm_started:
        self._classpath = self._create_classpath()
        self._start_jvm()

    try:
        # make it thread-safe
        if threading.activeCount() > 1:
            if jpype.isThreadAttachedToJVM() is not 1:
                jpype.attachThreadToJVM()
        self._lock.acquire()

        SUTimeWrapper = jpype.JClass(
            'edu.stanford.nlp.python.SUTimeWrapper')
        self._sutime = SUTimeWrapper(
            self.mark_time_ranges, self.include_range)
        self._is_loaded = True
    finally:
        self._lock.release()
def _load_dns_servers(self):
    print '[+] Initializing, validate DNS servers ...'
    self.dns_servers = []
    # f = open('./dict/dns_servers.txt', 'a')
    with open('./dns_servers.txt') as f:
        # f = ['114.114.114.114', '114.114.115.115', '180.76.76.76', '223.5.5.5', '223.6.6.6']
        for line in f:
            server = line.strip()
            if not server:
                continue
            while True:
                if threading.activeCount() < 50:
                    t = threading.Thread(target=self._test_server, args=(server,))
                    t.start()
                    break
                else:
                    time.sleep(0.1)
    while threading.activeCount() > 2:
        time.sleep(0.1)
    self.dns_count = len(self.dns_servers)
    sys.stdout.write('\n')
    print '[+] Found %s available DNS Servers in total' % self.dns_count
    if self.dns_count == 0:
        print '[ERROR] No DNS Servers available.'
        self.STOP_ME = True
        sys.exit(-1)
def _load_dns_servers(self):
    print('[+] Initializing, validate DNS servers ...')
    self.dns_servers = []
    thread_list = []
    # with open('dict/dns_servers.txt') as f:
    with open('api/dict/dns_servers.txt') as f:
        for line in f:
            server = line.strip()
            if not server:
                continue
            while True:
                if threading.activeCount() < 50:
                    t = threading.Thread(target=self._test_server, args=(server,))
                    t.setDaemon(True)
                    t.start()
                    thread_list.append(t)
                    break
                else:
                    time.sleep(0.1)
    while True:
        flag_finished = True
        for i in thread_list:
            if i.isAlive():
                flag_finished = False
        if flag_finished:
            break
        time.sleep(0.1)
    self.dns_count = len(self.dns_servers)
    sys.stdout.write('\n')
    print('[+] Found %s available DNS servers in total' % self.dns_count)
def tearDown(self):
    self.console.working = False
    self.console.wait_for_threads()
    sys.stdout.write("\tactive threads count : %s " % threading.activeCount())
    # sys.stderr.write("%s\n" % threading.enumerate())
def run(self): """Runs the queue, goes through all of the actions and their recursive definitions.""" threads = [] for i in range(self.num_threads): threads.append(_Action_Queue_Thread(name = '_Action_Queue_Thread' + str(i), action_queue = self)) for thread in threads: thread.start() print(threading.activeCount()) for thread in threads: thread.join()