def run(self,ipdict,pinglist,threads,file):
if len(ipdict['mysql']):
printPink("crack mysql now...")
print "[*] start crack mysql %s" % time.ctime()
starttime=time.time()
pool=Pool(threads)
for ip in ipdict['mysql']:
pool.apply_async(func=self.mysq1,args=(str(ip).split(':')[0],int(str(ip).split(':')[1])))
pool.close()
pool.join()
print "[*] stop crack mysql %s" % time.ctime()
print "[*] crack mysql done,it has Elapsed time:%s " % (time.time()-starttime)
for i in xrange(len(self.result)):
self.config.write_file(contents=self.result[i],file=file)
# Example source code using the Pool() class (collected snippets)
def pop_main(ipdict,threads):
printPink("crack pop now...")
print "[*] start crack pop %s" % time.ctime()
starttime=time.time()
global lock
lock = threading.Lock()
global result
result=[]
pool=Pool(threads)
for ip in ipdict['pop3']:
pool.apply_async(func=pop3_l,args=(str(ip).split(':')[0],int(str(ip).split(':')[1])))
pool.close()
pool.join()
print "[*] stop pop serice %s" % time.ctime()
print "[*] crack pop done,it has Elapsed time:%s " % (time.time()-starttime)
return result
def run(self,ipdict,pinglist,threads,file):
if len(ipdict['mongodb']):
printPink("crack mongodb now...")
print "[*] start crack mongodb %s" % time.ctime()
starttime=time.time()
pool=Pool(threads)
for ip in ipdict['mongodb']:
pool.apply_async(func=self.mongoDB,args=(str(ip).split(':')[0],int(str(ip).split(':')[1])))
pool.close()
pool.join()
print "[*] stop mongoDB serice %s" % time.ctime()
print "[*] crack mongoDB done,it has Elapsed time:%s " % (time.time()-starttime)
for i in xrange(len(self.result)):
self.config.write_file(contents=self.result[i],file=file)
def run(self,ipdict,pinglist,threads,file):
printPink("crack snmp now...")
print "[*] start crack snmp %s" % time.ctime()
starttime=time.time()
pool=Pool(threads)
for ip in pinglist:
pool.apply_async(func=self.snmp_l,args=(str(ip).split(':')[0],""))
pool.close()
pool.join()
print "[*] stop crack snmp %s" % time.ctime()
print "[*] crack snmp done,it has Elapsed time:%s " % (time.time()-starttime)
for i in xrange(len(self.result)):
self.config.write_file(contents=self.result[i],file=file)
def run(self,ipdict,pinglist,threads,file):
if len(ipdict['rsync']):
printPink("crack rsync now...")
print "[*] start crack rsync %s" % time.ctime()
starttime=time.time()
pool=Pool(threads)
for ip in ipdict['rsync']:
pool.apply_async(func=self.rsync_creak,args=(str(ip).split(':')[0],int(str(ip).split(':')[1])))
pool.close()
pool.join()
print "[*] stop rsync serice %s" % time.ctime()
print "[*] crack rsync done,it has Elapsed time:%s " % (time.time()-starttime)
for i in xrange(len(self.result)):
self.config.write_file(contents=self.result[i],file=file)
def run(self,ipdict,pinglist,threads,file):
if len(ipdict['ldap']):
printPink("crack ldap now...")
print "[*] start ldap %s" % time.ctime()
starttime=time.time()
pool=Pool(threads)
for ip in ipdict['ldap']:
pool.apply_async(func=self.ldap_creak,args=(str(ip).split(':')[0],str(ip).split(':')[1]))
pool.close()
pool.join()
print "[*] stop ldap serice %s" % time.ctime()
print "[*] crack ldap done,it has Elapsed time:%s " % (time.time()-starttime)
for i in xrange(len(self.result)):
self.config.write_file(contents=self.result[i],file=file)
def run(self,ipdict,pinglist,threads,file):
if len(ipdict['smb']):
printPink("crack smb now...")
print "[*] start crack smb serice %s" % time.ctime()
starttime=time.time()
pool=Pool(threads)
for ip in ipdict['smb']:
pool.apply_async(func=self.smb_l,args=(str(ip).split(':')[0],int(str(ip).split(':')[1])))
pool.close()
pool.join()
print "[*] stop smb serice %s" % time.ctime()
print "[*] crack smb done,it has Elapsed time:%s " % (time.time()-starttime)
for i in xrange(len(self.result)):
self.config.write_file(contents=self.result[i],file=file)
def run(self,ipdict,pinglist,threads,file):
if len(ipdict['http']):
print "[*] start test web burp at %s" % time.ctime()
starttime=time.time()
pool=Pool(threads)
for ip in ipdict['http']:
pool.apply_async(func=self.webmain,args=(str(ip).split(':')[0],int(str(ip).split(':')[1])))
pool.close()
pool.join()
print "[*] stop test iip_put&&scanner web paths at %s" % time.ctime()
print "[*] test iip_put&&scanner web paths done,it has Elapsed time:%s " % (time.time()-starttime)
for i in xrange(len(self.result)):
self.config.write_file(contents=self.result[i],file=file)
def run(self,ipdict,pinglist,threads,file):
if len(ipdict['vnc']):
printPink("crack vnc now...")
print "[*] start crack vnc %s" % time.ctime()
starttime=time.time()
pool=Pool(threads)
for ip in ipdict['vnc']:
pool.apply_async(func=self.vnc_l,args=(str(ip).split(':')[0],int(str(ip).split(':')[1])))
pool.close()
pool.join()
print "[*] stop vnc serice %s" % time.ctime()
print "[*] crack vnc done,it has Elapsed time:%s " % (time.time()-starttime)
for i in xrange(len(self.result)):
self.config.write_file(contents=self.result[i],file=file)
def main():
    """Load URL ids from urlid_duo.txt and fetch them all via a thread pool.

    Each stripped line is appended to the (module-level) fuu_list; the pool
    workers receive only an index and presumably look the url up there —
    TODO confirm against getFuck's definition.
    """
    idx = 0
    idx_list = []
    for url in open('urlid_duo.txt'):
        url = url.strip()
        fuu_list.append(url)
        idx_list.append(idx)
        idx += 1
    pool = ThreadPool(totalThread)
    # getFuck is applied to every index, totalThread workers at a time
    pool.map(getFuck, idx_list)
    pool.close()
    pool.join()
def main(): # entry point
    """Read keywords from kw.txt and scrape each one with a thread pool.

    NOTE(review): original comments were mojibake; rewritten in English
    from what the code itself shows.
    """
    global totalThread
    # getAvailableIp()  # disabled: presumably refreshes a proxy-IP list — confirm
    keywordNum = 0
    keywordNum_list = []
    for kw in open('kw.txt'):  # one keyword per line
        kw = kw.strip()
        keyword_list.append(kw)  # module-level list shared with the workers
        keywordNum_list.append(keywordNum)
        keywordNum += 1
    pool = ThreadPool(totalThread)
    # workers receive indices into keyword_list rather than the keywords themselves
    pool.map(getKeyword, keywordNum_list)
    pool.close()
    pool.join()
    '''
    gap = keywordNum/totalThread
    thread_list = []
    for line in range(0,keywordNum,gap):#10,5
        t = threading.Thread(target=getRange,args=(line,line+gap))
        t.start()#???
        thread_list.append(t)
    for tt in thread_list:#????
        tt.join()
    '''
def cut_Dataset(data_set, parrel=False, nomial=False):
    """
    :param data_set:bunch of Dataset
    :param parrel: if it is True,cut dataset in parrel.Windows is not available
    :param nomial: if nomial is True,only noun-like words will remain
    :return:data_set after cutted
    """
    from tqdm import tqdm
    data_cut = []
    start = time.time()
    print('cuting dataset......')
    if parrel:
        p = ThreadPool(9)
        # BUGFIX: the map results were previously discarded, leaving
        # data_cut empty, and the nomial flag was not forwarded the way
        # the serial branch forwards it.
        data_cut = p.map(lambda doc_content: cut_Text(doc_content, nomial),
                         data_set.data)
        p.close()
        p.join()
    else:
        for doc_content in tqdm(data_set.data):
            data_cut.append(cut_Text(doc_content, nomial))
    end = time.time()
    print('cuting runs %0.2f seconds.' % (end - start))
    data_set.data = data_cut
def reprojectToThisThreaded(self, sourceProjection, numThreads):
uvList = []
fx = float(self.imsize[0])
fy = float(self.imsize[1])
angleList = [self.angular_position((float(i)/fx,float(j)/fy)) for i in range(self.imsize[0]) for j in range(self.imsize[1])]
poolAngles = ThreadPool(numThreads)
image = poolAngles.map(sourceProjection.pixel_value, angleList)
poolAngles.close()
poolAngles.join()
idx = 0
for x in range(self.imsize[0]):
for y in range(self.imsize[1]):
pixel = image[idx]
if pixel is None:
print x,y
else:
self.image[y,x] = pixel
idx = idx + 1
def pick_proxies(proxy_list, test_url, timeout):
    '''
    Probe every proxy against test_url in parallel and return, in the
    original order, only those that responded faster than timeout.
    '''
    pool = Pool(16)  # fixed-size worker pool
    test_args = [{'test_url': test_url, 'proxy': proxy, 'timeout': timeout}
                 for proxy in proxy_list]
    response_times = pool.map(multi_test_wrapper, test_args)
    # a proxy survives when its measured response beat the timeout
    survivors = []
    for idx, elapsed in enumerate(response_times):
        if elapsed < timeout:
            survivors.append(proxy_list[idx])
    return survivors
def getRandomArticlesFromCategory(self, categoryName, sampleSize = 50, articleNameCompare = None, multi = False):
    """Sample random articles from a category and fetch their contents.

    :param categoryName: category to draw articles from
    :param sampleSize: maximum number of articles to sample
    :param articleNameCompare: one article name to exclude from the pool
    :param multi: fetch contents in parallel via a thread pool when True
    :return: (category size, actual sample size, sampled names, sampled contents)
    """
    # NOTE(review): unused since the cache-writing block below is disabled
    cacheName = "randomArticleCacheNames/"+categoryName
    articles = self.getArticles(categoryName)
    articles = [art for art in articles if not art==articleNameCompare]
    # never sample more than the category actually holds
    subSize = min(sampleSize, len(articles))
    subNames = random.sample(articles, subSize)
    if multi:
        # partial binds self so the module-level alias can be mapped over names
        _bound_instance_method_alias = functools.partial(_instance_method_alias, self)
        pool = ThreadPool(self.cpuCount)
        subText = pool.map(_bound_instance_method_alias, subNames)
        #subText = pool.map(_bound_instance_method_alias, range(len(subNames)))
    else:
        subText = [self.getArticleContent(art) for art in subNames]
    catlen = len(articles)
    '''
    with open(cacheName, "w") as f:
        for i in subNames:
            f.write(i+"\n")
    for i, t in enumerate(subText):
        with open("randomArticleCache/"+subNames[i], "w") as f:
            #print subText[i]
            f.write("\n".join(subText[i]))
    '''
    return catlen, len(subText), subNames, subText
def get_offline_user_data():
    """Fetch data for every active offline user that is not already queued.

    Bails out entirely while the global API-error flag is set; the
    per-user 'cron_queued' key prevents re-queueing within the hour.
    """
    print(datetime.now().strftime('%Y-%m-%d %H:%M:%S'), 'get_offline_user_data')
    if r_session.exists('api_error_info'):
        return
    # if datetime.now().minute < 50: return
    # offline = all users minus the online set; mget pulls their JSON blobs
    offline_names = r_session.sdiff('users', *r_session.smembers('global:online.users'))
    user_keys = ['user:%s' % name.decode('utf-8') for name in offline_names]
    offline_users = []
    for raw in r_session.mget(*user_keys):
        info = json.loads(raw.decode('utf-8'))
        if not info.get('active'):
            continue
        username = info.get('username')
        if r_session.exists('user:%s:cron_queued' % username):
            continue
        offline_users.append(username)
    pool = ThreadPool(processes=5)
    pool.map(get_data, offline_users)
    pool.close()
    pool.join()
def get_offline_user_data():
    """Fetch data for active offline users, only late in each hour.

    Skips entirely when the global API-error flag is set or before
    minute 50; users already marked 'cron_queued' are skipped too.
    """
    print(datetime.now().strftime('%Y-%m-%d %H:%M:%S'), 'get_offline_user_data')
    if r_session.exists('api_error_info'): return
    # only run during the last ten minutes of the hour
    if datetime.now().minute < 50: return
    offline_users = []
    # offline = all users minus the online set; mget fetches their JSON blobs
    for b_user in r_session.mget(*['user:%s' % name.decode('utf-8') for name in r_session.sdiff('users', *r_session.smembers('global:online.users'))]):
        user_info = json.loads(b_user.decode('utf-8'))
        username = user_info.get('username')
        if not user_info.get('active'): continue
        every_hour_key = 'user:%s:cron_queued' % username
        if r_session.exists(every_hour_key): continue
        offline_users.append(username)
    pool = ThreadPool(processes=5)
    pool.map(get_data, offline_users)
    pool.close()
    pool.join()
    # NOTE(review): original trailing comment here was mojibake and unrecoverable
def migrate():
    """migrate data from database to json

    Reads every row of the CUMTB table, pipes each through the external
    ./encrypt helper, and dumps the collected JSON records to
    there_is_nothing.json.
    """
    conn = sqlite3.connect("CUMTB.db")
    try:
        data = conn.execute("SELECT * FROM CUMTB").fetchall()
    finally:
        conn.close()  # BUGFIX: connection was previously never closed

    def read_conf(lst):
        # NOTE(review): builds a shell command from DB values — command
        # injection if the table ever holds untrusted data; confirm inputs.
        res = os.popen("./encrypt {0} {1}".format(*lst))
        return json.loads(res.readline())

    pool = Pool(cpu_count() * 2)
    # BUGFIX: collect via map()'s return value instead of appending to a
    # shared list from workers — preserves row order and also works if
    # Pool is ever a process (not thread) pool.
    json_data = pool.map(read_conf, data)
    pool.close()
    pool.join()
    with open("there_is_nothing.json", 'w') as f:
        json.dump(json_data, f, indent=2)
def get_pages_in_category_tree(source, category, count):
    """Breadth-first collect up to `count` pages beneath `category`.

    Each level's categories are fetched in parallel; subcategories feed
    the next level until enough pages are gathered or the tree is
    exhausted. Returns the pages as a list.
    """
    from functools import partial
    pages = set()
    seen_categories = set()
    current_categories = {category}
    while len(pages) < count:
        log.debug(len(pages))
        if not current_categories:
            break
        next_categories = set()
        # BUGFIX: multiprocessing.Pool.map cannot pickle a lambda; a
        # partial over the module-level function is picklable.
        fetch = partial(get_category_members, source)
        with multiprocessing.Pool(processes=len(current_categories)) as pool:
            results = pool.map(fetch, current_categories)
        for result in results:
            next_categories.update(result['subcats'])
            pages.update(result['pages'])
        seen_categories.update(current_categories)
        current_categories = next_categories - seen_categories
    log.debug(len(pages))
    return list(pages)