def collect_mailids(server):
    folders = server.list_folders()

    # construct progressbar
    progressbar_widgets = [
        '[Searching for mails on server] ',
        progressbar.Percentage(),
        progressbar.Bar(marker=progressbar.RotatingMarker()), ' ']
    progressbar_instance = progressbar.ProgressBar(widgets=progressbar_widgets, maxval=len(folders)).start()

    # collect all mailids for all folders
    folder_contents = {}
    folder_progress = 0
    for flags, delimiter, folder in folders:
        # read all mailids for the folder
        server.select_folder(folder, readonly=True)
        folder_contents[folder] = server.search()

        # update progressbar
        folder_progress += 1
        progressbar_instance.update(folder_progress)

    progressbar_instance.finish()
    return folder_contents
Python RotatingMarker() example source code
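All of these snippets drive a text progress bar whose `marker` is a spinning `RotatingMarker` widget. As a minimal, self-contained sketch of the pattern (classic `progressbar` API with `maxval`, `start()`, `update()`, `finish()`, which most of the snippets below assume):

import time
import progressbar

# percentage, a bar animated by a rotating spinner, and an ETA estimate
widgets = ['Working: ', progressbar.Percentage(), ' ',
           progressbar.Bar(marker=progressbar.RotatingMarker()), ' ',
           progressbar.ETA()]
bar = progressbar.ProgressBar(widgets=widgets, maxval=100).start()
for i in range(100):
    time.sleep(0.02)  # stand-in for real work
    bar.update(i + 1)
bar.finish()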
def download(download_list, total_download_size):
    progressbar_widgets = [
        '[Downloading mails ] ',
        progressbar.Percentage(),
        progressbar.Bar(marker=progressbar.RotatingMarker()), ' ',
        progressbar.ETA(), ' ',
        bitmath.integrations.BitmathFileTransferSpeed()]
    progressbar_instance = progressbar.ProgressBar(widgets=progressbar_widgets, maxval=int(total_download_size)).start()

    downloaded_size = bitmath.Byte(0)
    for folder, mails in download_list.items():
        server.select_folder(folder, readonly=True)
        for mailid, mailfilename, mailsize in mails:
            # make parent directory
            if not os.path.isdir(os.path.dirname(mailfilename)):
                os.makedirs(os.path.dirname(mailfilename))

            # download mail
            with open(mailfilename, 'wb') as mailfile:
                mailfile.write(server.fetch([mailid], ['RFC822'])[mailid][b'RFC822'])

            # update progressbar
            downloaded_size += mailsize
            progressbar_instance.update(int(downloaded_size))

    progressbar_instance.finish()
def deleteHostsByHostgroup(groupname):
    hostgroup = zapi.hostgroup.get(output=['groupid'], filter={'name': groupname})
    if len(hostgroup) != 1:
        logger.error('Hostgroup not found: %s\n\tFound this: %s' % (groupname, hostgroup))
        return
    groupid = int(hostgroup[0]['groupid'])
    hosts = zapi.host.get(output=['name', 'hostid'], groupids=groupid)
    total = len(hosts)
    logger.info('Hosts found: %d' % (total))
    if args.run:
        x = 0
        bar = ProgressBar(maxval=total, widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
        logger.echo = False
        for host in hosts:
            x = x + 1
            bar.update(x)
            logger.debug('(%d/%d) >> Removing >> %s' % (x, total, host))
            out = zapi.globo.deleteMonitors(host['name'])
        bar.finish()
        logger.echo = True
    else:
        logger.info('No host removed due to --no-run arg. Full list of hosts:')
        for host in hosts:
            logger.info('%s' % host['name'])
    return
def hosts_disable_all():
    """
    host status 0 = enabled
    host status 1 = disabled
    """
    logger.info('Disabling all hosts, in blocks of 1000')
    hosts = zapi.host.get(output=['hostid'], search={'status': 0})
    maxval = max(int(ceil(len(hosts) / 1000.0)), 1)
    bar = ProgressBar(maxval=maxval, widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
    i = 0
    while hosts:
        block = hosts[:1000]
        del hosts[:1000]
        result = zapi.host.massupdate(hosts=[x for x in block], status=1)
        i += 1
        bar.update(i)
    bar.finish()
    logger.info('Done')
    return
def proxy_passive_to_active():
    """
    proxy status 5 = active
    proxy status 6 = passive
    """
    logger.info('Changing all proxies to active')
    proxys = zapi.proxy.get(output=['proxyid', 'host'],
                            filter={'status': 6})
    if len(proxys) == 0:
        logger.info('Done')
        return
    bar = ProgressBar(maxval=len(proxys), widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
    i = 0
    for x in proxys:
        i += 1
        proxyid = x['proxyid']
        result = zapi.proxy.update(proxyid=proxyid, status=5)
        logger.echo = False
        logger.debug('Changed from passive to active proxy: %s' % (x['host']))
        bar.update(i)
    bar.finish()
    logger.echo = True
    logger.info('Done')
    return
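The Zabbix helpers above and below all assume a module-level `zapi` client and a `logger`. A minimal sketch of how such a client is typically built with pyzabbix (URL and credentials are placeholders):

from pyzabbix import ZabbixAPI

zapi = ZabbixAPI('http://zabbix.example.com')  # hypothetical server URL
zapi.login('api_user', 'api_password')         # hypothetical credentials
print(zapi.api_version())

Note that `zapi.globo.deleteMonitors` in deleteHostsByHostgroup is a site-specific API extension, not part of stock Zabbix.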
def getProgress(self, url, fileSize):
    status = json.loads(urllib.urlopen(url).read())
    if len(status["data"]) == 0:
        logger.info(url + " upload done")
        return True
    widgets = ['Progress: ', Percentage(), ' ', Bar(
        marker=RotatingMarker('>-=')), ' ', ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets, maxval=fileSize).start()
    upload_size = 0
    while upload_size < fileSize:
        _response = self.doGet(url)
        _data = json.loads(_response)
        upload_size = long(_data["data"]["upload_size"])
        total_size = long(_data["data"]["total_size"])
        if upload_size == 0 and total_size == 0:
            break
        pbar.update(upload_size)
        time.sleep(1)
    pbar.finish()
    logger.info(url + " upload done")
    return True
"""
??????
"""
def download(number, save_dir='./'):
    """Download pre-trained word vector
    :param number: integer, default ``None``
    :param save_dir: str, default './'
    :return: file path for downloaded file
    """
    df = load_datasets()
    row = df.iloc[[number]]
    url = ''.join(row.URL)
    if not url:
        print('The word vector you specified was not found. Please specify a correct name.')
        return None

    widgets = ['Test: ', Percentage(), ' ', Bar(marker=RotatingMarker()), ' ', ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets)

    def dlProgress(count, blockSize, totalSize):
        if pbar.max_value is None:
            pbar.max_value = totalSize
            pbar.start()
        pbar.update(min(count * blockSize, totalSize))

    file_name = url.split('/')[-1]
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    save_path = os.path.join(save_dir, file_name)
    path, _ = urlretrieve(url, save_path, reporthook=dlProgress)
    pbar.finish()
    return path
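A hypothetical call, assuming `load_datasets()` returns a table whose rows index the available vectors:

path = download(0, save_dir='./vectors')  # fetch the vector listed in row 0
print('saved to', path)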
def collect_mailinfos(server, folder_contents, outpath_format):
    # construct progressbar
    progressbar_widgets = [
        '[Choosing mails for download ] ',
        progressbar.Percentage(),
        progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA()]
    total_count = 0
    for folder, mailids in folder_contents.items():
        total_count += len(mailids)
    progressbar_instance = progressbar.ProgressBar(widgets=progressbar_widgets, maxval=total_count).start()

    # collect all mailinfos
    mailinfos = {}
    mailinfo_count = 0
    for folder, mailids in folder_contents.items():
        mailinfos[folder] = []

        # get mailinfo bit by bit
        server.select_folder(folder, readonly=True)
        for mailid in mailids:
            # fetch mail information
            mailinfo = server.fetch([mailid], ['ENVELOPE', 'INTERNALDATE', 'RFC822.SIZE'])[mailid]
            mailsize = bitmath.Byte(mailinfo[b'RFC822.SIZE'])
            mailfilename = construct_mailfilename(outpath_format, mailinfo, args.outdir, folder, mailid)

            # only add if mailfilename can be constructed
            if mailfilename:
                mailinfos[folder].append((mailid, mailfilename, mailsize))

            mailinfo_count += 1
            progressbar_instance.update(mailinfo_count)

    progressbar_instance.finish()
    return mailinfos
def cleanup(stored_files, stored_dirs, download_list, outdir):
    # create list of files to keep
    keep_list = []
    for folder, mails in download_list.items():
        for mailid, mailfilename, mailsize in mails:
            keep_list.append(mailfilename)

    progressbar_widgets = [
        '[Cleaning up outdir ] ',
        progressbar.Percentage(),
        progressbar.Bar(marker=progressbar.RotatingMarker()), ' ']
    progressbar_instance = progressbar.ProgressBar(widgets=progressbar_widgets, maxval=len(stored_files)).start()
    file_count = 0

    # delete all files we don't need to keep
    for file in stored_files:
        # delete if not on server
        if file not in keep_list:
            os.remove(file)

        # progressbar
        file_count += 1
        progressbar_instance.update(file_count)
    progressbar_instance.finish()

    # remove empty folders
    possible_empty_folders = True
    while possible_empty_folders:
        # find all subfolders
        stored_dirs = []
        for root, dirs, files in os.walk(outdir):
            for name in dirs:
                stored_dirs.append(os.path.join(root, name))

        # delete empty folders; rescan if at least one folder was deleted
        possible_empty_folders = False
        for folder in stored_dirs:
            if not os.listdir(folder):
                shutil.rmtree(folder)
                possible_empty_folders = True
def print_status_stream(title, stream):
    widgets = [title, FormatLabel(''), ' ', Percentage(), ' ', Bar(), ' ', RotatingMarker()]
    bar = None
    if sys.stderr.isatty():
        bar = progressbar.ProgressBar(widgets=widgets, max_value=255)

    def print_error(status):
        print(status['error'])

    def print_status(status):
        progress = status.get('progressDetail')
        if progress:
            widgets[1] = FormatLabel("%12s" % (status['status']))
            prog = int(round(255 * ((progress['current'] / progress['total']))))
            if bar is not None:
                bar.update(prog)

    def print_unknown(status):
        print(status)

    for line in stream:
        status = json.loads(line.decode('utf8'))
        if 'error' in status:
            print_error(status)
        elif 'status' in status:
            print_status(status)
        else:
            print_unknown(status)
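The stream consumed here is the line-delimited JSON that the Docker daemon emits for pull/push/build operations. A hedged usage sketch with docker-py's low-level client (the image name is a placeholder):

import docker

client = docker.APIClient()
# render the pull's JSON progress events as a progress bar
print_status_stream('Pulling: ', client.pull('alpine:latest', stream=True))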
def createSQL(table, values, name='insert'):
    '''
    Generate the SQL insert lines, breaking each insert into up to ~1k values
    and each file into up to ~1k inserts (~1M values total per SQL file)
    '''
    logger.info('Generating SQL file')
    queryInsert = 'INSERT INTO %s (itemid,clock,num,value_min,value_avg,value_max) VALUES' % table
    i = 0  # Controls the progress bar
    x = 0  # Controls the number of values in one insert
    y = 0  # Controls the number of inserts in one file
    z = 0  # Controls the file name suffix
    valuesLen = len(values)
    sqlFile = '%s.sql.%d' % (name, z)
    logger.debug('Total items for %s: %d' % (name, valuesLen))
    if valuesLen > 0:
        bar = ProgressBar(maxval=valuesLen, widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
        for value in values:
            i += 1
            x += 1
            if x != 1:  # Every value except the first in an insert
                sqlInsert = '%s,%s' % (sqlInsert, value)
            else:
                sqlInsert = value
            if y >= 1000:  # If there are more than 1k inserts, switch to a new file
                z += 1
                y = 0
            if x >= 1000 or i == valuesLen:  # If there are 1k values or the list ended, write to file
                sqlFile = '%s.sql.%d' % (name, z)
                fileAppend(f=sqlFile, content='%s %s;\n' % (queryInsert, sqlInsert))
                x = 0
                y += 1
                sqlInsert = ''
            if args.loglevel.upper() != 'DEBUG':  # Don't print the progressbar in debug mode
                bar.update(i)
        bar.finish()
    else:
        logger.warning('No values received')
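As a worked example of the chunking: 2,500,000 value tuples become 2,500 insert statements of 1,000 values each, split across roughly three files (`name.sql.0` and `name.sql.1` with ~1,000 inserts apiece, `name.sql.2` with the remainder). A hypothetical call, assuming a `fileAppend` helper that appends text to the named file:

# three pre-rendered rows: (itemid, clock, num, value_min, value_avg, value_max)
values = ['(10001, 1500000000, 60, 1, 5, 9)',
          '(10001, 1500003600, 60, 2, 6, 10)',
          '(10001, 1500007200, 60, 3, 7, 11)']
createSQL(table='trends_uint', values=values, name='myhost')
# -> appends "INSERT INTO trends_uint (...) VALUES (...),(...),(...);" to myhost.sql.0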
def discovery_disable_all(status=0):
    """
    Change the status of all *auto* discovery rules
    Status 0 = enable
    Status 1 = disable
    """
    logger.info('Disabling all network discoveries')
    druleids = zapi.drule.get(output=['druleid', 'iprange', 'name', 'proxy_hostid', 'status'],
                              selectDChecks='extend', filter={'status': 0})
    if len(druleids) == 0:
        logger.info('Done')
        return
    bar = ProgressBar(maxval=len(druleids), widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
    i = 0
    for x in druleids:
        params_disable = {
            'druleid': x['druleid'],
            'iprange': x['iprange'],
            'name': x['name'],
            'dchecks': x['dchecks'],
            'status': 1
        }
        out = zapi.drule.update(**params_disable)
        logger.echo = False
        if out:
            logger.debug('\tNew status: %s (%s) --> %d' % (x['name'], out['druleids'], status))
        else:
            logger.warning('\tFAILED to change status: %s (%s) --> %d' % (x['name'], out['druleids'], status))
        i += 1
        bar.update(i)
    logger.echo = True
    bar.finish()
    logger.info('Done')
    return
def desabilitaItensNaoSuportados():
    query = {
        "output": "extend",
        "filter": {
            "state": 1
        },
        "monitored": True
    }
    filtro = raw_input('Search filter for key_? [ENTER = none] ')
    if len(filtro) > 0:
        query['search'] = {'key_': filtro}
    limite = raw_input('Item limit? [ENTER = none] ')
    if len(limite) > 0:
        try:
            query['limit'] = int(limite)
        except ValueError:
            print 'Invalid limit'
            raw_input("Press ENTER to go back")
            main()
    opcao = raw_input("Confirm operation? [s/n] ")
    if opcao == 's' or opcao == 'S':
        itens = zapi.item.get(query)
        print 'Found {} items'.format(len(itens))
        bar = ProgressBar(maxval=len(itens), widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
        i = 0
        for x in itens:
            result = zapi.item.update({"itemid": x['itemid'], "status": 1})
            i += 1
            bar.update(i)
        bar.finish()
        print "Items disabled!"
        raw_input("Press ENTER to continue")
        main()
def __init__(self):
    self.stop_running = threading.Event()
    self.progress_thread = threading.Thread(target=self.init_progress)
    self.progress_thread.daemon = True
    spinner = RotatingMarker()
    spinner.INTERVAL = datetime.timedelta(milliseconds=100)
    self.widgets = [spinner, ' ', Percentage(), ' ', FormatLabel('Calculating patch requirements'), ' ', Bar(), ' ', FormatLabel('')]
    self.progress = ProgressBar(redirect_stdout=True, widgets=self.widgets, max_value=100)
    self.progress.update(0)
def doUploadFileProgress(self, filePath, url):
    startTime = getNow()
    result = False
    try:
        widgets = ['Progress: ', Percentage(), ' ', Bar(
            marker=RotatingMarker('>-=')), ' ', ETA(), ' ', FileTransferSpeed()]
        pbar = ProgressBar(widgets=widgets, maxval=os.path.getsize(filePath)).start()
        progress = Progress()
        fileSizeStr = formatSize(os.path.getsize(filePath))
        logger.info("Uploading file {0}, size {1}".format(filePath, fileSizeStr))
        stream = file_with_callback(filePath, 'rb', progress.update, os.path.basename(filePath), pbar)
        params = {"filedata": stream}
        datagen, headers = multipart_encode(params)
        upload_request = urllib2.Request(url, datagen, headers)
        response = urllib2.urlopen(upload_request).read()
        endTime = getNow()
        totalTime = caltime(startTime, endTime)
        logger.info("Uploaded file {0}: start {1}, end {2}, size {3}, elapsed {4}, response {5}"
                    .format(filePath, startTime, endTime, fileSizeStr, totalTime, response))
        # The response may carry extra data that makes json.loads fail with
        # errmsg("Extra data", s, end, len(s)), so check the raw string first
        if "code\":0" in response.replace(' ', ''):
            result = True
        else:
            result = json.loads(response)["code"] == 0
    except Exception as e:
        logger.error("Failed to upload file {0}, exception: {1}".format(filePath, e))
    return result
def _addresses_to_check_with_caching(self, show_progress=True):
    num_addrs = len(list(self._addresses_to_check()))
    widgets = ['ROP: ', progressbar.Percentage(), ' ',
               progressbar.Bar(marker=progressbar.RotatingMarker()),
               ' ', progressbar.ETA(), ' ', progressbar.FileTransferSpeed()]
    progress = progressbar.ProgressBar(widgets=widgets, maxval=num_addrs)
    if show_progress:
        progress.start()
    self._cache = dict()
    seen = dict()
    for i, a in enumerate(self._addresses_to_check()):
        if show_progress:
            progress.update(i)
        try:
            bl = self.project.factory.block(a)
            if bl.size > self._max_block_size:
                continue
            block_data = bl.bytes
        except (SimEngineError, SimMemoryError):
            continue
        if block_data in seen:
            self._cache[seen[block_data]].add(a)
            continue
        else:
            if len(bl.vex.constant_jump_targets) == 0 and not self._block_has_ip_relative(a, bl):
                seen[block_data] = a
                self._cache[a] = set()
            yield a
    if show_progress:
        progress.finish()
def main():
    '''
    Controls general flow of operations
    '''
    # If it exists, use the cached data of hosts and items
    if os.path.isfile(move_items_file):
        with open(move_items_file) as infile:
            hosts = json.load(infile)
        logger.info('Cache loaded from file (%s)' % move_items_file)
    else:
        hosts = getItems()
        with open(move_items_file, 'w') as outfile:
            json.dump(hosts, outfile)
        logger.info('Cache written to file (%s)' % move_items_file)

    for host in hosts:
        logger.info('Getting trends data of host: %s' % host['name'])
        host['trends'] = list()
        host['trends_uint'] = list()
        if len(host['itens']) > 0:
            bar = ProgressBar(maxval=len(host['itens']), widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
            i = 0
            for item in host['itens']:
                temp = getTrends(hostname=host['name'], item=item)
                i += 1
                if args.loglevel.upper() != 'DEBUG':
                    bar.update(i)
                if temp['table'] == 'trends':
                    for value in temp['values']:
                        host['trends'].append('(%d, %d, %d, %d, %d, %d)' % (int(item['itemid']), int(value[1]), int(value[2]), int(value[3]), int(value[4]), int(value[5])))
                elif temp['table'] == 'trends_uint':
                    for value in temp['values']:
                        host['trends_uint'].append('(%d, %d, %d, %d, %d, %d)' % (int(item['itemid']), int(value[1]), int(value[2]), int(value[3]), int(value[4]), int(value[5])))
                else:
                    logger.warning('Unknown value type: %s' % temp['table'])
            bar.finish()

        '''
        Now we send blocks of up to ~1M values to generate the SQL files
        '''
        # a host can have both float and unsigned trends, so check each independently
        if len(host['trends']) > 0:
            createSQL(table='trends', values=host['trends'], name=host['name'])
        if len(host['trends_uint']) > 0:
            createSQL(table='trends_uint', values=host['trends_uint'], name=host['name'])
        if len(host['trends']) == 0 and len(host['trends_uint']) == 0:
            logger.warning('No data from %s found to be sent.' % host['name'])

    # Start DB connection