def render(self, length=None, progress=False):
"""
    Render this signal into a numpy array of floats and return the array.
:param length: The length to render, in seconds. Optional.
:param progress: Whether to show a progress bar for rendering
"""
if progress and not progressbar:
print('Install the progressbar module to see a progress bar for rendering')
progress = False
duration = self.duration if length is None else length * SAMPLE_RATE
if duration == float('inf'):
duration = 3*SAMPLE_RATE
else:
duration = int(duration)
out = numpy.empty((duration, 1))
pbar = progressbar.ProgressBar(widgets=['Rendering: ', progressbar.Percentage(), ' ', progressbar.Bar(), ' ', progressbar.ETA()], maxval=duration-1).start() if progress else None
for i in range(duration):
out[i] = self.amplitude(i)
if pbar: pbar.update(i)
if pbar: pbar.finish()
return out
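The `progress and not progressbar` check above implies a module-level optional import. A minimal sketch of that pattern, assuming only the usual fall-back-to-None idiom:

try:
    import progressbar
except ImportError:
    progressbar = None  # render() then prints a hint and disables the bar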
def main():
uri, outfile, dataset = get_arguments()
fd = tempfile.NamedTemporaryFile()
progress = ProgressBar(widgets=[Percentage(), ' ', Bar(), ' ', ETA(), ' ', FileTransferSpeed()])
def update(count, blockSize, totalSize):
if progress.maxval is None:
progress.maxval = totalSize
progress.start()
progress.update(min(count * blockSize, totalSize))
urllib.urlretrieve(uri, fd.name, reporthook = update)
if dataset == 'zinc12':
df = pandas.read_csv(fd.name, delimiter = '\t')
df = df.rename(columns={'SMILES':'structure'})
df.to_hdf(outfile, 'table', format = 'table', data_columns = True)
elif dataset == 'chembl22':
df = pandas.read_table(fd.name,compression='gzip')
df = df.rename(columns={'canonical_smiles':'structure'})
df.to_hdf(outfile, 'table', format = 'table', data_columns = True)
else:
df = pandas.read_csv(fd.name, delimiter = '\t')
df.to_hdf(outfile, 'table', format = 'table', data_columns = True)
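The reporthook callback above is the standard urllib progress idiom; this snippet is Python 2 (`urllib.urlretrieve`). A sketch of the same pattern under Python 3, reusing the fill-in-max-value-on-first-callback trick that also appears further down this page (the URL and filename are placeholders):

import urllib.request
import progressbar

pbar = progressbar.ProgressBar()  # max_value is filled in by the first callback

def report(count, block_size, total_size):
    if pbar.max_value is None:
        pbar.max_value = total_size
        pbar.start()
    pbar.update(min(count * block_size, total_size))

urllib.request.urlretrieve('https://example.com/data.tsv', 'data.tsv', reporthook=report)
pbar.finish()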
def knn_masked_data(trX,trY,missing_data_dir, input_shape, k):
raw_im_data = np.loadtxt(join(script_dir,missing_data_dir,'index.txt'),delimiter=' ',dtype=str)
raw_mask_data = np.loadtxt(join(script_dir,missing_data_dir,'index_mask.txt'),delimiter=' ',dtype=str)
# Using 'brute' method since we only want to do one query per classifier
# so this will be quicker as it avoids overhead of creating a search tree
knn_m = KNeighborsClassifier(algorithm='brute',n_neighbors=k)
prob_Y_hat = np.zeros((raw_im_data.shape[0],int(np.max(trY)+1)))
total_images = raw_im_data.shape[0]
pbar = progressbar.ProgressBar(widgets=[progressbar.FormatLabel('\rProcessed %(value)d of %(max)d Images '), progressbar.Bar()], maxval=total_images, term_width=50).start()
for i in range(total_images):
mask_im=load_image(join(script_dir,missing_data_dir,raw_mask_data[i][0]), input_shape,1).reshape(np.prod(input_shape))
mask = np.logical_not(mask_im > eps) # since mask is 1 at missing locations
v_im=load_image(join(script_dir,missing_data_dir,raw_im_data[i][0]), input_shape, 255).reshape(np.prod(input_shape))
rep_mask = np.tile(mask,(trX.shape[0],1))
# Corrupt whole training set according to the current mask
corr_trX = np.multiply(trX, rep_mask)
knn_m.fit(corr_trX, trY)
prob_Y_hat[i,:] = knn_m.predict_proba(v_im.reshape(1,-1))
pbar.update(i)
pbar.finish()
return prob_Y_hat
def preprocess(self, questions: List[QASetting],
answers: Optional[List[List[Answer]]] = None,
is_eval: bool = False) -> List[XQAAnnotation]:
if answers is None:
answers = [None] * len(questions)
preprocessed = []
if len(questions) > 1000:
bar = progressbar.ProgressBar(
max_value=len(questions),
widgets=[' [', progressbar.Timer(), '] ', progressbar.Bar(), ' (', progressbar.ETA(), ') '])
for q, a in bar(zip(questions, answers)):
preprocessed.append(self.preprocess_instance(q, a))
else:
for q, a in zip(questions, answers):
preprocessed.append(self.preprocess_instance(q, a))
return preprocessed
def preprocess(self, questions: List[QASetting],
answers: Optional[List[List[Answer]]] = None,
is_eval: bool = False) -> List[MCAnnotation]:
if answers is None:
answers = [None] * len(questions)
preprocessed = []
if len(questions) > 1000:
bar = progressbar.ProgressBar(
max_value=len(questions),
widgets=[' [', progressbar.Timer(), '] ', progressbar.Bar(), ' (', progressbar.ETA(), ') '])
for i, (q, a) in bar(enumerate(zip(questions, answers))):
preprocessed.append(self.preprocess_instance(i, q, a))
else:
for i, (q, a) in enumerate(zip(questions, answers)):
preprocessed.append(self.preprocess_instance(i, q, a))
return preprocessed
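Both preprocess() variants rely on progressbar2's callable-bar idiom: a ProgressBar built with max_value wraps any iterable and advances itself as items are consumed. A minimal standalone sketch (the items list is hypothetical):

import progressbar

items = list(range(5000))
bar = progressbar.ProgressBar(max_value=len(items))
for item in bar(items):  # yields each item and updates the bar
    _ = item * 2  # per-item work goes here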
def collect_mailids(server):
folders = server.list_folders()
#construct progressbar
progressbar_widgets = [
'[Searching for mails on server] ',
progressbar.Percentage(),
progressbar.Bar(marker=progressbar.RotatingMarker()), ' ']
progressbar_instance = progressbar.ProgressBar(widgets=progressbar_widgets, maxval=len(folders)).start()
#collect all mailids for all folders
folder_contents = {}
folder_progress = 0
for flags, delimiter, folder in folders:
#read all mailids for the folder
server.select_folder(folder, readonly=True)
folder_contents[folder] = server.search()
        #update progressbar
folder_progress += 1
progressbar_instance.update(folder_progress)
progressbar_instance.finish()
return folder_contents
def download(download_list, total_download_size):
progressbar_widgets = [
'[Downloading mails ] ',
progressbar.Percentage(),
progressbar.Bar(marker=progressbar.RotatingMarker()), ' ',
progressbar.ETA(), ' ',
bitmath.integrations.BitmathFileTransferSpeed()]
progressbar_instance = progressbar.ProgressBar(widgets=progressbar_widgets, maxval=int(total_download_size)).start()
downloaded_size = bitmath.Byte(0)
for folder, mails in download_list.items():
server.select_folder(folder, readonly=True)
for mailid, mailfilename, mailsize in mails:
#make parent directory
if not os.path.isdir(os.path.dirname(mailfilename)):
os.makedirs(os.path.dirname(mailfilename))
#download mail
with open(mailfilename, 'wb') as mailfile:
mailfile.write(server.fetch([mailid], ['RFC822'])[mailid][b'RFC822'])
#update progressbar
downloaded_size += mailsize
progressbar_instance.update(int(downloaded_size))
progressbar_instance.finish()
def bruteforce():
import progressbar
from time import sleep
bar = progressbar.ProgressBar(maxval=60, \
widgets=[progressbar.Bar('==', '[', ']'), ' ', progressbar.Percentage()])
bar.start()
for i in xrange(10):
bar.update(i+1)
sleep(0.05)
wordlist = "/root/2fassassin/crack/wordlist/2fa-wordlist.txt"
target = "/root/2fassassin/loot/*.pfx"
sign = ""
sign += "crackpkcs12 -v -b"
sign += " "
sign += target
sign += "| tee crack.log"
os.system(sign)
bar.finish()
sys.exit()
def bruteforce():
import progressbar
from time import sleep
bar = progressbar.ProgressBar(maxval=60, \
widgets=[progressbar.Bar('=', '[', ']'), ' ', progressbar.Percentage()])
bar.start()
for i in xrange(10):
bar.update(i+1)
sleep(0.05)
wordlist = "/root/2fassassin/crack/wordlist/2fa-wordlist.txt"
target = "/root/2fassassin/loot/*.pfx"
sign = ""
sign += "crackpkcs12 -v -b"
sign += " "
sign += target
sign += "| tee crack.log"
os.system(sign)
bar.finish()
sys.exit()
def Steg_brute(ifile, dicc):
i = 0
ofile = ifile.split('.')[0] + "_flag.txt"
nlines = len(open(dicc).readlines())
with open(dicc, 'r') as passFile:
pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=nlines).start()
for line in passFile.readlines():
password = line.strip('\n')
r = commands.getoutput("steghide extract -sf %s -p '%s' -xf %s" % (ifile, password, ofile))
if not "no pude extraer" in r and not "could not extract" in r:
print(color.GREEN + "\n\n " + r + color.ENDC)
print("\n\n [+] " + color.INFO + "Information obtained with password:" + color.GREEN + " %s\n" % password + color.ENDC)
if check_file(ofile):
with open(ofile, 'r') as outfile:
for line in outfile.readlines():
print(line)
break
pbar.update(i + 1)
i += 1
def progress_bar(n):
import progressbar
return progressbar.ProgressBar(
max_value=n,
widgets=[
            progressbar.Percentage(),
' ',
'(',
progressbar.SimpleProgress(),
')',
' ',
progressbar.Bar(),
' ',
progressbar.AdaptiveETA(),
])
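A minimal usage sketch for the factory above (the per-item work is a placeholder):

pbar = progress_bar(500)
for i in range(500):
    # ... per-item work ...
    pbar.update(i + 1)
pbar.finish()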
# http://code.activestate.com/recipes/577058/
def __call__(self, progress, data, width):
if progress.end_time:
return self.finish_msg
if progress.max_value is progressbar.UnknownLength:
bar = progressbar.BouncingBar
else:
bar = progressbar.Bar
line = bar.__call__(self, progress, data, width)
if data["percentage"] is None:
msg = self.msg
else:
msg = "%s (%d%%)" % (self.msg, data["percentage"])
offset = width // 2 - len(msg) // 2
return line[:offset] + msg + line[offset + len(msg):]
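Only __call__ is shown above; it reads like a custom widget that centers a status message inside the bar and bounces while the total is unknown. A sketch of how such a widget might be declared and attached (the class name and the msg/finish_msg defaults are assumptions, since the rest of the class is not shown):

class MessageBar(progressbar.Bar):  # hypothetical enclosing class
    def __init__(self, msg='working', finish_msg='done', **kwargs):
        self.msg = msg
        self.finish_msg = finish_msg
        super().__init__(**kwargs)
    # __call__ as defined above

bar = progressbar.ProgressBar(max_value=100, widgets=[MessageBar(msg='processing')])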
def __init__(self, *args, **kwargs):
self.dld = FileDownloader()
self.dld.stage(self.cmd_name)
load_continents()
load_oceans()
load_currencies()
load_languages()
self.widgets = [
MemoryUsage(),
progressbar.ETA(),
' |Processed: ',
progressbar.Counter(),
' |Done: ',
progressbar.Percentage(),
progressbar.Bar(),
]
return super().__init__(*args, **kwargs)
def __init__(self, name, max_value=100, history_len=5, display=True,
display_data={'train':['loss', 'accuracy'], 'test':['loss', 'accuracy']},
level=logging.INFO, train_log_mode='TRAIN_PROGRESS', test_log_mode='TEST_PROGRESS'):
super(ProgressbarLogger, self).__init__(
name, level=level, display=display, logfile=None,
train_log_mode=train_log_mode, test_log_mode=test_log_mode)
self.train_log_data = {}
self.test_log_data = {}
self.max_value = max_value
self.history_len = history_len
self.display_data = display_data
self.mode['TRAIN_PROGRESS'] = self.log_train_progress
self.mode['TEST_PROGRESS'] = self.log_test_progress
# create logging format
self.widgets = [progressbar.FormatLabel('(%(value)d of %(max)s)'),
' ', progressbar.Percentage(),
' ', progressbar.Bar()]
self.dynamic_data = {k+'_'+kk: 0.0 for k in display_data.keys() for kk in display_data[k]}
diff_data = {'diff_'+k+'_'+kk: 0.0 for k in display_data.keys() for kk in display_data[k]}
self.dynamic_data.update(diff_data)
for t in display_data.keys():
ddstr = ' [' + t + ']'
for s in display_data[t]:
value_name = t + '_' + s
ddstr = ddstr + ' ' + s + ':' + '%(' + value_name + ').3f (%(diff_' + value_name + ').3f)'
self.widgets.append(progressbar.FormatLabel(ddstr))
self.widgets.extend(['|', progressbar.FormatLabel('Time: %(elapsed)s'), '|', progressbar.AdaptiveETA()])
def train(self, epochs, batch_size, learning_rate, save_to=None):
self.train_step = pt.apply_optimizer(tf.train.AdamOptimizer(learning_rate, epsilon=1), losses = [self.error_function])
init = tf.initialize_all_variables()
self.sess.run(init)
pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=epochs).start()
while self.get_epoch() < epochs:
input_data = self.hdf5reader.next()
_, loss_value = self.sess.run(
[self.train_step, self.error_function],
{
self.encoder.input_data: input_data
}
)
pbar.update(self.get_epoch())
pbar.finish()
def images_to_hdf5(dir_path, output_hdf5, size = (112,112), channels = 3, resize_to = None):
files = sorted(os.listdir(dir_path))
nr_of_images = len(files)
if resize_to:
size = resize_to
i = 0
pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=nr_of_images).start()
data = np.empty(shape=(nr_of_images, size[0], size[1], channels), dtype=np.uint8)
for f in files:
datum = imread(dir_path + '/' + f)
if resize_to:
datum = np.asarray(Image.fromarray((datum), 'RGB').resize((size[0],size[1]), PIL.Image.ANTIALIAS))
data[i,:,:,:] = datum
i = i + 1
pbar.update(i)
pbar.finish()
with h5py.File(output_hdf5, 'w') as hf:
hf.create_dataset('data', data=data)
def load_corpus(self, corenlpserver, process=True):
"""
Use the PubMed web services to retrieve the title and abstract of each PMID
:param corenlpserver:
:param process:
:return:
"""
time_per_abs = []
widgets = [pb.Percentage(), ' ', pb.Bar(), ' ', pb.AdaptiveETA(), ' ', pb.Timer()]
pbar = pb.ProgressBar(widgets=widgets, maxval=len(self.pmids), redirect_stdout=True).start()
for i, pmid in enumerate(self.pmids):
t = time.time()
newdoc = PubmedDocument(pmid)
if newdoc.abstract == "":
logging.info("ignored {} due to the fact that no abstract was found".format(pmid))
continue
newdoc.process_document(corenlpserver, "biomedical")
self.documents["PMID" + pmid] = newdoc
abs_time = time.time() - t
time_per_abs.append(abs_time)
pbar.update(i+1)
pbar.finish()
abs_avg = sum(time_per_abs)*1.0/len(time_per_abs)
logging.info("average time per abstract: %ss" % abs_avg)
def getProgress(self, url, fileSize):
status = json.loads(urllib.urlopen(url).read())
if len(status["data"]) ==0 :
logger.info(url + " upload done ")
return True
widgets = ['Progress: ', Percentage(), ' ', Bar(
marker=RotatingMarker('>-=')), ' ', ETA(), ' ', FileTransferSpeed()]
pbar = ProgressBar(widgets=widgets, maxval=fileSize).start()
upload_size = 0
while upload_size < fileSize:
_response = self.doGet(url)
_data = json.loads(_response)
upload_size = long(_data["data"]["upload_size"])
total_size = long(_data["data"]["total_size"])
if upload_size == 0 and total_size == 0:
break
pbar.update(upload_size)
time.sleep(1)
pbar.finish()
logger.info(url + " upload done")
return True
"""
??????
"""
def scrape_mlb_odds_range(min_date=None, max_date=None):
min_date = min_date or datetime.datetime.today() - datetime.timedelta(days=1)
max_date = max_date or datetime.datetime.today()
if isinstance(min_date, basestring):
min_date = parser.parse(min_date)
if isinstance(max_date, basestring):
max_date = parser.parse(max_date)
date = min_date
pbar = progressbar.ProgressBar(widgets=[progressbar.Percentage(), ' ', progressbar.Bar(), ' ', progressbar.ETA()],
maxval=int((max_date-min_date).total_seconds() / (60*60*24)) + 1)
pbar.start()
saved = 0
hit = 0
while date <= max_date:
day_odds = load_odds_for_day(date)
if day_odds is not None and len(day_odds) > 0:
save_sbr_odds_info('mlb', date, day_odds)
saved += 1
hit += 1
date += datetime.timedelta(days=1)
pbar.update(value=hit)
pbar.finish()
return saved
def scrape_nba_odds_range(min_date=None, max_date=None):
min_date = min_date or datetime.datetime.today() - datetime.timedelta(days=1)
max_date = max_date or datetime.datetime.today()
if isinstance(min_date, basestring):
min_date = parser.parse(min_date)
if isinstance(max_date, basestring):
max_date = parser.parse(max_date)
date = min_date
pbar = progressbar.ProgressBar(widgets=[progressbar.Percentage(), ' ', progressbar.Bar(), ' ', progressbar.ETA()],
maxval=int((max_date-min_date).total_seconds() / (60*60*24)) + 1)
pbar.start()
saved = 0
hit = 0
while date <= max_date:
day_odds = load_odds_for_day(date)
if day_odds is not None and len(day_odds) > 0:
save_sbr_odds_info('nba', date, day_odds)
saved += 1
hit += 1
date += datetime.timedelta(days=1)
pbar.update(value=hit)
pbar.finish()
return saved
def __iter__(self):
if self.count != 0:
widgets = [
'%s: ' % (self.caption,),
progressbar.Percentage(),
' ',
progressbar.Bar(),
' ',
progressbar.ETA(),
]
pbar = progressbar.ProgressBar(widgets=widgets, maxval=self.count)
pbar.start()
for idx, item in enumerate(self.iterator):
yield item
pbar.update(idx)
pbar.finish()
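A usage sketch for the wrapper above, assuming the enclosing class stores `iterator`, `count`, and `caption` (its name and constructor are not shown, so both are hypothetical):

rows = load_rows()  # hypothetical data source
for row in ProgressIterator(iter(rows), count=len(rows), caption='Importing'):
    process(row)  # hypothetical consumer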
def _setup_progress(self, options):
if options.progress:
if self.beanstalk:
# With Beanstalk C&C we don't know how many...
self.progress = progressbar.ProgressBar(
redirect_stdout=True,
redirect_stderr=True,
widgets=[
'Total: ',
progressbar.Counter(),
', ',
progressbar.Timer()
])
else:
self.progress = progressbar.ProgressBar(
redirect_stdout=True,
redirect_stderr=True,
widgets=[
progressbar.Percentage(),
progressbar.Bar(),
' (', progressbar.ETA(), ') ',
])
else:
self.progress = None
def __init__(self, options):
self.wildcards = []
self.options = options
self.domains = []
if options.domains:
self.domains += filter(None, options.domains.read().split("\n"))
self.domains += options.domain
self.domains = list(set(self.domains))
random.shuffle(self.domains)
self.resolvers = map(str.strip, filter(None, options.resolvers.read().split("\n")))
random.shuffle(self.resolvers)
self.names = [X for X in self._load_names(options.names)]
if options.progress:
self.progress = progressbar.ProgressBar(
redirect_stdout=True,
redirect_stderr=True,
widgets=[
progressbar.Percentage(),
progressbar.Bar(),
' (', progressbar.ETA(), ') ',
])
else:
self.progress = None
self.finished = 0
LOG.info("%d names, %d resolvers, %d domains",
len(self.names), len(self.resolvers), len(self.domains))
def compute_embeddings(images):
"""Runs inference on an image.
Args:
image: Image file names.
Returns:
Dict mapping image file name to embedding.
"""
# Creates graph from saved GraphDef.
create_graph()
filename_to_emb = {}
config = tf.ConfigProto(device_count = {'GPU': 0})
bar = progressbar.ProgressBar(widgets=[progressbar.Bar('=', '[', ']'), ' ', progressbar.Percentage()])
with tf.Session(config=config) as sess:
i = 0
for image in bar(images):
if not tf.gfile.Exists(image):
tf.logging.fatal('File does not exist %s', image)
image_data = tf.gfile.FastGFile(image, 'rb').read()
# Some useful tensors:
# 'softmax:0': A tensor containing the normalized prediction across
# 1000 labels.
# 'pool_3:0': A tensor containing the next-to-last layer containing 2048
# float description of the image.
# 'DecodeJpeg/contents:0': A tensor containing a string providing JPEG
# encoding of the image.
# Runs the softmax tensor by feeding the image_data as input to the graph.
softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
embedding_tensor = sess.graph.get_tensor_by_name('pool_3:0')
embedding = sess.run(embedding_tensor,
{'DecodeJpeg/contents:0': image_data})
filename_to_emb[image] = embedding.reshape(2048)
i += 1
# print(image, i, len(images))
return filename_to_emb
# temp_dir is a subdir of temp
def main(project_id, video_basename, sampling_rate=3):
# os.environ['TF_CPP_MIN_LOG_LEVEL'] = '1' # or any {'0', '1', '2'}
video_name = video_basename[:video_basename.index('.')]
# extract video frames
extracted_frame_dir = os.path.join('temp', project_id, video_name, 'frames')
mkdir_p(extracted_frame_dir)
if not os.path.isdir(extracted_frame_dir):
os.mkdir(extracted_frame_dir)
video_path = os.path.join('videos', project_id, video_basename)
vidcap = cv2.VideoCapture(video_path)
print('Extracting video frames...')
bar = progressbar.ProgressBar(maxval=101, widgets=[progressbar.Bar('=', '[', ']'), ' ', progressbar.Percentage()])
bar.start()
    fps = vidcap.get(CV_CAP_PROP_FPS)  # TODO
    fps = fps if fps == fps else 25  # NaN != NaN, so comparing fps to itself detects NaN; the original `!= float('nan')` test never fired
    print('actual fps: %s, sampling rate: %s' % (fps, sampling_rate))
success, image = vidcap.read()
frames_to_extract = range(0, int(vidcap.get(CV_CAP_PROP_FRAME_COUNT)), int(round(fps / sampling_rate)))
frame_count = len(frames_to_extract)
for frame_pos in bar(frames_to_extract):
vidcap.set(CV_CAP_PROP_POS_FRAMES, frame_pos)
success, image = vidcap.read()
# print('Read a new frame: %f ms'% vidcap.get(CV_CAP_PROP_POS_MSEC), success)
cv2.imwrite(os.path.join(extracted_frame_dir, "%09d.jpg" % vidcap.get(CV_CAP_PROP_POS_MSEC)), image) # TODO (might still work)
bar.finish()
def download(number, save_dir='./'):
"""Download pre-trained word vector
:param number: integer, default ``None``
:param save_dir: str, default './'
:return: file path for downloaded file
"""
df = load_datasets()
row = df.iloc[[number]]
url = ''.join(row.URL)
    if not url:
        print('The word vector you specified was not found. Please specify a correct name.')
        return None
widgets = ['Test: ', Percentage(), ' ', Bar(marker=RotatingMarker()), ' ', ETA(), ' ', FileTransferSpeed()]
pbar = ProgressBar(widgets=widgets)
def dlProgress(count, blockSize, totalSize):
if pbar.max_value is None:
pbar.max_value = totalSize
pbar.start()
pbar.update(min(count * blockSize, totalSize))
file_name = url.split('/')[-1]
if not os.path.exists(save_dir):
os.makedirs(save_dir)
save_path = os.path.join(save_dir, file_name)
path, _ = urlretrieve(url, save_path, reporthook=dlProgress)
pbar.finish()
return path
def __enter__(self):
self.bar = progressbar.ProgressBar(
widgets=[
progressbar.Percentage(),
' ',
progressbar.Bar(),
progressbar.FileTransferSpeed(),
' ',
progressbar.ETA(),
],
max_value=self.max_value,
)
self.fd = open(self.output_path, 'wb')
return self
def train(self):
data = Data(self.train_dat, self.train_lab)
batch_num = self.length/self.batch_size if self.length%self.batch_size == 0 else self.length/self.batch_size + 1
model = self.add_model()
with self.sess as sess:
tf.initialize_all_variables().run()
for ite in range(self.iterations):
print "Iteration {}".format(ite)
cost = 0.
pbar = pb.ProgressBar(widgets=[pb.Percentage(), pb.Bar(), pb.ETA()], maxval=batch_num).start()
for i in range(batch_num):
batch_x, batch_y = data.next_batch(self.batch_size)
c, _ = self.sess.run([model['loss'], model['optimizer']], feed_dict={model['train_x']:batch_x, model['train_y']:batch_y, model['p_keep_dens']:0.75})
cost += c / batch_num
pbar.update(i+1)
pbar.finish()
print ">>cost: {}".format(cost)
t_acc, d_acc = self.eval(model, 3000)
# early stop
if t_acc >= 0.995 and d_acc >= 0.995:
break
self.predict(model)
def bar_update(self, epoch, logs):
ologs = {}
for k in self.custom_log_functions:
ologs[k] = self.custom_log_functions[k]()
for k in logs:
if len(k) > 5:
ologs[k[-5:]] = logs[k]
else:
ologs[k] = logs[k]
if not hasattr(self,'bar'):
import progressbar
widgets = [
progressbar.Timer(format='%(elapsed)s'),
' ', progressbar.Counter(),
progressbar.Bar(),
progressbar.AbsoluteETA(format='%(eta)s'), ' ',
]
keys = []
for k in ologs:
keys.append(k)
keys.sort()
for k in keys:
widgets.append(progressbar.DynamicMessage(k))
widgets.append(' ')
self.bar = progressbar.ProgressBar(max_value=self.max_epoch, widgets=widgets)
self.bar.update(epoch+1, **ologs)
def __call__(self, epoch):
if self._batches is None:
logger.info("Preparing evaluation data...")
self._batches = self.reader.input_module.batch_generator(self._dataset, self._batch_size, is_eval=True)
logger.info("Started evaluation %s" % self._info)
    metrics = defaultdict(list)
bar = progressbar.ProgressBar(
max_value=len(self._dataset) // self._batch_size + 1,
widgets=[' [', progressbar.Timer(), '] ', progressbar.Bar(), ' (', progressbar.ETA(), ') '])
for i, batch in bar(enumerate(self._batches)):
inputs = self._dataset[i * self._batch_size:(i + 1) * self._batch_size]
predictions = self.reader.model_module(batch, self._ports)
m = self.apply_metrics(inputs, predictions)
for k in self._metrics:
metrics[k].append(m[k])
metrics = self.combine_metrics(metrics)
super().add_to_history(metrics, self._iter, epoch)
printmetrics = sorted(metrics.keys())
res = "Epoch %d\tIter %d\ttotal %d" % (epoch, self._iter, self._total)
for m in printmetrics:
res += '\t%s: %.3f' % (m, metrics[m])
self.update_summary(self._iter, self._info + '_' + m, metrics[m])
if self._write_metrics_to is not None:
with open(self._write_metrics_to, 'a') as f:
f.write("{0} {1} {2:.5}\n".format(datetime.now(), self._info + '_' + m,
np.round(metrics[m], 5)))
res += '\t' + self._info
logger.info(res)
if self._side_effect is not None:
self._side_effect_state = self._side_effect(metrics, self._side_effect_state)