def downloadFilesSave(links, fileFormat): # main function
    """Download every URL in *links* and save each one under a random
    numeric filename with extension *fileFormat* in the current directory.

    :param links: iterable of URLs, or the sentinel string 'EMPTY' when the
                  caller found no links to download
    :param fileFormat: file extension (without the dot), e.g. 'png', 'txt'
    :return: a status message string
    """
    if links == 'EMPTY':  # sentinel used by the caller when no links were found
        return ' NO LINKS FOUND !'
    # Image/archive formats must be written in binary mode.
    # BUG FIX: the original tested the *builtin* `format` (never in the
    # list), so binary mode was never selected; test fileFormat instead.
    binary_formats = ['zip', 'png', 'jpg', 'jpeg', 'tiff', 'bmp', 'svg', 'gif']
    mode = 'wb' if fileFormat in binary_formats else 'w'
    for link in links:
        # Pick a random name and retry until it does not collide with an
        # existing file.  BUG FIX: the original compared the bare int against
        # directory entries (never equal) and retried only once.
        name = random.randint(0, 10000001)
        while str(name) + '.' + fileFormat in os.listdir(os.getcwd()):
            name = random.randint(0, 10000001)
        try:
            data = urllib2.urlopen(link).read()
        except urllib2.URLError:
            continue  # skip unreachable links instead of writing an empty file
        saveFile = open(str(name) + '.' + fileFormat, mode)
        try:
            saveFile.write(data)
        finally:
            saveFile.close()  # close even if the write fails
    return ' {} DOWNLOADS SUCCESSFULL YET !'.format(len(os.listdir(os.getcwd())))
Python os.listdir() example source code
def get_firefox_db(db_file):
    '''Return the full path of firefox sqlite databases, platform independent'''
    # Per-platform location of the Firefox profiles directory.
    plat_dict = {"Windows 7" : r"C:\Users\%s\AppData\Roaming\Mozilla\Firefox\Profiles" % os.getlogin(),
                 "Windows XP" : r"C:\Documents and Settings\%s\Application Data\Mozilla\Firefox\Profiles" % os.getlogin(),
                 "Linux" : r"/home/%s/.mozilla/firefox/" % os.getlogin(),
                 "Darwin" : r"/Users/%s/Library/Application Support/Firefox/Profiles" % os.getlogin()}
    system = platform.system()
    # Windows needs the release appended to distinguish 7 from XP.
    if system == "Windows":
        profiles_root = plat_dict[system + " " + platform.release()]
    else:
        profiles_root = plat_dict[system]
    # Scan the "default" profile directories for the requested database file.
    for entry in os.listdir(profiles_root):
        profile_dir = os.path.join(profiles_root, entry)
        if os.path.isdir(profile_dir) and "default" in entry:
            candidate = os.path.join(profile_dir, entry and db_file or db_file)
            candidate = os.path.join(profile_dir, db_file)
            if os.path.isfile(candidate):
                return candidate
    # Reaching this point means no profile contained the database.
    sys.exit("Couldn't find the database file in the default location! Try providing a different location using the -b option...")
def addsitedir(sitedir, known_paths=None):
    """Add 'sitedir' argument to sys.path if missing and handle .pth files in
    'sitedir'"""
    # When no known_paths set is handed in, build one and remember to
    # return None (the caller owns no set in that case).
    if known_paths is None:
        known_paths = _init_pathinfo()
        reset = True
    else:
        reset = False
    sitedir, sitedircase = makepath(sitedir)
    if sitedircase not in known_paths:
        sys.path.append(sitedir)  # Add path component
    try:
        entries = sorted(os.listdir(sitedir))
    except os.error:
        return
    # Process every *.pth file in the directory, in sorted order.
    pth_suffix = os.extsep + "pth"
    for entry in entries:
        if entry.endswith(pth_suffix):
            addpackage(sitedir, entry, known_paths)
    if reset:
        known_paths = None
    return known_paths
def _build_one(self, req, output_dir, python_tag=None):
    """Build one wheel.

    :param req: the requirement to build
    :param output_dir: directory the finished wheel is moved into
    :param python_tag: optional python tag passed through to the builder
    :return: The filename of the built wheel, or None if the build failed.
    """
    tempd = tempfile.mkdtemp('pip-wheel-')
    try:
        if self.__build_one(req, tempd, python_tag=python_tag):
            try:
                # The build drops exactly one wheel file into tempd.
                wheel_name = os.listdir(tempd)[0]
                wheel_path = os.path.join(output_dir, wheel_name)
                shutil.move(os.path.join(tempd, wheel_name), wheel_path)
                logger.info('Stored in directory: %s', output_dir)
                return wheel_path
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt /
                # SystemExit still propagate; any other failure falls
                # through to the cleanup path below.
                pass
        # Ignore return, we can't do anything else useful.
        self._clean_one(req)
        return None
    finally:
        # Always remove the temporary build directory.
        rmtree(tempd)
def maybe_move(self, spec, dist_filename, setup_base):
    """Move the unpacked distribution into the build directory and return
    the new setup base; if the destination already exists, warn and keep
    the existing setup_base."""
    dst = os.path.join(self.build_directory, spec.key)
    if os.path.exists(dst):
        log.warn(
            "%r already exists in %s; build directory %s will not be kept",
            spec.key, self.build_directory, setup_base,
        )
        return setup_base
    if os.path.isdir(dist_filename):
        # The "download" is already an unpacked directory: use it directly.
        setup_base = dist_filename
    else:
        if os.path.dirname(dist_filename) == setup_base:
            os.unlink(dist_filename)  # get it out of the tmp dir
        contents = os.listdir(setup_base)
        if len(contents) == 1:
            only_entry = os.path.join(setup_base, contents[0])
            if os.path.isdir(only_entry):
                # if the only thing there is a directory, move it instead
                setup_base = only_entry
    ensure_directory(dst)
    shutil.move(setup_base, dst)
    return dst
def get_3d_data_slices(slices): # get data in Hounsfield Units
    """Stack already-read DICOM slices into a 3-D int16 volume converted to
    Hounsfield Units.

    :param slices: list of DICOM datasets (must expose pixel_array,
                   ImagePositionPatient, RescaleIntercept, RescaleSlope)
    :return: np.ndarray of dtype int16, shape (n_slices, H, W)
    """
    # Sort by z-position.  BUG FIX: use float() as the sort key — the sibling
    # get_3d_data_hu explicitly marks the int() key as BUGGY (DICOM positions
    # are decimal strings; int() truncates or raises on values like "2.5").
    slices.sort(key=lambda x: float(x.ImagePositionPatient[2]))
    image = np.stack([s.pixel_array for s in slices])
    image = image.astype(np.int16)  # ensure int16 (it may be uint16 for some images)
    image[image == -2000] = 0  # zero the cylindrical out-of-scan padding value
    # Convert to Hounsfield units (HU); the intercept is usually -1024.
    for slice_number in range(len(slices)):
        intercept = slices[slice_number].RescaleIntercept
        slope = slices[slice_number].RescaleSlope
        if slope != 1:
            # Apply the rescale slope in float to avoid int overflow, then
            # truncate back to int16 before adding the intercept.
            image[slice_number] = slope * image[slice_number].astype(np.float64)
            image[slice_number] = image[slice_number].astype(np.int16)
        image[slice_number] += np.int16(intercept)
    return np.array(image, dtype=np.int16)
def get_3d_data_hu(path): # get data in Hounsfield Units
    """Read every DICOM file in *path*, stack the slices in z order and
    return the volume converted to Hounsfield Units as int16."""
    slices = [dicom.read_file(path + '/' + fname) for fname in os.listdir(path)]
    # Sort by z position; float() key (the earlier int() key was buggy).
    slices.sort(key=lambda s: float(s.ImagePositionPatient[2]))
    volume = np.stack([s.pixel_array for s in slices])
    volume = volume.astype(np.int16)  # may arrive as uint16 for some images
    volume[volume == -2000] = 0  # zero the cylindrical out-of-scan padding
    # Convert to Hounsfield units (HU); the intercept is usually -1024.
    for idx in range(len(slices)):
        intercept = slices[idx].RescaleIntercept
        slope = slices[idx].RescaleSlope
        if slope != 1:
            # Rescale in float64, then truncate back to int16.
            volume[idx] = slope * volume[idx].astype(np.float64)
            volume[idx] = volume[idx].astype(np.int16)
        volume[idx] += np.int16(intercept)
    return np.array(volume, dtype=np.int16)
def get_all_problem_instances(problem_path):
    """ Returns a list of instances for a given problem.

    :param problem_path: path of the problem, relative to DEPLOYED_ROOT
    :return: list of parsed JSON instance objects (unreadable or malformed
             instance files are silently skipped)
    """
    instances = []
    instances_dir = join(DEPLOYED_ROOT, problem_path)
    if os.path.isdir(instances_dir):
        for name in os.listdir(instances_dir):
            if not name.endswith(".json"):
                continue
            try:
                # FIX: `with` closes the handle even on errors — the old
                # open(...).read() leaked the file descriptor.
                with open(join(instances_dir, name)) as f:
                    instance = json.load(f)
            except (IOError, OSError, ValueError):
                # unreadable file or invalid JSON -> skip this instance
                continue
            instances.append(instance)
    return instances
def test_cppCompNet(self):
    """Deploy the C++ component waveform and check that the nic_name
    property it reports is a real network interface on this host."""
    nodebooter, domMgr = self.launchDomainManager()
    self.assertNotEqual(domMgr, None)
    nodebooter, devMgr = self.launchDeviceManager("/nodes/test_GPP_node/DeviceManager.dcd.xml")
    self.assertNotEqual(devMgr, None)
    # Install and instantiate the waveform.
    domMgr.installApplication("/waveforms/cpp_comp_w/cpp_comp_w.sad.xml")
    self.assertEqual(len(domMgr._get_applicationFactories()), 1)
    factory = domMgr._get_applicationFactories()[0]
    application = factory.create(factory._get_name(), [], [])
    self.assertEqual(len(domMgr._get_applications()), 1)
    application.start()
    time.sleep(0.5)  # give the component time to start and populate its props
    component = application._get_registeredComponents()[0].componentObject
    props = component.query([CF.DataType(id='nic_name', value=any.to_any(None))])
    reported_nic = props[0].value._v
    # The reported NIC must be one of the host's interfaces.
    self.assertTrue(reported_nic in os.listdir('/sys/class/net'))
    # Tear down and confirm the application is gone.
    application.releaseObject()
    self.assertEqual(len(domMgr._get_applications()), 0)
def test_javaCompNet(self):
    """Deploy the Java component waveform and check that the nic_name
    property it reports is a real network interface on this host."""
    nodebooter, domMgr = self.launchDomainManager()
    self.assertNotEqual(domMgr, None)
    nodebooter, devMgr = self.launchDeviceManager("/nodes/test_GPP_node/DeviceManager.dcd.xml")
    self.assertNotEqual(devMgr, None)
    # Install and instantiate the waveform.
    domMgr.installApplication("/waveforms/java_comp_w/java_comp_w.sad.xml")
    self.assertEqual(len(domMgr._get_applicationFactories()), 1)
    factory = domMgr._get_applicationFactories()[0]
    application = factory.create(factory._get_name(), [], [])
    self.assertEqual(len(domMgr._get_applications()), 1)
    application.start()
    time.sleep(0.5)  # give the component time to start and populate its props
    component = application._get_registeredComponents()[0].componentObject
    props = component.query([CF.DataType(id='nic_name', value=any.to_any(None))])
    reported_nic = props[0].value._v
    # The reported NIC must be one of the host's interfaces.
    self.assertTrue(reported_nic in os.listdir('/sys/class/net'))
    # Tear down and confirm the application is gone.
    application.releaseObject()
    self.assertEqual(len(domMgr._get_applications()), 0)
def updateListAvailableWaveforms(self):
"""
Update available waveforms list.
"""
waveroot = os.path.join(self.root, 'waveforms')
if not os.path.exists(waveroot):
print "Cannot find SDR waveforms directory"
#return {}
return
self.waveforms = {}
for wave_dir in os.listdir(waveroot):
wave_dir_path = os.path.join(waveroot,wave_dir)
if not os.path.isdir(wave_dir_path):
continue
for wave_file in os.listdir(wave_dir_path):
if ".sad.xml" in wave_file.lower():
f_path = os.path.join('waveforms', wave_dir)
f_path = os.path.join(f_path, wave_file)
if wave_dir not in self.waveforms:
self.waveforms[wave_dir] = f_path
def __processDir(self):
    """
    Looks for Makefiles in the given directory and all the sub-directories
    if recursive is set to true
    """
    self.__log("Processing directory %s" % self.__tgt)
    if self.__recurse:
        # Recursive mode: walk the whole tree collecting every Makefile.
        for (path, dirs, files) in os.walk(self.__tgt):
            for curr_file in files:
                if curr_file == __PATTERN__:
                    fname = os.path.join(path, curr_file)
                    self.__make_files.append(fname)
                    self.__log("Adding %s to list" % fname)
    else:
        # Non-recursive mode: only look in the target directory itself.
        if __PATTERN__ in os.listdir(self.__tgt):
            fname = os.path.join(self.__tgt, __PATTERN__)
            self.__log("Appending %s to the list" % fname)
            self.__make_files.append(fname)
def storageindex(self):
    """Load every index CSV file under self.indexdata and bulk-insert its
    rows into the matching collection of self.index (a MongoDB-style
    database object — presumably pymongo; verify against the class setup)."""
    #get the filelist (plain files only, sub-directories are skipped)
    onlyfiles = [ f for f in listdir(self.indexdata) if isfile(join(self.indexdata,f)) ]
    #read from using pandas
    for f in onlyfiles:
        df = pd.read_csv(self.indexdata+"/"+f)
        s=f.split('.')
        # Collection name = characters 2..7 of the base filename
        # (presumably drops a 2-char exchange prefix and keeps a 6-digit
        # stock code — TODO confirm against the actual file naming scheme).
        name = s[0][2:8]
        # Round-trip the frame through JSON (transposed) to get one dict
        # per row, keyed by column name.
        records = json.loads(df.T.to_json()).values()
        for row in records:
            # Re-parse the date column into datetime objects before insert.
            row['date'] = datetime.datetime.strptime(row['date'], "%Y-%m-%d")
        print name
        self.index[name].insert_many(records)
    #storage stock pool into database
def find_templates():
    """
    Load python modules from templates directory and get templates list
    :return: list of tuples (pairs):
    [(compiled regex, lambda regex_match: return message_data)]
    """
    templates = []
    # Directory of this package plus 'templates'.
    # BUG FIX: the original used rstrip('__init__.py'), which strips a
    # *character set* (not the suffix) and corrupts any path ending in one
    # of those letters; os.path.dirname does what was intended.
    package_dir = os.path.dirname(inspect.getsourcefile(lambda: 0))
    templates_directory = os.path.join(package_dir, 'templates')
    template_files = os.listdir(templates_directory)
    for template_file in template_files:
        if template_file.startswith('.') or not template_file.endswith('.py'):
            continue
        # BUG FIX: rstrip('.py') stripped trailing '.', 'p', 'y' characters,
        # mangling names like 'copy.py' -> 'co'; slice off the extension.
        module_name = template_file[:-3]
        # Hack for dev development and distutils
        try:
            template_module = importlib.import_module('templates.{}'.format(
                module_name
            ))
        except ImportError:
            template_module = importlib.import_module('ross.templates.{}'.format(
                module_name
            ))
        # Iterate throw items in template.
        # If there are variable ends with 'templates',
        # extend templates list with it.
        for (name, content) in template_module.__dict__.items():
            if name.endswith('templates'):
                for (regex_text, data_func) in content:
                    templates.append((re.compile(regex_text, re.IGNORECASE), data_func))
    return templates
def get_dataset(dataset_path='Data/Train_Data'):
    """Load (or build and cache) the segmentation training data.

    Tries the cached .npy arrays first; on a cache miss it rebuilds the
    arrays from the image files under *dataset_path* and caches them.

    :param dataset_path: root folder containing 'input' images and matching
                         'mask/mask_*' images
    :return: (X_train, X_test, Y_train, Y_test) split 90/10
    """
    try:
        # Fast path: use the cached numpy arrays when they exist.
        X = np.load('Data/npy_train_data/X.npy')
        Y = np.load('Data/npy_train_data/Y.npy')
    except (IOError, OSError, ValueError):
        # Narrowed from a bare `except:`: cache missing or unreadable/corrupt
        # -> rebuild from the raw images; other errors now propagate.
        inputs_path = dataset_path+'/input'
        images = listdir(inputs_path) # Geting images
        X = []
        Y = []
        for img in images:
            img_path = inputs_path+'/'+img
            # Input image normalized to [0, 1], fixed 64x64 RGB shape.
            x_img = get_img(img_path).astype('float32').reshape(64, 64, 3)
            x_img /= 255.
            # Matching mask lives under .../mask/mask_<name>, single channel.
            y_img = get_img(img_path.replace('input/', 'mask/mask_')).astype('float32').reshape(64, 64, 1)
            y_img /= 255.
            X.append(x_img)
            Y.append(y_img)
        X = np.array(X)
        Y = np.array(Y)
        # Cache the arrays for the next run.
        if not os.path.exists('Data/npy_train_data/'):
            os.makedirs('Data/npy_train_data/')
        np.save('Data/npy_train_data/X.npy', X)
        np.save('Data/npy_train_data/Y.npy', Y)
    X, X_test, Y, Y_test = train_test_split(X, Y, test_size=0.1, random_state=42)
    return X, X_test, Y, Y_test
def create_model(self, train_folder):
    """
    Return the training set, its labels and the trained model
    :param train_folder: folder where to retrieve data
    :return: (train_set, train_labels, trained_model)
    """
    feature_vectors = []
    feature_labels = []
    # One sub-folder per digit class, named '1' .. '9'.
    for digit in range(1, 10):
        digit_folder = train_folder + str(digit)
        sample_names = [pic for pic in os.listdir(digit_folder)
                        if os.path.isfile(os.path.join(digit_folder, pic))]
        for sample_name in sample_names:
            bgr = cv2.imread(os.path.join(digit_folder, sample_name))
            # Expecting black on white: invert the grayscale image.
            gray = 255 - cv2.cvtColor(bgr, cv2.COLOR_BGR2GRAY)
            # Otsu thresholding to get a clean binary glyph.
            _, binary = cv2.threshold(gray, 0, 255,
                                      cv2.THRESH_BINARY + cv2.THRESH_OTSU)
            feature_vectors.append(self.feature(binary))
            feature_labels.append(digit)
    train_set = np.array(feature_vectors, np.float32)
    train_labels = np.array(feature_labels, np.float32)
    # kNN API differs between OpenCV 2.x and 3.x+.
    if cv2.__version__[0] == '2':
        model = cv2.KNearest()
        model.train(train_set, train_labels)
    else:
        model = cv2.ml.KNearest_create()
        model.train(train_set, cv2.ml.ROW_SAMPLE, train_labels)
    return train_set, train_labels, model
def copy_files(src, dst, symlinks=False, ignore=None):
"""Copy files from src to dst."""
for item in os.listdir(src):
s = os.path.join(src, item)
d = os.path.join(dst, item)
if os.path.isdir(s):
shutil.copytree(s, d, symlinks, ignore)
else:
shutil.copy2(s, d)
def execd_module_paths(execd_dir=None):
"""Generate a list of full paths to modules within execd_dir."""
if not execd_dir:
execd_dir = default_execd_dir()
if not os.path.exists(execd_dir):
return
for subpath in os.listdir(execd_dir):
module = os.path.join(execd_dir, subpath)
if os.path.isdir(module):
yield module
def all_migrations(cls):
    """Return migration file names found under models/migrations.

    A file counts as a migration when its name has at least two
    '_'-separated parts and the first part parses as a non-zero integer.
    """
    migrations = []
    files = os.listdir(os.path.join(REDBERRY_ROOT, 'models', 'migrations'))
    for f in files:
        # NOTE: the None-filter is a no-op (str.split never yields None) but
        # is kept for exact Py2 semantics; list() is the FIX — on Python 3
        # filter() is lazy and len() on it raised TypeError.
        bits = list(filter(lambda x: x is not None, f.split('_')))
        try:
            if len(bits) > 1 and int(bits[0]):
                migrations.append(f)
        except ValueError:
            # first token isn't an integer -> not a migration file
            # (narrowed from a bare `except:`)
            pass
    return migrations
def is_mapper_checkpointed(self):
    """
    :return: True when the mapper's save file already exists in the mapper
             folder, False otherwise.
    """
    # Membership in listdir() means a file with that name exists there;
    # return the boolean expression directly instead of if/else True/False.
    return self.mapper_save_location in os.listdir(self.get_mapper_folder_location())