def get(self, url):
    """Look up the stored source for *url* and write it back as JSON.

    Replies 404 when no matching record exists.
    """
    with self._connect() as conn:
        try:
            record = source(conn, url)
            self.write(json_encode(record))
        except IndexError:
            # source() signals "no such row" with IndexError.
            self.set_status(404, "Can't find '%s'" % url)
# Example source code for python url() usage (collected snippets)
def delete(self, url):
    """Remove the entry for *url* and report how many rows were deleted."""
    with self._connect() as conn:
        removed = delete(conn, url)
        self.set_status(204, "Deleted %s with count %d" % (url, removed))
def post(self):
    """Register a new source URL from the request body.

    Replies 400 for a malformed or duplicate URL, 201 on success.
    """
    candidate = self.get_body_argument("url")
    if not validators.url(candidate):
        self.set_status(400, "bad URL")
        return
    with self._connect() as conn:
        try:
            createSource(conn, candidate)
        except sqlite3.IntegrityError:
            # UNIQUE constraint on the url column: already registered.
            self.set_status(400, "duplicate URL")
            return
    self.set_status(201)
def get(self, url):
    """Fetch the record stored for *url* and emit it as JSON (404 if absent)."""
    with self._connect() as connection:
        try:
            # Both the lookup and the encode/write stay inside the try so an
            # IndexError from either path yields the same 404 response.
            self.write(json_encode(source(connection, url)))
        except IndexError:
            self.set_status(404, "Can't find '%s'" % url)
def __parse_url(url):
    """Split *url* into its urlparse components.

    Fragments are disabled, so a literal '#' stays part of the path/query.
    """
    parsed = urlparse(url, allow_fragments=False)
    return parsed
def f_http_get(self, url, ):
    """Send an HTTP GET request to *url*.

    The request payload is taken from ``self.input``; the actual transfer is
    delegated to the private ``__do_http_request`` helper.
    """
    return self.__do_http_request('GET', url, self.input)
def f_http_post(self, url):
    """Send an HTTP POST request to *url*.

    Mirrors ``f_http_get``: the body comes from ``self.input`` and the work is
    delegated to the private ``__do_http_request`` helper.
    """
    return self.__do_http_request('POST', url, self.input)
def f_retrieve_image(self, url=None):
    """retrieve an image, image is stored in memory only
    :rtype: BytesIO object
    :param url: Image URL (used only when ``self.input`` is empty)
    :return: In-memory buffer holding the raw image bytes
    """
    # self.input takes priority over the url argument.
    # NOTE(review): when the scheme is missing this permanently rewrites
    # self.input — confirm callers do not rely on the original value.
    if self.input:
        if not self.input.startswith('http'):
            self.input = 'http://' + self.input
        output = BytesIO(requests.get(self.input).content)
    else:
        output = BytesIO(requests.get(url).content)
    return output
def strip_link(url):
    """Return the first path segment of a valid URL; abort the program otherwise."""
    if not validators.url(url):
        exit("[!] Not a valid URL")
    # 'http://host/segment/...' splits to ['http:', '', 'host', 'segment', ...],
    # so index 3 is the piece immediately after the host.
    return url.split("/")[3]
def main():
    """CLI entry point: download an imgur image and transcode it to mp3 via pngify.py.

    Parses -l/--link (image URL) and -o/--output (target base name), downloads
    the image to a temp_* file, runs pngify.py on it, then removes the temp file.
    """
    parser = ArgumentParser(description="Transcode text from an imgur image to an mp3")
    parser.add_argument("-l", "--link", dest="link_input", help="The text or image file to transcode", metavar="STRING")
    parser.add_argument("-o", "--output", dest="file_output", help="The file name to output the result into (whether it be an image, or other file type)", metavar="FILE")
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit()
    args = vars(parser.parse_args())
    if args['link_input'] is not None:
        output_filename = args['file_output']
        url = args['link_input']
        if output_filename is None:  # if a filename isn't specified, make it the imgur image code
            output_fileWithExtension = strip_link(url)
            output_file = output_fileWithExtension.split(".", 1)[0]
            urllib.urlretrieve(url, "temp_" + output_fileWithExtension)
            # BUG FIX: output_filename is None in this branch, so the original
            # "rm temp_"+output_filename raised TypeError; the temp file we
            # downloaded (and must delete) is temp_<output_fileWithExtension>.
            os.system("python pngify.py -i temp_" + output_fileWithExtension + " -o " + output_file + ".mp3; rm temp_" + output_fileWithExtension)  # run pngify.py then delete temp file
        else:  # image name is specified
            o = output_filename.split(".", 1)
            output_file = output_filename.split(".", 1)[0]
            if o[len(o) - 1] != "png":  # if the user didn't input the file extension, add it in automatically
                output_filename = output_filename + ".png"
            urllib.urlretrieve(url, "temp_" + output_filename)  # download file
            # NOTE(review): building shell commands from user-supplied names is
            # injection-prone; prefer subprocess.run([...], shell=False). Also,
            # urllib.urlretrieve is the Python 2 spelling — elsewhere this file
            # uses urllib.request.urlretrieve; confirm the intended runtime.
            os.system("python pngify.py -i temp_" + output_filename + " -o " + output_file + ".mp3; rm temp_" + output_filename)  # run pngify.py then delete temp file
def parse_url(url):
    """Best-effort normalization of a messy URL string.

    Tries the string as-is, then with an ``http://`` prefix; rejects known
    "no website" placeholder values; then repairs common scheme/www typos and
    re-validates. Returns the cleaned URL, or None when nothing salvageable.
    """
    if url is None:
        return None
    url = url.strip()
    if validators.url(url):
        return url
    if validators.url("http://%s" % url):
        return "http://%s" % url
    # Placeholder values that mean "no website".
    if url in ["n.a", 'non.e', '.0', '-.-', '.none', '.nil', 'N/A', 'TBC',
               'under construction', '.n/a', '0.0', '.P', b'', 'no.website']:
        return None
    # Repair frequent scheme typos.
    for i in ['http;//', 'http//', 'http.//', 'http:\\\\',
              'http://http://', 'www://', 'www.http://']:
        url = url.replace(i, 'http://')
    url = url.replace('http:/www', 'http://www')
    # Repair frequent 'www' typos.
    for i in ['www,', ':www', 'www:', 'www/', 'www\\\\', '.www']:
        url = url.replace(i, 'www.')
    url = url.replace(',', '.')
    url = url.replace('..', '.')
    if validators.url(url):
        return url
    if validators.url("http://%s" % url):
        return "http://%s" % url
    # FIX: the original fell off the end here; make the None result explicit.
    return None
def get_domain(url=None, email=None):
    """Return the host part of *url* with any leading 'www.' removed.

    Returns None when no URL is given. The *email* parameter is accepted for
    interface compatibility but is not used by this implementation.
    """
    if url is None:
        return None
    host = urlparse(url).netloc
    return host[4:] if host.startswith('www.') else host
def enumerate_http_resources(package, package_path):
    # Generator: yields (url, destination-directory) pairs for every external
    # HTTP resource referenced by this package's resource.json, command.json
    # and config.json files under package_path.
    with (package_path / 'resource.json').open() as json_file:
        resource = json.load(json_file)
    # Images (screenshots are deliberately skipped).
    for name, url in resource.get('images', {}).items():
        if name != 'screenshots':
            yield url, pathlib.Path(package, 'images')
    # Generic asset URIs.
    for name, url in resource.get('assets', {}).get('uris', {}).items():
        yield url, pathlib.Path(package, 'uris')
    # CLI binaries: only the x86-64 URL of each named binary is collected;
    # missing keys fall through to '' via the chained .get defaults.
    for k in resource.get('cli', {}).get('binaries', {}):
        url = resource.get('cli', {}).get('binaries', {}).get(k, {}).get('x86-64', {}).get('url', '')
        yield url, pathlib.Path(package, 'cli', k)
    # Optional command.json: pip entries that are direct http(s) links.
    command_path = (package_path / 'command.json')
    if command_path.exists():
        with command_path.open() as json_file:
            commands = json.load(json_file)
        for url in commands.get("pip", []):
            if url.startswith('http'):
                yield url, pathlib.Path(package, 'commands')

    def traverse_yield(d, key='root'):
        # Recursively walk the config tree; a dict whose 'default' value looks
        # like an http URL (and passes valid_download) is treated as a resource.
        # Closes over `package` from the enclosing function.
        if isinstance(d, dict):
            if 'default' in d and str(d['default']).startswith('http'):
                url = d['default']
                if valid_download(url):
                    yield url, pathlib.Path(package, 'config', key)
            else:
                for k, v in d.items():
                    yield from traverse_yield(v, k)

    # Optional config.json: scan it for downloadable defaults.
    config_path = (package_path / 'config.json')
    if config_path.exists():
        with config_path.open() as json_file:
            config = json.load(json_file)
        yield from traverse_yield(config)
def add_http_resource(dir_path, url, base_path):
    """Download *url* into dir_path/base_path, creating directories as needed.

    The local file name is the final component of the URL's path.
    """
    filename = pathlib.Path(urllib.parse.urlparse(url).path).name
    archive_path = dir_path / base_path / filename
    print('Adding {} at {}.'.format(url, archive_path))
    os.makedirs(str(archive_path.parent), exist_ok=True)
    urllib.request.urlretrieve(url, str(archive_path))
def valid_download(url):
    """True when *url* validates and a HEAD request answers with status < 400."""
    if not validators.url(url):
        # Short-circuit: never issue a request for a malformed URL.
        return False
    status = int(httplib2.Http().request(url, 'HEAD')[0]['status'])
    return status < 400
def url_exists(path):
    """Return True when *path* is a syntactically valid URL, else False."""
    # validators.url returns True on success and a falsy object on failure,
    # so bool() reproduces the original True/False contract exactly.
    return bool(validators.url(path))
def validate_account_signin(account):
    """Probe the AWS sign-in endpoint for *account* (12-digit ID or alias).

    Returns a dict containing the classified accountId/accountAlias, the
    sign-in URI, whether the endpoint exists (HTTP 302 redirect observed),
    and any validation/network error encountered.
    """
    result = {
        'accountAlias': None,
        'accountId': None,
        'signinUri': 'https://' + account + '.signin.aws.amazon.com/',
        'exists': False,
        'error': None
    }
    # BUG FIX: re.match only anchors at the start, so any string *beginning*
    # with 12 digits (e.g. 13 digits) was misclassified as an account ID.
    # fullmatch requires exactly 12 digits.
    if re.fullmatch(r'\d{12}', account):
        result['accountId'] = account
    else:
        result['accountAlias'] = account
    if not validators.url(result['signinUri']):
        result['error'] = 'Invalid URI'
        return result
    try:
        # allow_redirects=False: a 302 here is the existence signal itself.
        r = requests.get(result['signinUri'], allow_redirects=False)
        if r.status_code == 302:
            result['exists'] = True
    except requests.exceptions.RequestException as e:
        result['error'] = e
    return result
def run(self):
    # Drain the logging queue until every worker has sent its None sentinel
    # (one sentinel per worker; self.count tracks how many have finished).
    while True:
        if self.count >= self.workers_number:
            break
        try:
            test = self.queue_logging.get()
            if test is None:
                # Sentinel value: one worker is done.
                self.count += 1
            else:
                if test['success']:
                    log.success('Authentication successful: Username: \"{}\" Password: \"{}\" URL: {}'.format(test['username'], test['password'], test['target']['url']))
                else:
                    if self.verbose:
                        log.debug('Authentication failed: Username: \"{}\" Password: \"{}\" URL: {}'.format(test['username'], test['password'], test['target']['url']))
                # Progress counts attempts, not sentinels; report every 10th.
                self.progress += 1
                if self.progress % 10 == 0:
                    if self.verbose:
                        log.info('Progress : {}'.format(self.progress))
                    else:
                        # update=True presumably rewrites the same console line
                        # in non-verbose mode — confirm against the log helper.
                        log.info('Progress : {}'.format(self.progress), update=True)
        except Exception as e:
            # Keep draining even if a single record is malformed; log and go on.
            traceback.print_exc()
            log.error('WorkerLogging => {} : {}'.format(type(e), e))
    log.info('Progress : {} (end)'.format(self.progress))
def has_url(_string):
    """Return True when *_string* is a valid URL, else False."""
    # Collapse the original if/return pair: validators.url yields True on
    # success and a falsy result on failure, so bool() is behavior-identical.
    return bool(validators.url(_string))