@asyncio.coroutine
def get_url(hass, url):
    """Fetch a URL and return its body, or None on error (legacy pre-async/await coroutine style)."""
    websession = async_get_clientsession(hass)
    request = None
    try:
        with async_timeout.timeout(10, loop=hass.loop):
            request = yield from websession.get(url)

            if request.status != 200:
                _LOGGER.error("Error %d on load url %s",
                              request.status, request.url)
                return None

            return (yield from request.read())
    except (asyncio.TimeoutError, aiohttp.errors.ClientError):
        _LOGGER.error('Timeout or client error downloading %s', url)
    finally:
        if request is not None:
            yield from request.release()
    return None
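# For reference only: a minimal, self-contained sketch of the same
# fetch-with-timeout pattern in modern async/await style with plain aiohttp
# (no Home Assistant session helper). The 10-second budget mirrors the code
# above; everything else here is an illustrative assumption.
import asyncio
import aiohttp

async def fetch(url):
    timeout = aiohttp.ClientTimeout(total=10)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        try:
            async with session.get(url) as response:
                if response.status != 200:
                    return None
                return await response.read()
        except (asyncio.TimeoutError, aiohttp.ClientError):
            return None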
def test_can_inspect_last_request(now):
    "HTTPretty.last_request is a mimetools.Message request from last match"
    HTTPretty.register_uri(HTTPretty.POST, "http://api.github.com/",
                           body='{"repositories": ["HTTPretty", "lettuce"]}')

    request = urllib2.Request(
        'http://api.github.com',
        b'{"username": "gabrielfalcao"}',
        {
            'content-type': 'text/json',
        },
    )
    fd = urlopen(request)
    got = fd.read()
    fd.close()

    expect(HTTPretty.last_request.method).to.equal('POST')
    expect(HTTPretty.last_request.body).to.equal(
        b'{"username": "gabrielfalcao"}',
    )
    expect(HTTPretty.last_request.headers['content-type']).to.equal(
        'text/json',
    )
    expect(got).to.equal(b'{"repositories": ["HTTPretty", "lettuce"]}')
def test_can_inspect_last_request_with_ssl(now):
    "HTTPretty.last_request is recorded even when mocking 'https' (SSL)"
    HTTPretty.register_uri(HTTPretty.POST, "https://secure.github.com/",
                           body='{"repositories": ["HTTPretty", "lettuce"]}')

    request = urllib2.Request(
        'https://secure.github.com',
        b'{"username": "gabrielfalcao"}',
        {
            'content-type': 'text/json',
        },
    )
    fd = urlopen(request)
    got = fd.read()
    fd.close()

    expect(HTTPretty.last_request.method).to.equal('POST')
    expect(HTTPretty.last_request.body).to.equal(
        b'{"username": "gabrielfalcao"}',
    )
    expect(HTTPretty.last_request.headers['content-type']).to.equal(
        'text/json',
    )
    expect(got).to.equal(b'{"repositories": ["HTTPretty", "lettuce"]}')
def test_httpretty_should_allow_registering_regexes():
    "HTTPretty should allow registering regexes with urllib2"
    HTTPretty.register_uri(
        HTTPretty.GET,
        re.compile(r"https://api.yipit.com/v1/deal;brand=(?P<brand_name>\w+)"),
        body="Found brand",
    )

    request = urllib2.Request(
        "https://api.yipit.com/v1/deal;brand=GAP",
    )
    fd = urllib2.urlopen(request)
    got = fd.read()
    fd.close()

    expect(got).to.equal(b"Found brand")
def get_word_syllables(word, pronouncingDict):
    # pronouncingDict maps a word to a list of transcriptions
    # (a word may have alternative pronunciations)
    # e.g. 'orange':
    #   -> [['AO1', 'R', 'AH0', 'N', 'JH'], ['AO1', 'R', 'IH0', 'N', 'JH']]
    # vowels are marked with stress numbers from 0-2,
    # so counting the vowels gives the number of syllables
    try:
        # last element is more common
        pronList = pronouncingDict[word.lower()][-1]
        sylCount = len([syl for syl in pronList if syl[-1].isdecimal()])
        return sylCount
    except KeyError:
        # word not in the dictionary: scrape the syllable count instead
        url = 'http://www.syllablecount.com/syllables/' + word
        request = urllib.request.urlopen(url)
        response = request.read().decode('utf-8')
        # use a regex to match the desired value
        sylCount = int(
            re.search("(?<=<b style='color: #008000'>)[0-9]+", response)[0]
        )
        return sylCount
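# A usage sketch for get_word_syllables: nltk's bundled CMU Pronouncing
# Dictionary provides exactly the word -> list-of-transcriptions mapping the
# function expects. Assumes a one-time nltk.download('cmudict') has been run.
from nltk.corpus import cmudict

pronouncing_dict = cmudict.dict()
print(get_word_syllables('orange', pronouncing_dict))  # -> 2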
def GetCurrentValue(self):
    try:
        print(self.getUrl())
        request = urllib.request.urlopen(self.getUrl())
        return_val = request.read().decode('utf-8')
        request.close()
        return json.loads(return_val)
    except Exception:
        # Network or JSON decoding failure: report "no value".
        return None
def wget(url):
    """
    Download the page into a bytes string
    """
    import urllib.request
    with urllib.request.urlopen(url) as request:
        filestring = request.read()
    return filestring
def _write_to_pipe(self):
    with open(self.pipe_path, 'w') as pipe:
        pipe.write(self.fileobj.read())
def read_weather():
    """ Reads the current weather state, if enabled, and stores it. """
    # Only run when explicitly enabled in settings.
    if not should_sync():
        return

    # For backend logging in Supervisor.
    print(' - Performing temperature reading at Buienradar.')

    weather_settings = WeatherSettings.get_solo()

    # Fetch XML from the API.
    request = urllib.request.urlopen(BUIENRADAR_API_URL)
    response_bytes = request.read()
    request.close()
    response_string = response_bytes.decode("utf8")

    # Use the simplified XPath engine to extract the current temperature.
    root = ET.fromstring(response_string)
    xpath = BUIENRADAR_XPATH.format(
        weather_station_id=weather_settings.buienradar_station
    )
    temperature_element = root.find(xpath)
    temperature = temperature_element.text
    print(' - Read temperature: {}'.format(temperature))

    # Gas readings trigger these readings, so the 'read at' timestamp should be
    # somewhat in sync with them. Therefore we align temperature readings with
    # the gas readings by truncating the timestamp to the hour.
    read_at = timezone.now().replace(minute=0, second=0, microsecond=0)

    try:
        TemperatureReading.objects.create(read_at=read_at, degrees_celcius=Decimal(temperature))
    except Exception:
        # Try again in 5 minutes.
        weather_settings.next_sync = timezone.now() + timezone.timedelta(minutes=5)
    else:
        # Push the next sync back for an hour.
        weather_settings.next_sync = read_at + timezone.timedelta(hours=1)

    weather_settings.save()
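# A tiny self-contained illustration of the ElementTree XPath lookup used in
# read_weather(); the XML layout and station id below are invented for the
# example and do not reflect Buienradar's real schema.
import xml.etree.ElementTree as ET

sample = (
    "<report><stations>"
    "<station id='6260'><temperature>17.4</temperature></station>"
    "</stations></report>"
)
root = ET.fromstring(sample)
print(root.find(".//station[@id='6260']/temperature").text)  # 17.4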
@asyncio.coroutine
def async_live_stream(self, output):
    metafile = re.findall(r"HTTP getting '(.*)'\n$", output)
    if len(metafile) == 0:
        _LOGGER.warning('No link found for %s', self._url)
        _LOGGER.warning("output: %s", output)
        return

    websession = async_get_clientsession(self._hass)
    request = None
    try:
        with async_timeout.timeout(10, loop=self._hass.loop):
            request = yield from websession.get(metafile[0])

            if request.status != 200:
                _LOGGER.error("Error %d on load url %s",
                              request.status, request.url)
                return

            data = yield from request.read()
            root = ET.fromstring(data)
            nodes = root.findall('.//url')
            if len(nodes) > 0:
                return (yield from self.async_play_url(nodes[0].text))
            else:
                _LOGGER.error('No stream url found in metafile.')
    except (asyncio.TimeoutError, aiohttp.errors.ClientError):
        _LOGGER.error("Timeout or client error fetching %s", metafile[0])
    finally:
        if request is not None:
            yield from request.release()
    return False
def test_httpretty_should_mock_a_simple_get_with_urllib2_read():
    "HTTPretty should mock a simple GET with urllib2.read()"
    HTTPretty.register_uri(HTTPretty.GET, "http://yipit.com/",
                           body="Find the best daily deals")

    fd = urlopen('http://yipit.com')
    got = fd.read()
    fd.close()

    expect(got).to.equal(b'Find the best daily deals')
def test_httpretty_provides_easy_access_to_querystrings(now):
    "HTTPretty should provide easy access to the querystring"
    HTTPretty.register_uri(HTTPretty.GET, "http://yipit.com/",
                           body="Find the best daily deals")

    fd = urllib2.urlopen('http://yipit.com/?foo=bar&foo=baz&chuck=norris')
    fd.read()
    fd.close()

    expect(HTTPretty.last_request.querystring).to.equal({
        'foo': ['bar', 'baz'],
        'chuck': ['norris'],
    })
def test_httpretty_should_support_a_list_of_successive_responses_urllib2(now):
    "HTTPretty should support adding a list of successive " \
    "responses with urllib2"
    HTTPretty.register_uri(
        HTTPretty.GET, "https://api.yahoo.com/test",
        responses=[
            HTTPretty.Response(body="first response", status=201),
            HTTPretty.Response(body='second and last response', status=202),
        ])

    request1 = urlopen('https://api.yahoo.com/test')
    body1 = request1.read()
    request1.close()

    expect(request1.code).to.equal(201)
    expect(body1).to.equal(b'first response')

    request2 = urlopen('https://api.yahoo.com/test')
    body2 = request2.read()
    request2.close()

    expect(request2.code).to.equal(202)
    expect(body2).to.equal(b'second and last response')

    # The last registered response is repeated once the list is exhausted.
    request3 = urlopen('https://api.yahoo.com/test')
    body3 = request3.read()
    request3.close()

    expect(request3.code).to.equal(202)
    expect(body3).to.equal(b'second and last response')
def test_callback_response(now):
    ("HTTPretty should allow a callback function to be set as the body with"
     " urllib2")
    def request_callback(request, uri, headers):
        return [200, headers, "The {0} response from {1}".format(decode_utf8(request.method), uri)]

    HTTPretty.register_uri(
        HTTPretty.GET, "https://api.yahoo.com/test",
        body=request_callback)

    fd = urllib2.urlopen('https://api.yahoo.com/test')
    got = fd.read()
    fd.close()

    expect(got).to.equal(b"The GET response from https://api.yahoo.com/test")

    HTTPretty.register_uri(
        HTTPretty.POST, "https://api.yahoo.com/test_post",
        body=request_callback)

    request = urllib2.Request(
        "https://api.yahoo.com/test_post",
        b'{"username": "gabrielfalcao"}',
        {
            'content-type': 'text/json',
        },
    )
    fd = urllib2.urlopen(request)
    got = fd.read()
    fd.close()

    expect(got).to.equal(b"The POST response from https://api.yahoo.com/test_post")
def get_word_difficulty(word):
    url = 'http://www.dictionary.com/browse/' + word
    request = urllib.request.urlopen(url)
    difficulty = re.search(
        '(?<=data-difficulty=")[0-9]+', request.read().decode('utf-8')
    )
    # Returns the matched score as a string; falls through to None when the
    # page carries no data-difficulty attribute.
    if difficulty:
        return difficulty[0]
def fetch_and_parse_from_url(self, url, parser, force=False, **options):
    with urllib.request.urlopen(url) as request:
        with tempfile.TemporaryFile() as csvfile:
            # Spool the download to a temp file, then parse it as CSV.
            csvfile.write(request.read())
            csvfile.seek(0)
            reader = csv.DictReader(io.TextIOWrapper(csvfile, encoding='utf-8'))

            total = 0
            imported = 0
            import_errors = 0
            for row in reader:
                total += 1
                row = self.format_csv_fields(row)
                logger.debug(row)

                exists = parser.exists_in_db(row)
                if exists and not force:
                    # Skip this row
                    continue

                id = row.get(parser.key)
                try:
                    data = parser.parse(row)
                    logger.debug(data)
                    model, created = parser.commit(data)
                except Exception as err:
                    import_errors += 1
                    logger.error('%s "%s" could not be parsed: parse_errors=%s row=%s',
                                 parser.name, id, err, row)
                    logger.exception(err)
                    continue

                imported += 1
                if created:
                    logger.info('Created %s "%s"', parser.name, id)
                else:
                    logger.info('Updated %s "%s"', parser.name, id)

    logger.info('Import %s data complete: total=%s imported=%s errors=%s',
                parser.name, total, imported, import_errors)
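# The parser object passed to fetch_and_parse_from_url() is assumed to expose
# name, key, exists_in_db(), parse() and commit(); this minimal stub is
# inferred purely from the call sites above, not from a real class.
class ExampleParser:
    name = 'example'
    key = 'id'                    # CSV column holding the record id

    def exists_in_db(self, row):  # True would skip the row unless force=True
        return False

    def parse(self, row):         # turn a raw CSV row into model kwargs
        return dict(row)

    def commit(self, data):       # persist; return (instance, created_flag)
        return data, True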
def call_weather_api(location):
    """ Call API """
    request = urllib.request.urlopen(url % location)
    payload = json.loads(request.read())

    if 'cod' in payload and payload['cod'] == '200':
        if 'list' in payload:
            data = payload['list'][0]
            location = data['name']

            if 'dt' in data:
                measured = datetime.utcfromtimestamp(data['dt'])
                # Show the measurement time when the data is older than the threshold.
                if datetime.utcnow() - timedelta(minutes=threshold) > measured:
                    text = '%s (%s UTC): ' % (location, measured.strftime('%Y-%m-%d %H:%M'))
                else:
                    text = '%s: ' % location
            else:
                text = '%s: ' % location

            main = data['main']
            if 'temp' in main:
                temperature = main['temp'] - 273.15  # Temperature converted from kelvin to Celsius.
                text += 'Temperature: %.1fc' % temperature
            else:
                temperature = None

            if 'wind' in data and 'speed' in data['wind']:
                wind = data['wind']['speed']  # Wind speed in m/s.
            else:
                wind = None

            if temperature is not None and wind is not None:
                # Wind chill index; the formula expects km/h, hence wind * 3.6.
                feels_like = 13.12 + 0.6215 * temperature - 11.37 * (wind * 3.6) ** 0.16 + 0.3965 * temperature * (wind * 3.6) ** 0.16
                text += ', Feels like: %.1fc' % feels_like

            if wind is not None:
                text += ', Wind: %.1f m/s' % wind

            if 'humidity' in main:
                humidity = main['humidity']  # Humidity in %.
                text += ', Humidity: %d%%' % humidity

            if 'pressure' in main:
                pressure = main['pressure']  # Atmospheric pressure in hPa.
                text += ', Pressure: %d hPa' % pressure

            if 'clouds' in data and 'all' in data['clouds']:
                cloudiness = data['clouds']['all']  # Cloudiness in %.
                text += ', Cloudiness: %d%%' % cloudiness

            if temperature is not None:
                print(text.encode('utf-8'))
            else:
                print('Error: No data.')
    else:
        print('Error: Location %s not found.' % location)
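# The 'Feels like' value above is the North American wind chill index. As a
# standalone sketch (temperature in Celsius, wind speed in m/s; the constants
# are taken directly from the expression in call_weather_api):
def wind_chill(temp_c, wind_mps):
    v = wind_mps * 3.6  # the formula expects wind speed in km/h
    return 13.12 + 0.6215 * temp_c - 11.37 * v ** 0.16 + 0.3965 * temp_c * v ** 0.16

print('%.1f' % wind_chill(-5.0, 8.0))  # about -12.8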