def inlineCallbacks(f, *args, **kwargs):
    # ...
    try:
        gen = f(*args, **kwargs)
    except defer._DefGen_Return:
        raise TypeError(
            "inlineCallbacks requires %r to produce a generator; instead "
            "caught returnValue being used in a non-generator" % (f,))
    if not isinstance(gen, types.GeneratorType):
        raise TypeError(
            "inlineCallbacks requires %r to produce a generator; "
            "instead got %r" % (f, gen))
    return defer._inlineCallbacks(None, gen, defer.Deferred())
# ...
# ...
# ...
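The generator check above is what rejects a plain function at call time. A minimal usage sketch (assuming Twisted is installed; defer.returnValue is how generators returned values before Python 3.3 allowed a bare return):

from twisted.internet import defer

@defer.inlineCallbacks
def double_when_ready(d):
    value = yield d              # resumes when the Deferred fires
    defer.returnValue(value * 2)

double_when_ready(defer.succeed(21)).addCallback(print)  # prints 42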
Python source-code examples using types.GeneratorType()
def isgenerator(object):
    """Return true if the object is a generator.

    Generator objects provide these attributes:
        __iter__    defined to support iteration over container
        close       raises a new GeneratorExit exception inside the
                    generator to terminate the iteration
        gi_code     code object
        gi_frame    frame object or possibly None once the generator has
                    been exhausted
        gi_running  set to 1 when generator is executing, 0 otherwise
        next        return the next item from the container
        send        resumes the generator and "sends" a value that becomes
                    the result of the current yield-expression
        throw       used to raise an exception inside the generator"""
    return isinstance(object, types.GeneratorType)
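A short demonstration of the distinction the check draws: a generator function is not itself a generator; only calling it produces a generator object.

import types

def counter(n):
    for i in range(n):
        yield i

print(isgenerator(counter))     # False -- a generator *function*
print(isgenerator(counter(3)))  # True  -- calling it creates a generator object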
def to_chunks(stream_or_generator):
    """This generator function receives a file-like object or a generator as
    input and returns a generator.

    :param file|__generator[bytes] stream_or_generator: readable stream or
        generator.
    :rtype: __generator[bytes]
    :raise: TypeError
    """
    if isinstance(stream_or_generator, types.GeneratorType):
        yield from stream_or_generator
    elif hasattr(stream_or_generator, 'read'):
        while True:
            chunk = stream_or_generator.read(CHUNK_SIZE)
            if not chunk:
                break  # no more data
            yield chunk
    else:
        raise TypeError('Input must be either readable or generator.')
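A usage sketch; CHUNK_SIZE is a module-level constant in the original source, and a small value is assumed here so the chunking is visible:

import io
import types

CHUNK_SIZE = 8  # assumed value, for demonstration only

for chunk in to_chunks(io.BytesIO(b'hello world, hello chunks')):
    print(chunk)  # b'hello wo', b'rld, hel', b'lo chunk', b's'

def gen():
    yield b'abc'
    yield b'def'

print(list(to_chunks(gen())))  # [b'abc', b'def'] -- generators pass through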
def test_iter_quarters():
    start = timezone.make_aware(datetime(2015, 11, 30, 1, 2, 3))
    end = timezone.make_aware(datetime(2017, 2, 28, 11, 22, 33))
    quarters = iter_quarters(start, end)
    assert type(quarters) is types.GeneratorType
    starts = [
        datetime.combine(datetime(year, month, day).date(), start.timetz())
        for year, month, day in [
            (2015, 11, 30),
            (2016, 2, 29),  # leap!
            (2016, 5, 30),
            (2016, 8, 30),
            (2016, 11, 30),
            (2017, 2, 28),
        ]
    ]
    ends = starts[1:] + [end]
    assert list(quarters) == list(zip(starts, ends))
def test_iter_years():
    start = timezone.make_aware(datetime(2016, 2, 29, 1, 2, 3))
    end = timezone.make_aware(datetime(2019, 2, 28, 11, 22, 33))
    years = iter_years(start, end)
    assert type(years) is types.GeneratorType
    starts = [
        datetime.combine(datetime(year, month, day).date(), start.timetz())
        for year, month, day in [
            (2016, 2, 29),  # leap!
            (2017, 2, 28),
            (2018, 2, 28),
            (2019, 2, 28),
        ]
    ]
    ends = starts[1:] + [end]
    assert list(years) == list(zip(starts, ends))
from functools import wraps
import types


def once_only(func):
    called_funcs = {}

    @wraps(func)
    def wrapper(*args, **kwgs):
        if func.__name__ not in called_funcs:
            result = obj = func(*args, **kwgs)
            if isinstance(obj, types.GeneratorType):
                def gi_wrapper():
                    # cache the most recently yielded value as we go
                    for result in obj:
                        called_funcs[func.__name__] = result
                        yield result
                return gi_wrapper()
            else:
                called_funcs[func.__name__] = result
                return result
        else:
            return called_funcs[func.__name__]
    return wrapper
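A usage sketch; note that for generator-returning functions the decorator caches only the most recently yielded value, so a second call returns that single value rather than replaying the stream:

@once_only
def expensive():
    print('computing...')
    return 42

print(expensive())  # prints 'computing...' then 42
print(expensive())  # 42, straight from the cache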
From test_organizations.py in the threatstack-python-client project (author: MyPureCloud):
def test_list_organizations():
    responses.add(responses.GET, "https://app.threatstack.com/api/v1/organizations",
                  content_type="application/json",
                  body='[ \
                        {"role": "user", "id": "acbd18db4cc2f85cedef654fccc4a4d8", "name": "Foo\'s Organization"}, \
                        {"role": "user", "id": "37b51d194a7513e45b56f6524f2d51f2", "name": "Bar\'s Organization"} \
                        ]')

    ts = ThreatStack(api_key="test_api_key", api_version=1)
    response = ts.organizations.list()

    assert isinstance(response, GeneratorType)

    count = 0
    for org in response:
        count += 1
    assert count == 2
From dataset_providers_test.py in the keras-image-captioning project (author: danieljl):
def test__batch_generator(self, dataset_provider, mocker):
    mocker.patch.object(dataset_provider, '_preprocess_batch',
                        lambda x, _: x)

    datum_list = list(range(10))
    generator = dataset_provider._batch_generator(datum_list)
    results = [next(generator) for _ in range(4)]
    assert [len(x) for x in results] == [4, 4, 2, 4]
    assert sorted(sum(results[:-1], [])) == datum_list

    datum_list = list(range(12))
    generator = dataset_provider._batch_generator(datum_list)
    assert isinstance(generator, GeneratorType)
    results = list(islice(generator, 4))
    assert [len(x) for x in results] == [4, 4, 4, 4]
    assert sorted(sum(results[:-1], [])) == datum_list
def test_run_generator(self, mCreateSession):
    iSession = MockSession()
    mCreateSession.return_value = (iSession, '123456')
    client = iSession.client('stepfunctions')

    def target(input_):
        yield
        yield
        return

    # Just make sure the target is actually a generator
    self.assertEqual(type(target(None)), types.GeneratorType)

    task = TaskMixin(process=target)
    task.handle_task('token', None)
    self.assertEqual(task.token, None)

    call = mock.call.send_task_success(taskToken='token',
                                       output='null')
    call_ = mock.call.send_task_heartbeat(taskToken='token')
    calls = [call_, call_, call]
    self.assertEqual(client.mock_calls, calls)
import types


def memoized(func):
    '''A function decorator to make a function cache its return values.
    If a function returns a generator, it's transformed into a list and
    cached that way.'''
    cache = {}

    def wrapper(*args):
        if args in cache:
            return cache[args]
        val = func(*args)
        if isinstance(val, types.GeneratorType):
            val = list(val)
        cache[args] = val
        return val
    wrapper.__doc__ = func.__doc__
    wrapper.__name__ = '%s_memoized' % func.__name__
    return wrapper
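A usage sketch; because the materialized list itself is cached, repeated calls return the same mutable object, which callers should treat as read-only:

@memoized
def squares(n):
    for i in range(n):
        yield i * i

print(squares(4))                # [0, 1, 4, 9] -- generator materialized once
print(squares(4) is squares(4))  # True: the cached list is returned again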
def __call__(self, *args, **kwds):
    self.debugLogThreading('ThreadSwitchScheduler(%d:%s): start %r( %r, %r )' %
                           (self.instance_id, self.reason, self.function, args, kwds))
    # pylint: disable=bare-except
    try:
        # call the function
        result = self.function(*args, **kwds)

        # did the function run or make a generator?
        if type(result) != types.GeneratorType:
            self.debugLogThreading('ThreadSwitchScheduler(%d:%s): done (not GeneratorType)' %
                                   (self.instance_id, self.reason))
            # it ran - we are all done
            return

        # step the generator
        self.queueNextSwitch(result)

    except:
        self.app.log.exception('ThreadSwitchScheduler(%d:%s)' % (self.instance_id, self.reason))
def content_types_analyzer(node, source, validated):
    """Return, for example,

        {'content_types': {
            'artist': 8610, 'person': 3, 'cinema_review': 769, 'venue': 729,
            'cultural_event': 2487, 'organization': 1, 'review': 4187}
        }

    Only content types with a nonzero count are included.
    """
    if 'metadata_filter' in validated:
        validated['metadata_filter'].pop('content_types', None)

    objects = source(**validated)
    index = find_catalog('system')['content_type']
    intersection = index.family.IF.intersection
    object_ids = getattr(objects, 'ids', objects)
    if isinstance(object_ids, (list, types.GeneratorType)):
        object_ids = index.family.IF.Set(object_ids)

    result = [(content_type, len(intersection(object_ids, oids)))
              for content_type, oids in index._fwd_index.items()]
    result = dict([(k, v) for k, v in result if v != 0])
    return {'content_types': result}
def states_analyzer(node, source, validated):
    """Return, for example,

        {'states': {
            'editable': 250, 'published': 16264, 'archived': 269, 'active': 3}
        }
    """
    if 'metadata_filter' in validated:
        validated['metadata_filter'].pop('states', None)

    objects = source(**validated)
    index = find_catalog('dace')['object_states']
    intersection = index.family.IF.intersection
    object_ids = getattr(objects, 'ids', objects)
    if isinstance(object_ids, (list, types.GeneratorType)):
        object_ids = index.family.IF.Set(object_ids)

    result = [(state_id, len(intersection(object_ids, oids)))
              for state_id, oids in index._fwd_index.items()]
    result = dict([(k, v) for k, v in result if v != 0])
    return {'states': result}
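Stripped of the catalog machinery, both analyzers use the same counting pattern: intersect the result set with each index bucket and keep the nonzero counts. A self-contained sketch with plain sets:

object_ids = {1, 2, 3, 4}
index = {'artist': {1, 2, 9}, 'review': {3}, 'venue': {7}}

counts = {k: len(object_ids & oids) for k, oids in index.items()}
print({k: v for k, v in counts.items() if v != 0})  # {'artist': 2, 'review': 1}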
def leaves(self, value):
    if value is None:
        raise ValueError('Leaves should be a list.')
    elif not isinstance(value, list) and \
            not isinstance(value, types.GeneratorType):
        raise ValueError('Leaves should be a list or a generator (%s).' % type(value))

    if self.prehashed:
        # this copies the list, or builds a new list from the generator
        self._leaves = list(value)
    else:
        self._leaves = [ShardManager.hash(leaf) for leaf in value]

    if not len(self._leaves) > 0:
        raise ValueError('Leaves must contain at least one entry.')

    for leaf in self._leaves:
        if not isinstance(leaf, six.string_types):
            raise ValueError('Leaves should only contain strings.')
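The eager list() copy matters because generators are single-use; a tiny illustration:

def leaf_gen():
    yield 'a'
    yield 'b'

g = leaf_gen()
print(list(g))  # ['a', 'b']
print(list(g))  # [] -- already exhausted, hence the eager copy above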
def test_make_tiles_tile_bounds(x, y):
    '''
    Test if children tiles from z10 are created correctly
    '''
    test_bounds = mercantile.bounds(x, y, 10)
    test_bbox = (list(mercantile.xy(test_bounds.west, test_bounds.south)) +
                 list(mercantile.xy(test_bounds.east, test_bounds.north)))
    test_crs = 'epsg:3857'
    test_minz = 10
    test_maxz = 13

    created_tiles_gen = _make_tiles(test_bbox, test_crs, test_minz, test_maxz)
    assert isinstance(created_tiles_gen, types.GeneratorType)

    created_tiles = list(created_tiles_gen)
    assert len(created_tiles) == 85  # 1 + 4 + 16 + 64 tiles across zooms 10-13
def test_batch():
    """Test the batch feed dict generator."""
    X = np.arange(100)
    fd = {'X': X}
    data = ab.batch(fd, batch_size=10, n_iter=10)

    # Make sure this is a generator
    assert isinstance(data, GeneratorType)

    # Make sure we get a dict back of a length we expect
    d = next(data)
    assert isinstance(d, dict)
    assert 'X' in d
    assert len(d['X']) == 10

    # Test we get all of X back in one sweep of the data
    accum = list(d['X'])
    for ds in data:
        assert len(ds['X']) == 10
        accum.extend(list(ds['X']))

    assert len(accum) == len(X)
    assert set(X) == set(accum)
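A sketch of a batching generator consistent with what the test asserts (one shuffled sweep covers every row exactly once); this is an illustration, not aboleth's actual implementation:

import numpy as np

def batch_sketch(fd, batch_size, n_iter):
    """Yield feed-dict mini-batches; each full sweep covers every row once."""
    n = len(next(iter(fd.values())))
    perm = np.random.permutation(n)
    for i in range(n_iter):
        start = (i * batch_size) % n
        if start == 0:
            perm = np.random.permutation(n)  # reshuffle at each new sweep
        idx = perm[start:start + batch_size]
        yield {k: np.asarray(v)[idx] for k, v in fd.items()}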
def test_batch_predict():
    """Test the batch prediction feed dict generator."""
    X = np.arange(100)
    fd = {'X': X}
    data = ab.batch_prediction(fd, batch_size=10)

    # Make sure this is a generator
    assert isinstance(data, GeneratorType)

    # Make sure we get a dict back of a length we expect with correct indices
    for ind, d in data:
        assert isinstance(d, dict)
        assert 'X' in d
        assert len(d['X']) == 10
        assert all(X[ind] == d['X'])
def test_should_return_generator(self):
    responses.add(
        self.method,
        self.url,
        json={
            'links': {},
            'items': []
        }
    )
    assert_that(
        iterate_by_pagination(
            method=self.method,
            request_kwargs=self.request_kwargs,
            requests_session=self.requests_session,
            request_defaults=self.request_defaults
        ),
        instance_of(types.GeneratorType)
    )
From pipeline.py in the Sublime-uroboroSQL-formatter project (author: future-architect):
def __call__(self, stream):
    """Run the pipeline.

    Return a static (non-generator) version of the result.
    """
    # Run the stream over all the filters on the pipeline
    for filter in self:
        # Functions and callable objects (objects with a '__call__' method)
        if isinstance(filter, collections.Callable):
            stream = list(filter(stream))
        # Normal filters (objects with a 'process' method)
        else:
            stream = filter.process(None, stream)

    # If the last filter returned a generator, materialize it into a list
    if isinstance(stream, GeneratorType):
        return list(stream)
    return stream
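The method iterates over self, so Pipeline presumably subclasses list. A runnable sketch under that assumption, using collections.abc.Callable (the modern spelling of the deprecated collections.Callable used above):

import collections.abc
from types import GeneratorType

class Pipeline(list):
    def __call__(self, stream):
        for filt in self:
            if isinstance(filt, collections.abc.Callable):
                stream = list(filt(stream))
            else:
                stream = filt.process(None, stream)
        if isinstance(stream, GeneratorType):
            return list(stream)
        return stream

def drop_short(stream):
    return [tok for tok in stream if len(tok) > 2]

class Upper:
    def process(self, _, stream):
        for tok in stream:  # a "normal filter": returns a generator
            yield tok.upper()

p = Pipeline([drop_short, Upper()])
print(p(['a', 'foo', 'bar']))  # ['FOO', 'BAR']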
def getMimeType(self, contenttype, format_types, result=None):
    supported_types = ["text/plain", "text/html", "application/yaml", "application/json"]
    CONTENT_TYPES = {
        "text/plain": str,
        "text/html": self._text2htmlSerializer,
        "application/yaml": self._resultyamlSerializer,
        "application/json": j.db.serializers.getSerializerType('j').dumps
    }
    if not contenttype:
        serializer = format_types["text"]["serializer"]
        return CONTENT_TYPE_HTML, serializer
    elif isinstance(result, types.GeneratorType):
        # stream generator results as raw bytes, with an identity serializer
        return 'application/octet-stream', lambda x: x
    else:
        mimeType = mimeparse.best_match(supported_types, contenttype)
        serializer = CONTENT_TYPES[mimeType]
        return mimeType, serializer
def doAudit(user, path, kwargs, responsetime, statuscode, result, tags):
    client = getClient('system')
    audit = client.audit.new()
    audit.user = user
    audit.call = path
    audit.statuscode = statuscode
    audit.tags = tags
    audit.args = json.dumps([])  # we don't want to log self
    auditkwargs = kwargs.copy()
    auditkwargs.pop('ctx', None)
    audit.kwargs = json.dumps(auditkwargs)
    try:
        if not isinstance(result, types.GeneratorType):
            audit.result = json.dumps(result)
        else:
            audit.result = json.dumps('Result of type generator')
    except:
        audit.result = json.dumps('binary data')
    audit.responsetime = responsetime
    client.audit.set(audit)
def process_result(results):
    """Print the outcome of each proxy check and append it to
    success.txt or failed.txt."""
    if isinstance(results, types.GeneratorType):
        for result in results:
            if result['result']:
                pprint('Success! %s' % result['from'])
                os.system('echo %s >> success.txt' % json.dumps(result['proxy']))
            else:
                pprint('Failed! %s' % result['from'])
                os.system('echo %s >> failed.txt' % result['from'])
    else:
        result = results
        if result['result']:
            pprint('Success! %s' % result['from'])
            os.system('echo %s >> success.txt' % json.dumps(result['proxy']))
        else:
            pprint('Failed! %s' % result['from'])
            os.system('echo %s >> failed.txt' % result['from'])
#----------------------------------------------------------------------
def compute(self, name, raise_exceptions=False):
    """
    Compute a node and all necessary predecessors.

    Following the computation, if successful, the target node and all
    necessary ancestors that were not already UPTODATE will have been
    calculated and set to UPTODATE. Any node that did not need to be
    calculated will not have been recalculated.

    If any node raises an exception, the state of that node is set to
    ERROR and its value set to an object containing the exception object
    as well as a traceback. This does not halt the computation, which
    proceeds as far as it can, until no more nodes that would be required
    to calculate the target are COMPUTABLE.

    :param name: Name of the node to compute
    :param raise_exceptions: Whether to pass exceptions raised by node
        computations back to the caller
    :type raise_exceptions: Boolean, default False
    """
    if isinstance(name, (types.GeneratorType, list)):
        calc_nodes = set()
        for name0 in name:
            for n in self._get_calc_nodes(name0):
                calc_nodes.add(n)
    else:
        calc_nodes = self._get_calc_nodes(name)

    self._compute_nodes(calc_nodes, raise_exceptions=raise_exceptions)
def __init__(self, generator):
    """
    Parameters
    ----------
    generator : function
        the function (generator) to be used
    """
    super(ExecutionPlan, self).__init__()

    if not isinstance(generator, types.GeneratorType):
        generator = generator()
    assert isinstance(generator, types.GeneratorType)

    self._generator = generator
    self._running = True
    self._finish_conditions = []
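The constructor accepts either a generator object or a zero-argument generator function; the same normalization, extracted as a standalone helper for illustration:

import types

def ensure_generator(generator):
    if not isinstance(generator, types.GeneratorType):
        generator = generator()  # a generator function: call it once
    assert isinstance(generator, types.GeneratorType)
    return generator

def plan():
    yield 'step 1'
    yield 'step 2'

print(list(ensure_generator(plan)))    # ['step 1', 'step 2']
print(list(ensure_generator(plan())))  # ['step 1', 'step 2']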
def ipwrap(value, query=''):
    try:
        if isinstance(value, (list, tuple, types.GeneratorType)):
            _ret = []
            for element in value:
                if ipaddr(element, query, version=False, alias='ipwrap'):
                    _ret.append(ipaddr(element, 'wrap'))
                else:
                    _ret.append(element)
            return _ret
        else:
            _ret = ipaddr(value, query, version=False, alias='ipwrap')
            if _ret:
                return ipaddr(_ret, 'wrap')
            else:
                return value
    except:
        return value
def calculate(self, data, starting_month=1):
    '''
    First dimension of data should be time (months).
    '''
    # Check if a distribution has been fit on historical data
    if self.dist_type is None:
        print("You must fit a distribution first")
        return False

    if isinstance(data, types.GeneratorType):
        pass  # streaming input is not handled in this snippet
    else:
        spi = self.calculate_over_full_series(data, starting_month)
        return spi