def _refresh(self):
    """Refreshes the cursor with more data from the server.

    Returns the length of self.__data after refresh. Will exit early if
    self.__data is already non-empty. Raises OperationFailure when the
    cursor cannot be refreshed due to an error on the query.
    """
    if len(self.__data) or self.__killed:
        return len(self.__data)

    if self.__id:  # Get More
        dbname, collname = self.__ns.split('.', 1)
        self.__send_message(
            _GetMore(dbname,
                     collname,
                     self.__batch_size,
                     self.__id,
                     self.__collection.codec_options))
    else:  # Cursor id is zero, nothing else to return
        self.__killed = True

    return len(self.__data)
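# Hedged usage sketch (not part of the snippet above): _refresh is internal to
# pymongo's cursor classes. Application code normally just iterates the cursor
# and catches OperationFailure if a getMore fails (for example, the cursor
# timed out or was killed on the server). Database/collection names here are
# illustrative.
from pymongo import MongoClient
from pymongo.errors import OperationFailure

client = MongoClient()
coll = client["mydb"]["mycoll"]
try:
    for doc in coll.find({}, batch_size=100):
        print(doc["_id"])
except OperationFailure as exc:
    # Raised if the server reports an error while refreshing the cursor.
    print("query failed:", exc.details)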
def __create_index(self, keys, index_options):
    """Internal create index helper.

    :Parameters:
      - `keys`: a list of tuples [(key, type), (key, type), ...]
      - `index_options`: a dict of index options.
    """
    index_doc = helpers._index_document(keys)
    index = {"key": index_doc}
    index.update(index_options)

    with self._socket_for_writes() as sock_info:
        cmd = SON([('createIndexes', self.name), ('indexes', [index])])
        try:
            self._command(
                sock_info, cmd, read_preference=ReadPreference.PRIMARY)
        except OperationFailure as exc:
            if exc.code in common.COMMAND_NOT_FOUND_CODES:
                index["ns"] = self.__full_name
                wcn = (self.write_concern if
                       self.write_concern.acknowledged else WriteConcern())
                self.__database.system.indexes._insert(
                    sock_info, index, True, False, False, wcn)
            else:
                raise
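# Hedged usage sketch: __create_index is the private helper behind
# Collection.create_index. From application code the equivalent call and
# error handling would look roughly like this (collection and field names
# are illustrative).
from pymongo import MongoClient, ASCENDING
from pymongo.errors import OperationFailure

coll = MongoClient()["mydb"]["mycoll"]
try:
    # Equivalent to keys=[("email", ASCENDING)] plus index options.
    name = coll.create_index([("email", ASCENDING)], unique=True, name="email_uniq")
    print("created index", name)
except OperationFailure as exc:
    # e.g. an index with the same name but different options already exists.
    print("createIndexes failed:", exc.code, exc.details)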
def write_command(self, request_id, msg, docs):
    """A proxy for SocketInfo.write_command that handles event publishing.
    """
    if self.publish:
        duration = datetime.datetime.now() - self.start_time
        self._start(request_id, docs)
        start = datetime.datetime.now()
    try:
        reply = self.sock_info.write_command(request_id, msg)
        if self.publish:
            duration = (datetime.datetime.now() - start) + duration
            self._succeed(request_id, reply, duration)
    except OperationFailure as exc:
        if self.publish:
            duration = (datetime.datetime.now() - start) + duration
            self._fail(request_id, exc.details, duration)
        raise
    finally:
        self.start_time = datetime.datetime.now()
    return reply
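# Hedged sketch: write_command above is internal plumbing that feeds pymongo's
# command monitoring (APM) events. Application code would normally observe the
# same started/succeeded/failed lifecycle through the public monitoring API,
# roughly like this (pymongo 3.3+ for event_listeners; older 3.x versions use
# monitoring.register()):
from pymongo import MongoClient, monitoring

class CommandLogger(monitoring.CommandListener):
    def started(self, event):
        print("started", event.command_name, event.request_id)

    def succeeded(self, event):
        print("succeeded", event.command_name, event.duration_micros)

    def failed(self, event):
        # Failures here correspond to the _fail() path above.
        print("failed", event.command_name, event.failure)

client = MongoClient(event_listeners=[CommandLogger()])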
def connect(self):
    try:
        logging.debug("Getting MongoDB connection to %s (replicaSet=%s, readPreference=%s, readPreferenceTags=%s, ssl=%s)" % (
            self.uri,
            self.replset,
            self.read_pref,
            self.do_rp_tags,
            self.do_ssl(),
        ))
        conn = MongoClient(**self.client_opts())
        if self.do_connect:
            conn['admin'].command({"ping": 1})
    except (ConnectionFailure, OperationFailure, ServerSelectionTimeoutError) as e:
        logging.error("Unable to connect to %s! Error: %s" % (self.uri, e))
        raise DBConnectionError(e)
    if conn is not None:
        self._conn = conn
    return self._conn
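# Hedged sketch of the same connect-and-verify pattern without the surrounding
# wrapper class: build a MongoClient, then force a round trip with "ping" so
# connection and authentication problems surface immediately. The URI and
# timeout are illustrative.
import logging
from pymongo import MongoClient
from pymongo.errors import ConnectionFailure, OperationFailure, ServerSelectionTimeoutError

def get_connection(uri="mongodb://localhost:27017", timeout_ms=5000):
    try:
        conn = MongoClient(uri, serverSelectionTimeoutMS=timeout_ms)
        conn["admin"].command({"ping": 1})
    except (ConnectionFailure, OperationFailure, ServerSelectionTimeoutError) as e:
        logging.error("Unable to connect to %s! Error: %s", uri, e)
        raise
    return conn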
async def test_validate_collection(self, test_db):
    with pytest.raises(TypeError):
        await test_db.validate_collection(5)
    with pytest.raises(TypeError):
        await test_db.validate_collection(None)

    await test_db.test.insert_one({'dummy': 'object'})

    with pytest.raises(OperationFailure):
        await test_db.validate_collection('test.doesnotexist')
    with pytest.raises(OperationFailure):
        await test_db.validate_collection(test_db.test.doesnotexist)

    assert await test_db.validate_collection('test')
    assert await test_db.validate_collection(test_db.test)
    assert await test_db.validate_collection(test_db.test, full=True)
    assert await test_db.validate_collection(test_db.test, scandata=True)
    assert await test_db.validate_collection(test_db.test, full=True, scandata=True)
    assert await test_db.validate_collection(test_db.test, True, True)
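# Hedged sketch: the synchronous pymongo equivalent of the assertions above.
# validate_collection raises OperationFailure for a collection that does not
# exist and otherwise returns the validation result document. Names are
# illustrative.
from pymongo import MongoClient
from pymongo.errors import OperationFailure

db = MongoClient()["test_db"]
db.test.insert_one({"dummy": "object"})

try:
    db.validate_collection("test.doesnotexist")
except OperationFailure as exc:
    print("validate failed as expected:", exc)

result = db.validate_collection("test", full=True, scandata=True)
print(result.get("valid"))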
def drop_indexes(self):
    """Drops all indexes on this collection.

    Can be used on non-existent collections or collections with no indexes.
    Raises OperationFailure on an error.

    .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of
       this collection is automatically applied to this operation when using
       MongoDB >= 3.4.

    .. versionchanged:: 3.4
       Apply this collection's write concern automatically to this operation
       when connected to MongoDB >= 3.4.
    """
    self.__database.client._purge_index(self.__database.name, self.__name)
    self.drop_index("*")
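# Hedged usage sketch for the public method above: dropping all indexes is
# effectively drop_index("*"); OperationFailure propagates if the server
# rejects the dropIndexes command. Names are illustrative.
from pymongo import MongoClient
from pymongo.errors import OperationFailure

coll = MongoClient()["mydb"]["mycoll"]
try:
    coll.drop_indexes()
except OperationFailure as exc:
    print("dropIndexes failed:", exc.details)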
def _refresh(self):
    """Refreshes the cursor with more data from the server.

    Returns the length of self.__data after refresh. Will exit early if
    self.__data is already non-empty. Raises OperationFailure when the
    cursor cannot be refreshed due to an error on the query.
    """
    if len(self.__data) or self.__killed:
        return len(self.__data)

    if self.__id:  # Get More
        self.__send_message(
            _GetMore(self.__collection.database.name,
                     self.__collection.name,
                     self.__batch_size,
                     self.__id,
                     self.__collection.codec_options))
    else:  # Cursor id is zero, nothing else to return
        self.__killed = True

    return len(self.__data)
# Source: bebat_mongodb_user.py, from project Ansible-MongoDB-Install-Role by bbatsche
def update(self, uinfo, user, password, roles):
    if roles_changed(uinfo, roles, self.database):
        self.add(user, password, roles)
    else:
        test_client = self.get_client()
        db = test_client[self.database]

        try:
            db.authenticate(user, password)
            self.module.exit_json(changed=False, user=user)
        except OperationFailure:
            # If we get an operation failure, assume authentication failed,
            # meaning we need to change the password.
            # This is... so not good practice, but it's a way to get
            # idempotence from our task.
            self.add(user, password, roles)
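# Hedged sketch of the idempotence check used above, outside the Ansible module:
# try to authenticate with the desired credentials and treat OperationFailure as
# "password needs updating". db.authenticate() is the legacy pymongo 3.x API the
# original module relies on (removed in pymongo 4). Host/database names are
# illustrative.
from pymongo import MongoClient
from pymongo.errors import OperationFailure

def password_needs_update(host, database, user, password):
    db = MongoClient(host)[database]
    try:
        db.authenticate(user, password)
        return False  # credentials already valid, nothing to change
    except OperationFailure:
        return True   # assume authentication failed, so the password must change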
def _command(self, sock_info, command, slave_ok=False,
             read_preference=None,
             codec_options=None, check=True, allowable_errors=None,
             read_concern=DEFAULT_READ_CONCERN):
    """Internal command helper.

    :Parameters:
      - `sock_info` - A SocketInfo instance.
      - `command` - The command itself, as a SON instance.
      - `slave_ok`: whether to set the SlaveOkay wire protocol bit.
      - `codec_options` (optional) - An instance of
        :class:`~bson.codec_options.CodecOptions`.
      - `check`: raise OperationFailure if there are errors
      - `allowable_errors`: errors to ignore if `check` is True
      - `read_concern` (optional) - An instance of
        :class:`~pymongo.read_concern.ReadConcern`.

    :Returns:
        # todo: don't return address
        (result document, address of server the command was run on)
    """
    return sock_info.command(self.__database.name,
                             command,
                             slave_ok,
                             read_preference or self.read_preference,
                             codec_options or self.codec_options,
                             check,
                             allowable_errors,
                             read_concern=read_concern)
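# Hedged sketch: _command wraps SocketInfo.command for internal callers; the
# public analogue is Database.command, which raises OperationFailure when the
# server returns an error. Database/collection names are illustrative.
from pymongo import MongoClient
from pymongo.errors import OperationFailure

db = MongoClient()["mydb"]
try:
    stats = db.command("collstats", "mycoll")
    print(stats["count"])
except OperationFailure as exc:
    print("command failed:", exc.code, exc.details)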
def drop_indexes(self):
    """Drops all indexes on this collection.

    Can be used on non-existent collections or collections with no indexes.
    Raises OperationFailure on an error.
    """
    self.__database.client._purge_index(self.__database.name, self.__name)
    self.drop_index("*")
def legacy_write(self, request_id, msg, max_doc_size, acknowledged, docs):
    """A proxy for SocketInfo.legacy_write that handles event publishing.
    """
    if self.publish:
        duration = datetime.datetime.now() - self.start_time
        cmd = self._start(request_id, docs)
        start = datetime.datetime.now()
    try:
        result = self.sock_info.legacy_write(
            request_id, msg, max_doc_size, acknowledged)
        if self.publish:
            duration = (datetime.datetime.now() - start) + duration
            if result is not None:
                reply = _convert_write_result(self.name, cmd, result)
            else:
                # Comply with APM spec.
                reply = {'ok': 1}
            self._succeed(request_id, reply, duration)
    except OperationFailure as exc:
        if self.publish:
            duration = (datetime.datetime.now() - start) + duration
            self._fail(
                request_id,
                _convert_write_result(
                    self.name, cmd, exc.details),
                duration)
        raise
    finally:
        self.start_time = datetime.datetime.now()
    return result
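# Hedged note: legacy_write handles the pre-3.2 OP_INSERT/OP_UPDATE/OP_DELETE
# wire path, where "acknowledged" decides whether a getLastError reply is read.
# In application code the same acknowledged/unacknowledged choice is made
# through the write concern, roughly like this (names illustrative):
from pymongo import MongoClient
from pymongo.write_concern import WriteConcern

coll = MongoClient()["mydb"]["mycoll"]
acked = coll.with_options(write_concern=WriteConcern(w=1))
unacked = coll.with_options(write_concern=WriteConcern(w=0))
acked.insert_one({"x": 1})    # server reply is checked; errors raise
unacked.insert_one({"x": 2})  # unacknowledged; the driver does not wait for a reply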
def drop_indexes(self):
    """Drops all indexes on this collection.

    Can be used on non-existent collections or collections with no indexes.
    Raises OperationFailure on an error.
    """
    self.__database.connection._purge_index(self.__database.name,
                                            self.__name)
    self.drop_index(u"*")
def drop_index(self, index_or_name):
    """Drops the specified index on this collection.

    Can be used on non-existent collections or collections with no
    indexes. Raises OperationFailure on an error. `index_or_name`
    can be either an index name (as returned by `create_index`),
    or an index specifier (as passed to `create_index`). An index
    specifier should be a list of (key, direction) pairs. Raises
    TypeError if index is not an instance of (str, unicode, list).

    .. warning::

      if a custom name was used on index creation (by
      passing the `name` parameter to :meth:`create_index` or
      :meth:`ensure_index`) the index **must** be dropped by name.

    :Parameters:
      - `index_or_name`: index (or name of index) to drop
    """
    name = index_or_name
    if isinstance(index_or_name, list):
        name = _gen_index_name(index_or_name)

    if not isinstance(name, basestring):
        raise TypeError("index_or_name must be an index name or list")

    self.__database.connection._purge_index(self.__database.name,
                                            self.__name, name)
    self.__database.command("dropIndexes", self.__name, index=name,
                            allowable_errors=["ns not found"])
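# Hedged usage sketch for drop_index: an index can be dropped by the name
# returned from create_index or by the original key specification. As the
# docstring warns, custom-named indexes must be dropped by name. Names are
# illustrative.
from pymongo import MongoClient, ASCENDING

coll = MongoClient()["mydb"]["mycoll"]
name = coll.create_index([("email", ASCENDING)])

coll.drop_index(name)                      # drop by name
coll.create_index([("email", ASCENDING)])
coll.drop_index([("email", ASCENDING)])    # drop by the same key specifier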