def test_gene(self):
    """GET /gene/<id>: basic fetch, non-ascii 404, field filtering, bad paths."""
    data = self.json_ok(self.get_ok(self.api + '/gene/1017'))
    eq_(data['entrezgene'], 1017)
    # a gene id containing non-ascii bytes must return 404
    self.get_404(self.api + '/gene/' + '54097\xef\xbf\xbd\xef\xbf\xbdmouse')
    # dots are no longer allowed in gene ids, so this test was removed:
    # res = self.json_ok(self.get_ok(self.api + '/gene/Y105C5B.255'))
    # 'fields' restricts the keys returned for a hit
    data = self.json_ok(self.get_ok(
        self.api + '/gene/1017?fields=symbol,name,entrezgene'))
    eq_(set(data), {'_id', '_score', 'symbol', 'name', 'entrezgene'})
    # 'filter' behaves like 'fields'
    data = self.json_ok(self.get_ok(
        self.api + '/gene/1017?filter=symbol,go.MF'))
    eq_(set(data), {'_id', '_score', 'symbol', 'go'})
    assert "MF" in data["go"]
    # missing or empty gene id is a 404
    self.get_404(self.api + '/gene')
    self.get_404(self.api + '/gene/')
# Example source snippets for the Python eq_() helper (scraper caption, translated)
def test_unicode(self):
    """Unicode query terms must be handled gracefully across all endpoints.

    GET on a unicode gene id is a 404; POST batches report notfound=True
    for the unicode entry; /query returns empty hits / notfound.
    """
    # NOTE(review): the scraped source had this mangled into ascii '??',
    # which defeats the purpose of a unicode test — restored to a genuinely
    # non-ascii string (CJK, as in the upstream mygene.info test suite).
    s = u'\u57fa\u56e0'
    self.get_404(self.api + '/gene/' + s)
    res = self.json_ok(self.post_ok(self.api + '/gene', {'ids': s}))
    eq_(res[0]['notfound'], True)
    eq_(len(res), 1)
    res = self.json_ok(self.post_ok(self.api + '/gene',
                                    {'ids': '1017, ' + s}))
    eq_(res[1]['notfound'], True)
    eq_(len(res), 2)
    res = self.json_ok(self.get_ok(self.api + '/query?q=' + s))
    eq_(res['hits'], [])
    res = self.json_ok(self.post_ok(self.api + '/query',
                                    {"q": s, "scopes": 'symbol'}))
    eq_(res[0]['notfound'], True)
    eq_(len(res), 1)
    res = self.json_ok(self.post_ok(self.api + '/query',
                                    {"q": 'cdk2+' + s}))
    eq_(res[1]['notfound'], True)
    eq_(len(res), 2)
def test_taxonomy(self):
    """Species endpoint: lineage, children expansion, taxonomy-aware query."""
    species = self.json_ok(self.get_ok(self.api + '/species/1239'))
    ok_("lineage" in species)
    all_children = self.json_ok(self.get_ok(
        self.api + '/species/46170?include_children=true'))
    ok_(len(all_children['children']) >= 305)
    # has_gene=1 restricts children to species with gene annotations
    gene_children = self.json_ok(self.get_ok(
        self.api + '/species/46170?include_children=true&has_gene=1'))
    ok_(len(gene_children['children']) >= 16)
    ok_(len(gene_children['children']) <= len(all_children['children']))
    u = '/query?q=lytic%20enzyme&species=1386&include_tax_tree=true'
    with_tree = self.json_ok(self.get_ok(self.api + u))
    ok_(with_tree['total'] >= 2)
    # without tax-tree expansion the same query matches nothing
    without_tree = self.json_ok(self.get_ok(
        self.api + '/query?q=lytic%20enzyme&species=1386'))
    eq_(without_tree['total'], 0)
def test_query_dotstar_refseq(self):
    """Bare refseq:<acc> must behave like the explicit refseq.* subfield."""
    def q(query):
        # run a filtered refseq query and return the parsed response
        u = "/query?q=%s&fields=refseq" % query
        return self.json_ok(self.get_ok(self.api + u), filter=True)
    protein = q("refseq:NP_001670")
    rna = q("refseq:NM_001679")
    genomic = q("refseq:NT_005612")
    explicit_protein = q("refseq.protein:NP_001670")
    self._filter_hits(explicit_protein)
    explicit_rna = q("refseq.rna:NM_001679")
    explicit_genomic = q("refseq.genomic:NT_005612")
    eq_(protein["hits"], explicit_protein["hits"])
    eq_(rna["hits"], explicit_rna["hits"])
    eq_(genomic["hits"], explicit_genomic["hits"])
    eq_(protein["hits"], rna["hits"])  # same result whatever the query
    eq_(genomic["hits"], [])  # genomic not indexed
    eq_(rna["total"], 1)
    hit = rna["hits"][0]
    eq_(hit["refseq"]["protein"], "NP_001670.1")
    eq_(hit["refseq"]["rna"], "NM_001679.3")
def test_query_dotstar_accession(self):
    """Bare accession:<acc> must behave like the explicit accession.* subfield."""
    def q(query):
        # run a filtered accession query and return the parsed response
        u = "/query?q=%s&fields=accession" % query
        return self.json_ok(self.get_ok(self.api + u), filter=True)
    protein = q("accession:AAH68303")
    rna = q("accession:BC068303")
    genomic = q("accession:FJ497232")
    explicit_protein = q("accession.protein:AAH68303")
    explicit_rna = q("accession.rna:BC068303")
    explicit_genomic = q("accession.genomic:FJ497232")
    eq_(protein["hits"], explicit_protein["hits"])
    eq_(rna["hits"], explicit_rna["hits"])
    eq_(genomic["hits"], explicit_genomic["hits"])
    eq_(protein["hits"], rna["hits"])  # same result whatever the query
    eq_(genomic["hits"], [])  # genomic not indexed
    eq_(rna["total"], 1)
    hit = rna["hits"][0]
    assert "AAH68303.1" in hit["accession"]["protein"]
    assert "BC068303.1" in hit["accession"]["rna"]
def test_query_ensembl(self):
    """ensemblgene/transcript/protein queries must resolve to the same gene."""
    def q(field, value):
        u = "/query?q=%s:%s&fields=ensembl" % (field, value)
        return self.json_ok(self.get_ok(self.api + u))
    prot = q("ensemblprotein", "ENSP00000379391")
    rna = q("ensembltranscript", "ENST00000396082")
    gene = q("ensemblgene", "ENSG00000100373")
    # don't compare scores, they are not meaningful here
    for resp in (prot, rna, gene):
        resp["hits"][0].pop("_score")
    eq_(prot["hits"], rna["hits"])
    eq_(rna["hits"], gene["hits"])
    eq_(rna["total"], 1)
    hit = rna["hits"][0]
    eq_(hit["ensembl"]["gene"], "ENSG00000100373")
    assert "ENSP00000216211" in hit["ensembl"]["protein"]
    assert "ENST00000216211" in hit["ensembl"]["transcript"]
    # POST /gene batch with an Ensembl id resolves to the Entrez gene
    batch = self.json_ok(self.post_ok(self.api + '/gene',
                                      {'ids': 'ENSG00000148795'}))
    eq_(len(batch), 1)
    eq_(batch[0]["_id"], "1586")
def test_disambiguate_ensembl_entrez_ids(self):
    """User-reported Ensembl ids must resolve to unique Entrez genes."""
    res = self.json_ok(self.get_ok(
        self.api + "/query?q=ensembl.transcript:ENSMUST00000161459"))
    eq_(len(res["hits"]), 1)
    eq_(res["hits"][0]["symbol"], "Setdb2")
    # each Ensembl gene id maps to exactly one Entrez gene document
    for ensembl_id, entrez_id in (("ENSG00000011454", 23637),
                                  ("ENSG00000237613", 645520)):
        res = self.json_ok(self.get_ok(self.api + "/gene/" + ensembl_id))
        eq_(type(res), dict)
        eq_(res["entrezgene"], entrez_id)
    # test "orphan" EntrezID (associated EnsemblIDs were all resolved into
    # other EntrezIDs but we want to keep ambiguated Ensembl data for those)
    # -- disabled:
    # res = self.json_ok(self.get_ok(self.api + "/gene/100287596"))
    # ensids = [e["gene"] for e in res["ensembl"]]
    # eq_(set(ensids), {"ENSG00000248472", "ENSG00000223972"})
def test_int_float(self):
    """Homologene gene ids and exon positions must be ints, not floats."""
    def check_types(doc):
        # every homologene entry is a (taxid, geneid) pair of ints
        for pair in doc["homologene"]["genes"]:
            eq_(type(pair[0]), int)
            eq_(type(pair[1]), int)
        # every exon position is an int (start, end) pair
        for exon in doc["exons"]:
            for pos in exon["position"]:
                eq_(type(pos[0]), int)
                eq_(type(pos[1]), int)
    # check both the species-scoped and unscoped responses
    for url in ("/gene/1017?species=9606&fields=homologene,exons",
                "/gene/1017?fields=homologene,exons"):
        check_types(self.json_ok(self.get_ok(self.api + url)))
# Self contained test class, used for CI tools such as Travis
# This will start a Tornado server on its own and perform tests
# against this server.
def test_simple_query(self):
    """A plain SELECT must produce exactly one traced 'mysql.query' span."""
    conn, tracer = self._get_conn_tracer()
    writer = tracer.writer
    cursor = conn.cursor()
    cursor.execute("SELECT 1")
    eq_(len(cursor.fetchall()), 1)
    spans = writer.pop()
    eq_(len(spans), 1)
    traced = spans[0]
    eq_(traced.service, self.TEST_SERVICE)
    eq_(traced.name, 'mysql.query')
    eq_(traced.span_type, 'sql')
    eq_(traced.error, 0)
    # connection details and the raw query are recorded as span metadata
    assert_dict_issuperset(traced.meta, {
        'out.host': u'127.0.0.1',
        'out.port': u'3306',
        'db.name': u'test',
        'db.user': u'test',
        'sql.query': u'SELECT 1',
    })
    # eq_(span.get_metric('sql.rows'), -1)
def test_serialize_with_data(self):
    """Serialized SCTP DATA chunk fields must round-trip via struct unpack."""
    self.setUp_with_data()
    buf = self._test_serialize()
    unpacked = struct.unpack_from(sctp.chunk_data._PACK_STR, buf)
    eq_(sctp.chunk_data.chunk_type(), unpacked[0])
    # U/B/E flag bits packed into one byte
    expected_flags = (self.unordered << 2) | (self.begin << 1) | self.end
    eq_(expected_flags, unpacked[1])
    # remaining header fields follow in declaration order
    for idx, attr in enumerate(
            ('length', 'tsn', 'sid', 'seq', 'payload_id'), start=2):
        eq_(getattr(self, attr), unpacked[idx])
    # payload starts right after the fixed-size header
    eq_(self.payload_data, buf[sctp.chunk_data._MIN_LEN:])
def test_build_sctp(self):
    """An eth/ip4/sctp stack must parse back into its constituent protocols."""
    eth = ethernet.ethernet('00:aa:aa:aa:aa:aa', '00:bb:bb:bb:bb:bb',
                            ether.ETH_TYPE_IP)
    ip4 = ipv4.ipv4(4, 5, 16, 0, 0, 2, 0, 64, inet.IPPROTO_SCTP, 0,
                    '192.168.1.1', '10.144.1.1')
    pkt = eth / ip4 / self.sc
    parsed_eth = pkt.get_protocol(ethernet.ethernet)
    ok_(parsed_eth)
    eq_(parsed_eth.ethertype, ether.ETH_TYPE_IP)
    parsed_ip4 = pkt.get_protocol(ipv4.ipv4)
    ok_(parsed_ip4)
    eq_(parsed_ip4.proto, inet.IPPROTO_SCTP)
    parsed_sctp = pkt.get_protocol(sctp.sctp)
    ok_(parsed_sctp)
    eq_(parsed_sctp, self.sc)
def configure_storage(prefix):
    """Copy setup_storage.sh to the engine VM, run it, and assert success."""
    engine = prefix.virt_env.engine_vm()
    local_script = os.path.join(os.environ.get('SUITE'), 'setup_storage.sh')
    remote_script = '/tmp/setup_storage.sh'
    engine.copy_to(local_script, remote_script)
    result = engine.ssh([remote_script])
    nt.eq_(
        result.code, 0, 'setup_storage.sh failed. Exit code is %s' % result.code
    )
def run_log_collector(prefix):
    """Run ovirt-log-collector (verbose) on the engine VM and clean up reports."""
    engine = prefix.virt_env.engine_vm()
    result = engine.ssh([
        'ovirt-log-collector',
        '--verbose',
        '--conf-file=/root/ovirt-log-collector.conf',
    ])
    nt.eq_(
        result.code, 0, 'log collector failed. Exit code is %s' % result.code
    )
    # best-effort cleanup of the generated sosreport archives
    engine.ssh(['rm', '-rf', '/dev/shm/sosreport-LogCollector-*'])
def run_log_collector(prefix):
    """Run ovirt-log-collector on the engine VM and clean up its reports."""
    engine = prefix.virt_env.engine_vm()
    result = engine.ssh([
        'ovirt-log-collector',
        '--conf-file=/root/ovirt-log-collector.conf',
    ])
    nt.eq_(
        result.code, 0, 'log collector failed. Exit code is %s' % result.code
    )
    # best-effort cleanup of the generated sosreport archives
    engine.ssh(['rm', '-rf', '/dev/shm/sosreport-LogCollector-*'])
def run_log_collector(prefix):
    """Run ovirt-log-collector verbosely on the engine VM, then remove reports."""
    engine = prefix.virt_env.engine_vm()
    collector_cmd = [
        'ovirt-log-collector',
        '--verbose',
        '--conf-file=/root/ovirt-log-collector.conf',
    ]
    result = engine.ssh(collector_cmd)
    nt.eq_(
        result.code, 0, 'log collector failed. Exit code is %s' % result.code
    )
    # best-effort cleanup of the generated sosreport archives
    engine.ssh(['rm', '-rf', '/dev/shm/sosreport-LogCollector-*'])
def tweak_db(prefix):
    """Apply postgres configuration tweaks on the engine VM via helper script."""
    engine = prefix.virt_env.engine_vm()
    local_script = os.path.join(
        os.environ.get('SUITE'),
        '../common/deploy-scripts/db_config_tweaks.sh'
    )
    engine.copy_to(local_script, '/root')
    result = engine.ssh(['bash', '/root/db_config_tweaks.sh'])
    nt.eq_(
        result.code, 0, 'tweaking postgres configuration failed. Exit code is %s' % result.code
    )
def run_log_collector(prefix):
    """Execute ovirt-log-collector on the engine and purge its sosreports."""
    engine = prefix.virt_env.engine_vm()
    result = engine.ssh([
        'ovirt-log-collector',
        '--conf-file=/root/ovirt-log-collector.conf',
    ])
    nt.eq_(
        result.code, 0, 'log collector failed. Exit code is %s' % result.code
    )
    # clean up the generated report archives
    engine.ssh(['rm', '-rf', '/dev/shm/sosreport-LogCollector-*'])
def run_log_collector(prefix):
    """Invoke ovirt-log-collector on the engine VM; fail on non-zero exit."""
    engine = prefix.virt_env.engine_vm()
    collector_cmd = [
        'ovirt-log-collector',
        '--conf-file=/root/ovirt-log-collector.conf',
    ]
    result = engine.ssh(collector_cmd)
    nt.eq_(
        result.code, 0, 'log collector failed. Exit code is %s' % result.code
    )
    # clean up the generated report archives
    engine.ssh(['rm', '-rf', '/dev/shm/sosreport-LogCollector-*'])
def configure_storage(prefix):
    """Push the suite's setup_storage.sh to the engine VM and execute it."""
    engine = prefix.virt_env.engine_vm()
    script_src = os.path.join(os.environ.get('SUITE'), 'setup_storage.sh')
    script_dst = '/tmp/setup_storage.sh'
    engine.copy_to(script_src, script_dst)
    result = engine.ssh([script_dst])
    nt.eq_(
        result.code, 0, 'setup_storage.sh failed. Exit code is %s' % result.code
    )
def test_distance_to_coords_formula_lngwise(self):
    """East/west bearings (90, 270) yield waypoints with the same latitude
    as the origin but distinct longitudes."""
    origin_lat = 40.8131494
    origin_lng = -73.95032520000001
    waypoints = distance_to_coords_formula(origin_lat, origin_lng, 90, 270)
    east, west = waypoints[0], waypoints[1]
    # latitudes unchanged by a purely east/west move
    eq_(east[0], west[0])
    eq_(east[0], origin_lat)
    eq_(west[0], origin_lat)
    # longitudes must differ from each other and from the origin
    assert_not_equal(east[1], west[1])
    assert_not_equal(east[1], origin_lng)
    assert_not_equal(west[1], origin_lng)
def test_distance_to_coords_formula_latwise(self):
    """North/south bearings (0, 180) yield waypoints with the same longitude
    as the origin but distinct latitudes."""
    origin_lat = 40.8131494
    origin_lng = -73.95032520000001
    waypoints = distance_to_coords_formula(origin_lat, origin_lng, 0, 180)
    north, south = waypoints[0], waypoints[1]
    # longitudes unchanged by a purely north/south move
    eq_(north[1], south[1])
    eq_(north[1], origin_lng)
    eq_(south[1], origin_lng)
    # latitudes must differ from each other and from the origin
    assert_not_equal(north[0], south[0])
    assert_not_equal(north[0], origin_lat)
    assert_not_equal(south[0], origin_lat)
def test_serialize_with_data(self):
    """DATA chunk serialization: header fields and payload must round-trip."""
    self.setUp_with_data()
    buf = self._test_serialize()
    fields = struct.unpack_from(sctp.chunk_data._PACK_STR, buf)
    eq_(sctp.chunk_data.chunk_type(), fields[0])
    # the unordered/begin/end flags share a single byte
    packed_flags = (self.unordered << 2) | (self.begin << 1) | self.end
    eq_(packed_flags, fields[1])
    eq_(self.length, fields[2])
    eq_(self.tsn, fields[3])
    eq_(self.sid, fields[4])
    eq_(self.seq, fields[5])
    eq_(self.payload_id, fields[6])
    # everything past the fixed header is the payload
    eq_(self.payload_data, buf[sctp.chunk_data._MIN_LEN:])
def test_build_sctp(self):
    """Building eth/ip4/sctp must allow each layer to be retrieved intact."""
    ether_frame = ethernet.ethernet('00:aa:aa:aa:aa:aa', '00:bb:bb:bb:bb:bb',
                                    ether.ETH_TYPE_IP)
    ip_header = ipv4.ipv4(4, 5, 16, 0, 0, 2, 0, 64, inet.IPPROTO_SCTP, 0,
                          '192.168.1.1', '10.144.1.1')
    pkt = ether_frame / ip_header / self.sc
    layer = pkt.get_protocol(ethernet.ethernet)
    ok_(layer)
    eq_(layer.ethertype, ether.ETH_TYPE_IP)
    layer = pkt.get_protocol(ipv4.ipv4)
    ok_(layer)
    eq_(layer.proto, inet.IPPROTO_SCTP)
    layer = pkt.get_protocol(sctp.sctp)
    ok_(layer)
    eq_(layer, self.sc)
def test_query(self):
    """GET /query: keyword/fielded/interval queries, JSONP, errors, defaults."""
    # public query api at /query via GET; several query flavors must hit
    for term in ('cdk2',
                 'GO:0004693',
                 'reporter:211803_at',
                 'IPR008351',
                 'hsa-mir-503',
                 'hsa-miR-503',
                 'symbol:cdk2',  # fielded query
                 'chr1:151,073,054-151,383,976&species=human'):  # interval
        self.query_has_hits(term)
    # JSONP callback wrapping
    con = self.get_ok(self.api + '/query?q=cdk2&callback=mycallback')
    ok_(con.startswith(b'mycallback('))
    # non-ascii query terms return an empty hit list
    res = self.json_ok(self.get_ok(
        self.api + '/query?q=54097\xef\xbf\xbd\xef\xbf\xbdmouse'))
    eq_(res['hits'], [])
    # missing q and unparsable q are client errors
    self.get_status_code(self.api + '/query', status_code=400)
    #res = self.json_ok(self.get_ok(self.api + '/query'), checkerror=False)
    #assert 'error' in res
    self.get_status_code(self.api + '/query?q=tRNA:Y1:85Ae', status_code=400)
    # ensure the default fields returned on a hit
    res = self.json_ok(self.get_ok(self.api + '/query?q=cdk'))
    sample_hit = res["hits"][random.randrange(0, 10)]  # pick one at random
    expected = sorted(["_id", "_score", "taxid", "entrezgene", "name",
                       "symbol"])
    actual = sorted(sample_hit.keys())
    assert actual == expected, "%s != %s" % (actual, expected)
def test_query_post(self):
    """POST /query: scopes, multi-id batches, species filter, errors, jsoninput."""
    # /query via POST
    #self.json_ok(self.post_ok(self.api + '/query', {'q': '1017'}))
    hits = self.json_ok(self.post_ok(self.api + '/query',
                                     {'q': '1017', 'scopes': 'entrezgene'}))
    eq_(len(hits), 1)
    eq_(set(hits[0].keys()),
        {'query', 'taxid', '_score', 'entrezgene', 'symbol', '_id', 'name'})
    eq_(hits[0]['_id'], '1017')
    # mixed scopes, multiple queries
    hits = self.json_ok(self.post_ok(self.api + '/query',
                                     {'q': '211803_at,1018',
                                      'scopes': 'reporter,entrezgene'}))
    eq_(len(hits), 2)
    eq_(hits[0]['_id'], '1017')
    eq_(hits[1]['_id'], '1018')
    # species filter expands one symbol into multiple species hits
    hits = self.json_ok(self.post_ok(self.api + '/query',
                                     {'q': 'CDK2',
                                      'species': 'human,10090,frog,pig',
                                      'scopes': 'symbol',
                                      'fields': 'name,symbol'}))
    assert len(hits) >= 4, (hits, len(hits))
    # an empty POST body is a client error
    self.post_status_code(self.api + '/query', {}, status_code=400)
    #res = self.json_ok(self.post_ok(self.api + '/query', {}),
    #                   checkerror=False)
    #assert 'error' in res, res
    # jsoninput=true allows a JSON-encoded id list in q
    hits = self.json_ok(self.post_ok(self.api + '/query',
                                     {'q': '[1017, "1018"]',
                                      'scopes': 'entrezgene',
                                      'jsoninput': 'true'}))
    eq_(len(hits), 2)
    eq_(hits[0]['_id'], '1017')
    eq_(hits[1]['_id'], '1018')
def test_query_size(self):
    """size/limit/from/skip paging parameters and the 1000-hit hard cap."""
    res = self.json_ok(self.get_ok(self.api + '/query?q=cdk?'))
    eq_(len(res['hits']), 10)  # default page size is 10
    ok_(res['total'] > 10)
    res = self.json_ok(self.get_ok(self.api + '/query?q=cdk?&size=0'))
    eq_(len(res['hits']), 0)
    # 'limit' behaves like 'size'
    res = self.json_ok(self.get_ok(self.api + '/query?q=cdk?&limit=20'))
    eq_(len(res['hits']), 20)
    page1 = self.json_ok(self.get_ok(
        self.api + '/query?q=cdk?&from=0&size=20'))
    skipped = self.json_ok(self.get_ok(
        self.api + '/query?q=cdk?&skip=10&size=20'))
    eq_(len(skipped['hits']), 20)
    # print page1['hits'].index(skipped['hits'][0])
    # print [x['_id'] for x in page1['hits']]
    # eq_(skipped['hits'][0], page1['hits'][10])
    # the skipped page overlaps the unskipped one
    assert skipped['hits'][0] in page1['hits']
    # API doc says results are capped at 1000, whatever the requested size
    for requested in (1000, 1001, 2000):
        res = self.json_ok(self.get_ok(
            self.api + '/query?q=*&size=%s' % requested))
        eq_(len(res['hits']), 1000)
    # non-numeric size is a client error
    self.get_status_code(self.api + '/query?q=cdk?&size=1a', status_code=400)
def test_gene_post(self):
    """POST /gene: default fields, multi-id batches, fields/filter, retired ids."""
    res = self.json_ok(self.post_ok(self.api + '/gene', {'ids': '1017'}))
    eq_(len(res), 1)
    # check the complete set of default fields returned
    default_fields = {
        'symbol', 'reporter', 'refseq', '_score', 'pdb', 'interpro',
        'entrezgene', 'summary', 'genomic_pos_hg19', 'unigene', 'ipi',
        'taxid', 'pfam', 'homologene', 'ensembl', 'ec', 'pir',
        'type_of_gene', 'pathway', 'exons_hg19', 'MIM', 'generif', 'HGNC',
        'name', 'reagent', 'uniprot', 'pharmgkb', 'alias', 'genomic_pos',
        'accession', '_id', 'prosite', 'wikipedia', 'go', 'query', 'Vega',
        'map_location', 'exons', 'exac', 'other_names', 'umls',
    }
    eq_(set(res[0].keys()), default_fields)
    eq_(res[0]['entrezgene'], 1017)
    res = self.json_ok(self.post_ok(self.api + '/gene',
                                    {'ids': '1017, 1018'}))
    eq_(len(res), 2)
    eq_(res[0]['_id'], '1017')
    eq_(res[1]['_id'], '1018')
    # 'fields' restricts returned keys
    res = self.json_ok(self.post_ok(self.api + '/gene',
                                    {'ids': '1017,1018',
                                     'fields': 'symbol,name,entrezgene'}))
    eq_(len(res), 2)
    for hit in res:
        eq_(set(hit), {'_id', '_score', 'query', 'symbol',
                       'name', 'entrezgene'})
    # 'filter' behaves like 'fields'
    res = self.json_ok(self.post_ok(self.api + '/gene',
                                    {'ids': '1017,1018',
                                     'filter': 'symbol,go.MF'}))
    eq_(len(res), 2)
    for hit in res:
        eq_(set(hit), {'_id', '_score', 'query', 'symbol', 'go'})
        assert "MF" in hit["go"]
    # get retired gene (make sure _search ES query is run)
    res = self.json_ok(self.post_ok(self.api + '/gene', {'ids': '791256'}))
    eq_(res[0]['_id'], '50846')  # this is the corresponding _id field
def test_query_facets(self):
    """Facets on taxid, with and without species_facet_filter."""
    res = self.json_ok(self.get_ok(
        self.api + '/query?q=cdk?&facets=taxid&species=human,mouse,rat'))
    ok_('facets' in res)
    ok_('taxid' in res['facets'])
    taxid_facet = res['facets']['taxid']
    eq_(taxid_facet['total'], res['total'])
    eq_(taxid_facet['other'], 0)
    eq_(taxid_facet['missing'], 0)
    # species_facet_filter changes the hits but not the facet counts
    u = ('/query?q=cdk?&facets=taxid&species_facet_filter=human'
         '&species=human,mouse,rat')
    filtered = self.json_ok(self.get_ok(self.api + u))
    eq_(filtered['facets']['taxid']['total'], res['total'])
    eq_(filtered['facets']['taxid'], taxid_facet)
    human_counts = [t["count"] for t in filtered['facets']['taxid']['terms']
                    if t["term"] == 9606]
    eq_(human_counts[0], filtered['total'])
def test_query_userfilter(self):
    """A known userfilter narrows results; an unknown one is ignored."""
    unfiltered = self.json_ok(self.get_ok(self.api + '/query?q=cdk'))
    filtered = self.json_ok(self.get_ok(
        self.api + '/query?q=cdk&userfilter=bgood_cure_griffith'))
    ok_(unfiltered['total'] > filtered['total'])
    # a nonexistent user filter gets silently ignored
    ignored = self.json_ok(self.get_ok(
        self.api + '/query?q=cdk&userfilter=aaaa'))
    eq_(unfiltered['total'], ignored['total'])
def test_dotfield(self):
    """dotfield=true flattens nested keys; default behaves like dotfield=false."""
    # --- /query service (default dotfield=0) ---
    rdefault = self.json_ok(self.get_ok(
        self.api + '/query?q=ccnk&fields=refseq.rna'))
    rfalse = self.json_ok(self.get_ok(
        self.api + '/query?q=ccnk&fields=refseq.rna&dotfield=false'))
    rtrue = self.json_ok(self.get_ok(
        self.api + '/query?q=ccnk&fields=refseq.rna&dotfield=true'))
    # strip the variable scores before comparing
    # TODO: put this in json_ok as post-process filter ?
    for resp in (rdefault, rfalse, rtrue):
        for hit in resp["hits"]:
            del hit["_score"]
    eq_(rdefault["hits"], rfalse["hits"])
    # structure checks: flattened vs nested
    assert "refseq.rna" in rtrue["hits"][0].keys()
    assert "refseq" in rdefault["hits"][0].keys()
    assert "rna" in rdefault["hits"][0]["refseq"].keys()
    # TODO: no fields but dotfield => dotfield results
    # TODO: fields with dot but no dotfield => dotfield results
    # --- /gene service ---
    rdefault = self.json_ok(self.get_ok(
        self.api + '/gene/1017?filter=symbol,go.MF'))
    rtrue = self.json_ok(self.get_ok(
        self.api + '/gene/1017?filter=symbol,go.MF&dotfield=true'))
    rfalse = self.json_ok(self.get_ok(
        self.api + '/gene/1017?filter=symbol,go.MF&dotfield=false'))
    # sharding makes scoring slightly variable, drop scores
    rdefault.pop("_score")
    rfalse.pop("_score")
    eq_(rdefault, rfalse)
    assert "go.MF.term" in rtrue.keys()
    assert "go" in rdefault.keys()
    assert "MF" in rdefault["go"].keys()