def get_string(self):
"""
Returns:
The structure serialized as a YAML string
"""
d = Counter(self.atoms_name_list)
ordered_atoms = OrderedDict(sorted(d.items(),
key=lambda x: Specie(x[0]).Z))
if 'G' in ordered_atoms:
del ordered_atoms['G']
comment = ''.join(['{}{}'.format(k, v)
for k, v in ordered_atoms.items()])
output = {'comment': comment,
'lattice': self.lattice.tolist(),
'positions': self.positions.tolist(),
'numbers': self.numbers.tolist(),
'zoom': self.zoom}
return yaml.dump(output)
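# A minimal, standalone sketch (not from the original class) of how yaml.dump
# renders a plain dict of lists like the `output` built above. The keys mirror
# the snippet; the values are made-up placeholders.
import yaml

output = {'comment': 'Si2',
          'lattice': [[5.43, 0.0, 0.0], [0.0, 5.43, 0.0], [0.0, 0.0, 5.43]],
          'positions': [[0.0, 0.0, 0.0], [0.25, 0.25, 0.25]],
          'numbers': [14, 14],
          'zoom': 1.0}
print(yaml.dump(output))
# Leaf lists may come out in flow style ([0.0, 0.0, 0.0]); pass
# default_flow_style=False to force block style throughout.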
def dump_meas_file(data, filename = "", flatten=False):
d = Dumper if filename and not flatten else FlatDumper
d.add_representer(Include, d.include)
if filename:
with open(filename+".tmp", 'w+') as fid:
yaml.dump(data, fid, Dumper=d)
# Upon success
move(filename+".tmp", filename)
with open(filename, 'r') as fid:
contents = fid.read()
return contents
else:
# dump to an IO stream:
# note you need to use the FlatDumper for this to work
out = StringIO()
yaml.dump(data, out, Dumper=d)
ret_string = out.getvalue()
out.close()
return ret_string
def save_table(table_loc, table):
"""
Saves ``table`` to the YAML file ``table_loc``
"""
table_list = [obj.to_dict_sparse() for obj_id, obj in table.items()]
table_obj = {
'db_version': DB_VERSION,
'data': table_list
}
if os.path.exists(table_loc):
# Cache a backup of this
shutil.copy2(table_loc, _get_bak_loc(table_loc))
with open(table_loc, 'w') as yf:
yaml.dump(table_obj, stream=yf, default_flow_style=False)
def save(self, vb=False):
if vb:
log.info("Saving...")
try:
with open('config.yaml', 'w') as fp:
yaml.dump(self.doc, fp, Dumper=yaml.RoundTripDumper)
except PermissionError:
log.err("No write access to config.yaml")
except IOError as e:
log.err("Could not open config.yaml: " + str(e))
except Exception as e:
log.err("An unexcpected exception of type: "
+ type(e).__name__
+ "has occurred: " + str(e))
else:
if vb:
log.info("Save complete")
return
def to_yaml(self, filename=None, default_flow_style=False,
encoding="utf-8", errors="strict",
**yaml_kwargs):
"""
Transform the Box object into a YAML string.
:param filename: If provided will save to file
:param default_flow_style: False will recursively dump dicts
:param encoding: File encoding
:param errors: How to handle encoding errors
:param yaml_kwargs: additional arguments to pass to yaml.dump
:return: string of YAML or return of `yaml.dump`
"""
return _to_yaml(self.to_dict(), filename=filename,
default_flow_style=default_flow_style,
encoding=encoding, errors=errors, **yaml_kwargs)
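# Hedged usage sketch for the to_yaml method above, assuming it belongs to the
# python-box Box class (as the docstring suggests); not taken from the original repo.
from box import Box

conf = Box({'server': {'host': 'localhost', 'port': 8080}})
print(conf.to_yaml())                 # returns the YAML string
# conf.to_yaml(filename='conf.yaml')  # or write it straight to a file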
def to_json(self, filename=None,
encoding="utf-8", errors="strict",
multiline=False, **json_kwargs):
"""
Transform the BoxList object into a JSON string.
:param filename: If provided will save to file
:param encoding: File encoding
:param errors: How to handle encoding errors
:param multiline: Put each item in the list onto its own line
:param json_kwargs: additional arguments to pass to json.dump(s)
:return: string of JSON or return of `json.dump`
"""
if filename and multiline:
lines = [_to_json(item, filename=False, encoding=encoding,
errors=errors, **json_kwargs) for item in self]
with open(filename, 'w', encoding=encoding, errors=errors) as f:
f.write("\n".join(lines).decode('utf-8') if
sys.version_info < (3, 0) else "\n".join(lines))
else:
return _to_json(self.to_list(), filename=filename,
encoding=encoding, errors=errors, **json_kwargs)
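# Illustrative sketch of the multiline branch above: one JSON document per line
# (JSON Lines) instead of a single JSON array. Plain json keeps the sketch
# self-contained; the BoxList version delegates the same work to _to_json.
import json

items = [{'id': 1}, {'id': 2}]
print("\n".join(json.dumps(item) for item in items))
# {"id": 1}
# {"id": 2}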
def to_yaml(self, filename=None, default_flow_style=False,
encoding="utf-8", errors="strict",
**yaml_kwargs):
"""
Transform the BoxList object into a YAML string.
:param filename: If provided will save to file
:param default_flow_style: False will recursively dump dicts
:param encoding: File encoding
:param errors: How to handle encoding errors
:param yaml_kwargs: additional arguments to pass to yaml.dump
:return: string of YAML or return of `yaml.dump`
"""
return _to_yaml(self.to_list(), filename=filename,
default_flow_style=default_flow_style,
encoding=encoding, errors=errors, **yaml_kwargs)
def _format_yaml_dump(data):
"""
Format yaml-dump to make file more readable
(yaml structure must be dumped to a stream before using this function)
| Currently does the following:
| - Add an empty line before a new item
:param data: string to format
:return: formatted string
"""
data = data.replace('\n\n', '\n')
ldata = data.split('\n')
rdata = []
for index, line in enumerate(ldata):
if line[-1:] == ':':
# no empty line before list attributes
if index + 1 >= len(ldata) or not ldata[index + 1].strip().startswith('-'):
rdata.append('')
rdata.append(line)
else:
rdata.append(line)
fdata = '\n'.join(rdata)
return fdata
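# Usage sketch for _format_yaml_dump: dump first, then post-process the string.
# The data and keys are placeholders, not taken from the original project.
import yaml

raw = yaml.dump({'item1': {'type': 'bool'}, 'item2': {'type': 'num'}},
                default_flow_style=False)
print(_format_yaml_dump(raw))  # a blank line appears before item1: and item2: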
def yaml_save_roundtrip(filename, data, create_backup=False):
"""
Dump yaml using the RoundTripDumper and correct the line spacing in the output file
:param filename: name of the yaml file to save to
:param data: data structure to save
"""
if not EDITING_ENABLED:
return
sdata = yaml.dump(data, Dumper=yaml.RoundTripDumper, version=yaml_version,
    indent=indent_spaces, block_seq_indent=block_seq_indent,
    width=12288, allow_unicode=True)
# with open(filename+'_raw'+YAML_FILE, 'w') as outfile:
# outfile.write( sdata )
if create_backup:
if os.path.isfile(filename+YAML_FILE):
shutil.copy2(filename+YAML_FILE, filename+'.bak')
sdata = _format_yaml_dump2( sdata )
with open(filename+YAML_FILE, 'w') as outfile:
outfile.write( sdata )
def _ordered_dump(data, stream=None, Dumper=yaml.Dumper, **kwds):
"""
Ordered yaml dumper
Use this instead ot yaml.Dumper/yaml.SaveDumper to get an Ordereddict
:param stream: stream to write to
:param Dumper: yaml-dumper to use
:**kwds: Additional keywords
:return: OrderedDict structure
"""
# usage example: ordered_dump(data, Dumper=yaml.SafeDumper)
class OrderedDumper(Dumper):
pass
def _dict_representer(dumper, data):
return dumper.represent_mapping(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
data.items())
OrderedDumper.add_representer(OrderedDict, _dict_representer)
return yaml.dump(data, stream, OrderedDumper, **kwds)
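# Runnable sketch of the pattern above: registering a mapping representer lets
# SafeDumper serialize an OrderedDict as a plain mapping, in insertion order
# and without any !!python/object tags.
import yaml
from collections import OrderedDict

data = OrderedDict([('zeta', 1), ('alpha', 2)])
print(_ordered_dump(data, Dumper=yaml.SafeDumper, default_flow_style=False))
# zeta: 1
# alpha: 2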
def test_s3fetchyaml(mock_s3):
"""Success path all the way through to the mocked boto s3 object."""
input_dict = {'newkey': 'newvalue', 'newkey2': 'newvalue2'}
string_of_yaml = yaml.dump(input_dict, Dumper=yaml.RoundTripDumper)
bunch_of_bytes = bytes(string_of_yaml, 'utf-8')
mock_s3.side_effect = [{'Body': bunch_of_bytes}]
context = Context({
'k1': 'v1',
's3Fetch': {
'serviceName': 'service name',
'methodName': 'method_name',
'clientArgs': {'ck1': 'cv1', 'ck2': 'cv2'},
'methodArgs': {'Bucket': 'bucket name',
'Key': 'key name',
'SSECustomerAlgorithm': 'sse alg',
'SSECustomerKey': 'sse key'}
}})
pypyraws.steps.s3fetchyaml.run_step(context)
assert len(context) == 4
assert context['k1'] == 'v1'
assert context['newkey'] == 'newvalue'
assert context['newkey2'] == 'newvalue2'
def create_dishwasher(name: str, brand: str, cost: int, cve: str) -> str:
try:
query = "INSERT INTO dishwashers VALUES ('{inserted_by}', '{id}', '{object}')"
id = get_new_id()
new_dishwasher = DishWasher(id, name, brand, cost, cve)
if "user" in request.cookies:
inserted_by = base64.b64decode(request.cookies["user"]).decode('utf-8')
else:
inserted_by = "no one :("
if len(inserted_by) > 255:
return ""
for c in inserted_by:
if c not in string.printable[:-2]:
return ""
if re.search(r"sleep", inserted_by, flags=re.IGNORECASE):
return ""
if re.search(r"benchmark", inserted_by, flags=re.IGNORECASE):
return ""
if re.search(r"wait", inserted_by, flags=re.IGNORECASE):
return ""
if insert(query.format(id=id, object=yaml.dump(new_dishwasher), inserted_by=inserted_by)):
return id
except Exception as e:
print(e, file=sys.stderr)
return ""
def __init__(self, *args, **kwargs): # optional, default_release_types,
super().__init__(*args, **kwargs)
data_path = kwargs['data_path']
self.addon_data = self.get_addon_data()
if self.dump_data:
key = 'categorySection.name'
for addon_type, addons in groupby(sorted(self.addon_data, key=lambda k: k[key]), lambda d: d[key]):
path = Path(data_path, 'addons', f'{addon_type}.yaml')
addon_data = dict()
for addon in addons:
website_url = addon['websiteURL']
addon_id = addon['id']
api_url = f'{self.meta_url}/api/addon/{addon_id}'
addon_data[addon['name']] = {
'website_url': website_url, 'api_url': api_url}
Path(path.parent).mkdir(parents=True, exist_ok=True)
with open(path, 'w') as outfile:
yaml.dump(addon_data, outfile, default_flow_style=False)
def save_yaml(fname, wf, inline, pack, relpath, wd, encoding='utf-8'):
with codecs.open(fname, 'wb', encoding=encoding) as yaml_file:
yaml_file.write('#!/usr/bin/env cwl-runner\n')
yaml_file.write(yaml.dump(wf.to_obj(inline=inline,
pack=pack,
relpath=relpath,
wd=wd),
Dumper=yaml.RoundTripDumper))
def save(self, path: str):
cfg = self._dict_to_orderdict(self.cfg_dict)
with open(path[:-3] + 'yaml', 'w') as cfg_file:
try:
yaml3ed.dump(cfg, cfg_file, explicit_start=True, explicit_end=True,
default_flow_style=False, allow_unicode=True, version=(1, 2),
indent=2)
except yaml3ed.YAMLError as exc:
print(exc)
def save(self, path: str):
cfg = self._dict_to_orderdict(self.map_config)
with open(path + '/' + self.map_data['name'] + '.yaml', 'w') as cfg_file:
try:
yaml3ed.dump(cfg, cfg_file, explicit_start=True, explicit_end=True,
default_flow_style=False, allow_unicode=True, version=(1, 2))
except yaml3ed.YAMLError as exc:
print(exc)
def dump(self):
return ydump(self.data, Dumper=RoundTripDumper, default_flow_style=False)
def list():
"""
List all mod configuration
"""
config_path = get_default_config_path()
config = load_config(config_path, loader=yaml.RoundTripLoader)
print(yaml.dump(config['mod'], Dumper=yaml.RoundTripDumper))
def dump_config(config_path, config, dumper=yaml.RoundTripDumper):
with codecs.open(config_path, mode='w', encoding='utf-8') as file:
file.write(yaml.dump(config, Dumper=dumper))
def dump_config(config_path, config, dumper=yaml.RoundTripDumper):
with codecs.open(config_path, mode='w', encoding='utf-8') as stream:
stream.write(to_utf8(yaml.dump(config, Dumper=dumper)))
def write(self):
with open(self.filename+".tmp", 'w') as fid:
yaml.dump(self.data, fid, Dumper=yaml.RoundTripDumper)
move(self.filename+".tmp", self.filename)
def save_config():
def do_save_config():
with open(_CONFIG_FILE, 'w', encoding='utf-8') as file:
yaml.dump(config, file, Dumper=yaml.RoundTripDumper)
await get_event_loop().run_in_executor(None, do_save_config)
def save(data, stream=None):
return ruamel.yaml.dump(data, stream=stream,
Dumper=ruamel.yaml.RoundTripDumper,
default_flow_style=False)
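# Hedged sketch of what the RoundTripDumper buys you: comments and key order in
# the source document survive a load/edit/save cycle (legacy ruamel.yaml API,
# matching the style used above).
import ruamel.yaml

doc = "retries: 3  # keep small\ntimeout: 10\n"
data = ruamel.yaml.load(doc, Loader=ruamel.yaml.RoundTripLoader)
data['timeout'] = 30
print(ruamel.yaml.dump(data, Dumper=ruamel.yaml.RoundTripDumper,
                       default_flow_style=False))
# retries: 3  # keep small
# timeout: 30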
def save(data, stream=None, Dumper=yaml.SafeDumper,
default_flow_style=False,
encoding='utf-8',
**kwds):
class OrderedDumper(Dumper):
pass
def _dict_representer(dumper, data):
return dumper.represent_mapping(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
data.items())
OrderedDumper.add_representer(OrderedDict, _dict_representer)
OrderedDumper.add_representer(np.float64,
lambda dumper, data: dumper.represent_float(float(data)))
OrderedDumper.add_representer(complex,
lambda dumper, data: dumper.represent_str(str(data)))
OrderedDumper.add_representer(np.complex128,
lambda dumper, data: dumper.represent_str(str(data)))
OrderedDumper.add_representer(np.ndarray,
lambda dumper, data: dumper.represent_list(list(data)))
# I added the following two lines to make pyrpl compatible with pyinstruments. In principle they can be erased
if isinstance(data, dict) and not isinstance(data, OrderedDict):
data = OrderedDict(data)
return yaml.dump(data,
stream=stream,
Dumper=OrderedDumper,
default_flow_style=default_flow_style,
encoding=encoding,
**kwds)
# usage example:
# load(stream, yaml.SafeLoader)
# save(data, stream=f, Dumper=yaml.SafeDumper)
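# Sketch of the custom representers above in action: numpy scalars, complex
# numbers and arrays come out as plain YAML floats, strings and lists. Assumes
# numpy is available; the values are illustrative.
from collections import OrderedDict
import numpy as np

settings = OrderedDict(gain=np.float64(0.5),
                       iq=np.complex128(1 + 2j),
                       taps=np.array([1.0, 2.0, 3.0]))
print(save(settings, encoding=None))  # encoding=None makes yaml.dump return a str
# gain: 0.5
# iq: (1+2j)
# taps:
# - 1.0
# - 2.0
# - 3.0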
def to_file(self, file_loc):
"""
Dumps the configuration to a YAML file in the specified location.
This will not reflect any runtime modifications to the configuration
object.
"""
with open(file_loc, 'w') as yf:
yaml.dump(self._base_dict, stream=yf, default_flow_style=False)
def task_config(args):
config_dict = _get_config_or_die(
calling_task='config',
required_params=[]
)
print(os.linesep.join((
'### yaml ###',
'',
yaml.dump(config_dict, Dumper=yaml.RoundTripDumper, indent=4),
'### /yaml ###'
)))
def test_fileloading(self, sample_yaml):
# Cannot use tempfile.NamedTemporaryFile because of Windows' file locks
fd, fname = tempfile.mkstemp('w')
try:
with open(fd, 'w', encoding='utf-8') as f:
ryaml.dump(sample_yaml, f)
config = ShanghaiConfiguration.from_filename(fname)
finally:
os.remove(fname)
assert config.mapping == sample_yaml
def set_score_config(exercise_id, config_path, exercise_folder):
config = Config.load(Path.cwd() / (config_path or "import-config.yml"))
api = ApiClient(config.api_url, config.api_token)
tests = load_codex_test_config(Path(exercise_folder) / "testdata" / "config")
score_config = {test.name: int(test.points) for test in tests}
api.set_exercise_score_config(exercise_id, yaml.dump({"testWeights": score_config}, default_flow_style=False))
def dump_schema(self, schema):
print('Writing schema json to file')
with open(self.json_file, 'w') as f:
json.dump(schema, f, indent=4)
# templates
def create_user_says_skeleton(self):
template = os.path.join(self.template_dir, 'user_says.yaml')
skeleton = {}
for intent in self.assist._intent_action_funcs:
# print(type(intent))
entity_map_from_action = self.assist._intent_mappings.get(intent, {})
d = yaml.compat.ordereddict()
d['UserSays'] = [None, None]
d['Annotations'] = [None, None]
# d['Annotations'] = self.parse_annotations_from_action_mappings(intent)
data = yaml.comments.CommentedMap(d) # to preserve order w/o tags
skeleton[intent] = data
with open(template, 'a') as f:
f.write('# Template for defining UserSays examples\n\n')
f.write('# give-color-intent:\n\n')
f.write('# UserSays:\n')
f.write('# - My color is blue\n')
f.write('# - red is my favorite color\n\n')
f.write('# Annotations:\n')
f.write('# - blue: sys.color # maps param value -> entity\n')
f.write('# - red: sys.color\n\n\n\n')
# f.write(header)
yaml.dump(skeleton, f, default_flow_style=False, Dumper=yaml.RoundTripDumper)