Commit cf62d385 authored by Ole Streicher's avatar Ole Streicher

New upstream version 2.1.0

parent 9071af48
2.1.0 (2018-09-25)
------------------
- Add API function for retrieving history entries. [#501]
- Store ASDF-in-FITS data inside a 1x1 BINTABLE HDU. [#519]
- Allow implicit conversion of ``namedtuple`` into serializable types. [#534]
- Fix bug that prevented use of ASDF-in-FITS with HDUs that have names with
underscores. [#543]
- Add option to ``generic_io.get_file`` to close underlying file handle. [#544]
- Add top-level ``keys`` method to ``AsdfFile`` to access tree keys. [#545]
2.0.3 (2018-09-06)
------------------
......
Metadata-Version: 1.2
Name: asdf
Version: 2.0.3
Version: 2.1.0
Summary: Python tools to handle ASDF files
Home-page: http://github.com/spacetelescope/asdf
Author: Erik Bray, Dan D'Avella, Michael Droettboom
......
......@@ -42,12 +42,4 @@ from .tags.core.external_reference import ExternalArrayReference
from jsonschema import ValidationError
try:
from astropy.io import fits
except ImportError:
pass
else:
from .fits_embed import _AsdfHDU
fits.register_hdu(_AsdfHDU)
open = AsdfFile.open
......@@ -49,8 +49,9 @@ class AsdfFile(versioning.VersionedMixin):
The main class that represents an ASDF file object.
"""
def __init__(self, tree=None, uri=None, extensions=None, version=None,
ignore_version_mismatch=True, ignore_unrecognized_tag=False,
copy_arrays=False, custom_schema=None):
ignore_version_mismatch=True, ignore_unrecognized_tag=False,
ignore_implicit_conversion=False, copy_arrays=False,
custom_schema=None):
"""
Parameters
----------
......@@ -81,6 +82,12 @@ class AsdfFile(versioning.VersionedMixin):
When `True`, do not raise warnings for unrecognized tags. Set to
`False` by default.
ignore_implicit_conversion : bool
When `True`, do not raise warnings when types in the tree are
implicitly converted into a serializable object. The motivating
case for this is currently `namedtuple`, which cannot be serialized
as-is.
copy_arrays : bool, optional
When `False`, when reading files, attempt to memmap underlying data
arrays when possible.
......@@ -90,6 +97,7 @@ class AsdfFile(versioning.VersionedMixin):
validation pass. This can be used to ensure that particular ASDF
files follow custom conventions beyond those enforced by the
standard.
"""
if custom_schema is not None:
......@@ -104,6 +112,7 @@ class AsdfFile(versioning.VersionedMixin):
self._process_extensions(extensions)
self._ignore_version_mismatch = ignore_version_mismatch
self._ignore_unrecognized_tag = ignore_unrecognized_tag
self._ignore_implicit_conversion = ignore_implicit_conversion
self._file_format_version = None
......@@ -374,6 +383,9 @@ class AsdfFile(versioning.VersionedMixin):
self._validate(asdf_object, custom=bool(tree))
self._tree = asdf_object
def keys(self):
    # Delegate dictionary-style key access to the underlying ASDF tree so
    # that the returned object is the tree's own keys view.
    tree = self._tree
    return tree.keys()
def __getitem__(self, key):
    """Return the tree value stored under ``key``."""
    value = self._tree[key]
    return value
......@@ -1068,9 +1080,12 @@ class AsdfFile(versioning.VersionedMixin):
Finds all external "JSON References" in the tree and converts
them to `reference.Reference` objects.
"""
# Set directly to self._tree, since it doesn't need to be
# re-validated.
self._tree = reference.find_references(self._tree, self)
# Since this is the first place that the tree is processed when
# creating a new ASDF object, this is where we pass the option to
# ignore warnings about implicit type conversions.
# Set directly to self._tree, since it doesn't need to be re-validated.
self._tree = reference.find_references(self._tree, self,
ignore_implicit_conversion=self._ignore_implicit_conversion)
def resolve_references(self, do_not_fill_defaults=False):
"""
......@@ -1217,6 +1232,31 @@ class AsdfFile(versioning.VersionedMixin):
self.tree['history']['entries'].pop()
raise
def get_history_entries(self):
    """
    Get a list of history entries from the file object.

    Returns
    -------
    entries : list
        A list of history entries.
    """
    # Older files store the history section as a bare list; newer ones use
    # a dict with an 'entries' key. Handle both, defaulting to empty.
    if 'history' not in self.tree:
        return []

    history = self.tree['history']
    if isinstance(history, list):
        return history

    return history['entries'] if 'entries' in history else []
# Inherit docstring from dictionary
AsdfFile.keys.__doc__ = dict.keys.__doc__
def is_asdf_file(fd):
"""
......
......@@ -17,8 +17,7 @@ from . import generic_io
try:
from astropy.io import fits
from astropy.io.fits.file import _File
from astropy.io.fits.header import Header, _pad_length
from astropy.io.fits import Column, BinTableHDU
except ImportError:
raise ImportError("AsdfInFits requires astropy")
......@@ -30,72 +29,6 @@ FITS_SOURCE_PREFIX = 'fits:'
__all__ = ['AsdfInFits']
# NOTE(review): this class is removed by this commit in favor of storing the
# ASDF content in a standard 1x1 BINTABLE HDU (changelog entry #519).
class _AsdfHDU(fits.hdu.base.NonstandardExtHDU):
"""
A non-standard extension HDU for encapsulating an entire ASDF file within a
single HDU of a container FITS file. These HDUs have an extension (that is
an XTENSION keyword) of ASDF.
"""
# XTENSION value used to identify ASDF HDUs.
_extension = ASDF_EXTENSION_NAME
@classmethod
def from_buff(cls, buff, compress=False, **kwargs):
"""
Creates a new _AsdfHDU from a buffer containing an ASDF metadata tree.
Parameters
----------
buff : io.BytesIO
A buffer containing an ASDF metadata tree
compress : bool, optional
Gzip compress the contents of the ASDF HDU
"""
if compress:
# NOTE(review): wrapping the buffer here replaces the handle before
# tell() is called below -- presumably the compressed bytes were
# written elsewhere; confirm the compress code path.
buff = gzip.GzipFile(fileobj=buff, mode='wb')
# A proper HDU should still be padded out to a multiple of 2880
# technically speaking
data_length = buff.tell()
padding = (_pad_length(data_length) * cls._padding_byte).encode('ascii')
buff.write(padding)
buff.seek(0)
# Minimal conforming FITS header for a byte-stream extension HDU.
cards = [
('XTENSION', cls._extension, 'ASDF extension'),
('BITPIX', 8, 'array data type'),
('NAXIS', 1, 'number of array dimensions'),
('NAXIS1', data_length, 'Axis length'),
('PCOUNT', 0, 'number of parameters'),
('GCOUNT', 1, 'number of groups'),
('COMPRESS', compress, 'Uses gzip compression'),
('EXTNAME', cls._extension, 'Name of ASDF extension'),
]
header = Header(cards)
return cls._readfrom_internal(_File(buff), header=header)
# Called by astropy's HDU registry to decide whether a header belongs to
# this HDU class: matches when the first card is XTENSION == 'ASDF'.
@classmethod
def match_header(cls, header):
card = header.cards[0]
if card.keyword != 'XTENSION':
return False
xtension = card.value
if isinstance(xtension, str):
xtension = xtension.rstrip()
return xtension == cls._extension
# TODO: Add header verification
# One-line summary row used by HDUList.info().
def _summary(self):
# TODO: Perhaps make this more descriptive...
axes = tuple(self.data.shape)
return (self.name, self.ver, 'AsdfHDU', len(self._header), axes)
class _FitsBlock(object):
def __init__(self, hdu):
self._hdu = hdu
......@@ -125,10 +58,10 @@ class _EmbeddedBlockManager(block.BlockManager):
super(_EmbeddedBlockManager, self).__init__(asdffile)
def get_block(self, source):
if (isinstance(source, str) and
source.startswith(FITS_SOURCE_PREFIX)):
if (isinstance(source, str) and source.startswith(FITS_SOURCE_PREFIX)):
parts = re.match(
'((?P<name>[A-Z0-9]+),)?(?P<ver>[0-9]+)',
# All printable ASCII characters are allowed in EXTNAME
'((?P<name>[ -~]+),)?(?P<ver>[0-9]+)',
source[len(FITS_SOURCE_PREFIX):])
if parts is not None:
ver = int(parts.group('ver'))
......@@ -330,7 +263,10 @@ class AsdfInFits(asdf.AsdfFile):
array = np.frombuffer(buff.getvalue(), np.uint8)
return fits.ImageHDU(array, name=ASDF_EXTENSION_NAME)
else:
return _AsdfHDU.from_buff(buff)
data = np.array(buff.getbuffer(), dtype=np.uint8)[None, :]
fmt = '{}B'.format(len(data[0]))
column = fits.Column(array=data, format=fmt, name='ASDF_METADATA')
return fits.BinTableHDU.from_columns([column], name=ASDF_EXTENSION_NAME)
def _update_asdf_extension(self, all_array_storage=None,
all_array_compression=None, auto_inline=None,
......
......@@ -1104,7 +1104,7 @@ def get_uri(file_obj):
return getattr(file_obj, 'name', '')
def get_file(init, mode='r', uri=None):
def get_file(init, mode='r', uri=None, close=False):
"""
Returns a `GenericFile` instance suitable for wrapping the given
object `init`.
......@@ -1146,6 +1146,10 @@ def get_file(init, mode='r', uri=None):
`init` refers to a regular filesystem file. It is not required
if URI resolution is not used in the file.
close : bool
If ``True``, closes the underlying file handle when this object is
closed. Defaults to ``False``.
Returns
-------
fd : GenericFile
......@@ -1209,16 +1213,16 @@ def get_file(init, mode='r', uri=None):
else:
init2 = init
if isinstance(init2, io.RawIOBase):
result = RealFile(init2, mode, uri=uri)
result = RealFile(init2, mode, uri=uri, close=close)
else:
result = MemoryIO(init2, mode, uri=uri)
result._secondary_fd = init
return result
else:
if mode == 'w':
return OutputStream(init, uri=uri)
return OutputStream(init, uri=uri, close=close)
elif mode == 'r':
return InputStream(init, mode, uri=uri)
return InputStream(init, mode, uri=uri, close=close)
else:
raise ValueError(
"File '{0}' could not be opened in 'rw' mode".format(init))
......@@ -1243,10 +1247,10 @@ def get_file(init, mode='r', uri=None):
return MemoryIO(init, mode, uri=uri)
elif mode == 'w' and hasattr(init, 'write'):
return OutputStream(init, uri=uri)
return OutputStream(init, uri=uri, close=close)
elif mode == 'r' and hasattr(init, 'read'):
return InputStream(init, mode, uri=uri)
return InputStream(init, mode, uri=uri, close=close)
raise ValueError("Can't handle '{0}' as a file for mode '{1}'".format(
init, mode))
......@@ -128,7 +128,7 @@ class Reference(AsdfType):
pass
def find_references(tree, ctx):
def find_references(tree, ctx, ignore_implicit_conversion=False):
"""
Find all of the JSON references in the tree, and convert them into
`Reference` objects.
......@@ -138,7 +138,8 @@ def find_references(tree, ctx):
return Reference(tree['$ref'], json_id, asdffile=ctx)
return tree
return treeutil.walk_and_modify(tree, do_find)
return treeutil.walk_and_modify(
tree, do_find, ignore_implicit_conversion=ignore_implicit_conversion)
def resolve_references(tree, ctx, do_not_fill_defaults=False):
......
......@@ -14,6 +14,7 @@ from asdf import util
from asdf import asdftypes
from asdf.tests import helpers
from asdf.tests.helpers import yaml_to_asdf, display_warnings
from asdf.tags.core import HistoryEntry
SCHEMA_PATH = os.path.join(os.path.dirname(helpers.__file__), 'data')
......@@ -83,6 +84,14 @@ def test_history_to_file(tmpdir):
assert entry['software']['name'] == 'my_tool'
assert entry['software']['version'] == '2.0'
# Test the history entry retrieval API
entries = ff.get_history_entries()
assert len(entries) == 1
assert isinstance(entries, list)
assert isinstance(entries[0], HistoryEntry)
assert entries[0]['description'] == "This happened"
assert entries[0]['software']['name'] == 'my_tool'
def test_old_history(tmpdir):
"""Make sure that old versions of the history format are still accepted"""
......@@ -100,6 +109,29 @@ history:
with asdf.open(buff) as af:
assert len(af.tree['history']) == 1
# Test the history entry retrieval API
entries = af.get_history_entries()
assert len(entries) == 1
assert isinstance(entries, list)
assert isinstance(entries[0], HistoryEntry)
assert entries[0]['description'] == "Here's a test of old history entries"
assert entries[0]['software']['name'] == 'foo'
def test_get_history_entries(tmpdir):
    """Edge cases for the get_history_entries API; main cases tested above."""
    path = str(tmpdir.join('empty.asdf'))

    # Write out a file that contains no explicit history entries.
    with asdf.AsdfFile() as new_file:
        new_file.write_to(path)

    # Extension metadata is recorded, but the entry list must be empty.
    with asdf.open(path) as af:
        assert len(af['history']['extensions']) > 0
        assert len(af.get_history_entries()) == 0
def test_extension_metadata(tmpdir):
......
......@@ -14,18 +14,18 @@ frames:
- !wcs/celestial_frame-1.1.0
axes_names: [lon, lat]
name: CelestialFrame
reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', type: FK5}
reference_frame: {equinox: !time/time-1.1.0 '2018-01-01 00:00:00.000', type: FK5}
unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg]
- !wcs/celestial_frame-1.1.0
axes_names: [lon, lat]
name: CelestialFrame
reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01
reference_frame: {equinox: !time/time-1.1.0 '2018-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01
00:00:00.000', type: FK4}
unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg]
- !wcs/celestial_frame-1.1.0
axes_names: [lon, lat]
name: CelestialFrame
reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01
reference_frame: {equinox: !time/time-1.1.0 '2018-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01
00:00:00.000', type: FK4_noeterms}
unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg]
- !wcs/celestial_frame-1.1.0
......@@ -64,19 +64,19 @@ frames:
- !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 2.0}
- !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 1.0}
- !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 8.0}
obstime: !time/time-1.1.0 2010-01-01 00:00:00.000
obstime: !time/time-1.1.0 2018-01-01 00:00:00.000
type: GCRS
unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg]
- !wcs/celestial_frame-1.1.0
axes_names: [lon, lat]
name: CelestialFrame
reference_frame: {obstime: !time/time-1.1.0 '2010-01-01 00:00:00.000', type: CIRS}
reference_frame: {obstime: !time/time-1.1.0 '2018-01-01 00:00:00.000', type: CIRS}
unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg]
- !wcs/celestial_frame-1.1.0
axes_names: [x, y, z]
axes_order: [0, 1, 2]
name: CelestialFrame
reference_frame: {obstime: !time/time-1.1.0 '2022-01-03 00:00:00.000', type: ITRS}
reference_frame: {obstime: !time/time-1.1.0 '2018-01-03 00:00:00.000', type: ITRS}
unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg]
- !wcs/celestial_frame-1.1.0
axes_names: [lon, lat]
......@@ -91,7 +91,7 @@ frames:
- !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 2.0}
- !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 1.0}
- !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 8.0}
obstime: !time/time-1.1.0 2010-01-01 00:00:00.000
obstime: !time/time-1.1.0 2018-01-01 00:00:00.000
type: precessed_geocentric
unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg]
...
......@@ -9,6 +9,12 @@ from functools import partial
gwcs = pytest.importorskip('gwcs')
astropy = pytest.importorskip('astropy', minversion='3.0.0')
_gwcs_version = gwcs.version.version
_astropy_version = astropy.version.version
INCOMPATIBLE_VERSIONS = _gwcs_version == '0.9.0' and _astropy_version < '3.1.dev0'
from astropy.modeling import models
from astropy import coordinates as coord
from astropy import units as u
......@@ -39,6 +45,7 @@ def test_read_wcs(version):
assert isinstance(tree['gw3'], wcs.WCS)
@pytest.mark.skipif(INCOMPATIBLE_VERSIONS, reason="Incompatible versions for GWCS and Astropy")
@pytest.mark.parametrize('version', ['1.0.0', '1.1.0', '1.2.0'])
def test_composite_frame(tmpdir, version):
icrs = coord.ICRS()
......
......@@ -144,7 +144,8 @@ def assert_tree_match(old_tree, new_tree, ctx=None,
def assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None,
raw_yaml_check_func=None, write_options={}, extensions=None,
raw_yaml_check_func=None, write_options={},
init_options={}, extensions=None,
tree_match_func='assert_equal'):
"""
Assert that a given tree saves to ASDF and, when loaded back,
......@@ -171,7 +172,7 @@ def assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None,
# First, test writing/reading a BytesIO buffer
buff = io.BytesIO()
AsdfFile(tree, extensions=extensions).write_to(buff, **write_options)
AsdfFile(tree, extensions=extensions, **init_options).write_to(buff, **write_options)
assert not buff.closed
buff.seek(0)
with AsdfFile.open(buff, mode='rw', extensions=extensions) as ff:
......@@ -184,7 +185,7 @@ def assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None,
asdf_check_func(ff)
buff.seek(0)
ff = AsdfFile(extensions=extensions)
ff = AsdfFile(extensions=extensions, **init_options)
content = AsdfFile._open_impl(ff, buff, _get_yaml_content=True)
buff.close()
# We *never* want to get any raw python objects out
......@@ -195,7 +196,7 @@ def assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None,
raw_yaml_check_func(content)
# Then, test writing/reading to a real file
ff = AsdfFile(tree, extensions=extensions)
ff = AsdfFile(tree, extensions=extensions, **init_options)
ff.write_to(fname, **write_options)
with AsdfFile.open(fname, mode='rw', extensions=extensions) as ff:
assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func)
......@@ -205,7 +206,7 @@ def assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None,
# Make sure everything works without a block index
write_options['include_block_index'] = False
buff = io.BytesIO()
AsdfFile(tree, extensions=extensions).write_to(buff, **write_options)
AsdfFile(tree, extensions=extensions, **init_options).write_to(buff, **write_options)
assert not buff.closed
buff.seek(0)
with AsdfFile.open(buff, mode='rw', extensions=extensions) as ff:
......@@ -219,7 +220,7 @@ def assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None,
if not INTERNET_OFF and not sys.platform.startswith('win'):
server = RangeHTTPServer()
try:
ff = AsdfFile(tree, extensions=extensions)
ff = AsdfFile(tree, extensions=extensions, **init_options)
ff.write_to(os.path.join(server.tmpdir, 'test.asdf'), **write_options)
with AsdfFile.open(server.url + 'test.asdf', mode='r',
extensions=extensions) as ff:
......
......@@ -57,6 +57,8 @@ def test_embed_asdf_in_fits_file(tmpdir, backwards_compat):
hdulist = fits.HDUList()
hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='SCI'))
hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='DQ'))
# Test a name with underscores to make sure it works
hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='WITH_UNDERSCORE'))
tree = {
'model': {
......@@ -67,6 +69,10 @@ def test_embed_asdf_in_fits_file(tmpdir, backwards_compat):
'dq': {
'data': hdulist['DQ'].data,
'wcs': 'WCS info'
},
'with_underscore': {
'data': hdulist['WITH_UNDERSCORE'].data,
'wcs': 'WCS info'
}
}
}
......@@ -75,8 +81,8 @@ def test_embed_asdf_in_fits_file(tmpdir, backwards_compat):
ff.write_to(fits_testfile, use_image_hdu=backwards_compat)
with fits.open(fits_testfile) as hdulist2:
assert len(hdulist2) == 3
assert [x.name for x in hdulist2] == ['SCI', 'DQ', 'ASDF']
assert len(hdulist2) == 4
assert [x.name for x in hdulist2] == ['SCI', 'DQ', 'WITH_UNDERSCORE', 'ASDF']
assert_array_equal(hdulist2[0].data, np.arange(512, dtype=np.float))
asdf_hdu = hdulist2['ASDF']
assert asdf_hdu.data.tostring().startswith(b'#ASDF')
......@@ -86,7 +92,7 @@ def test_embed_asdf_in_fits_file(tmpdir, backwards_compat):
assert isinstance(asdf_hdu, fits.ImageHDU)
assert asdf_hdu.data.tostring().strip().endswith(b'...')
else:
assert isinstance(asdf_hdu, fits_embed._AsdfHDU)
assert isinstance(asdf_hdu, fits.BinTableHDU)
with fits_embed.AsdfInFits.open(hdulist2) as ff2:
assert_tree_match(tree, ff2.tree)
......@@ -110,7 +116,7 @@ def test_embed_asdf_in_fits_file_anonymous_extensions(tmpdir):
assert len(hdulist) == 4
assert [x.name for x in hdulist] == ['PRIMARY', '', '', 'ASDF']
asdf_hdu = hdulist['ASDF']
assert isinstance(asdf_hdu, fits_embed._AsdfHDU)
assert isinstance(asdf_hdu, fits.BinTableHDU)
assert asdf_hdu.data.tostring().startswith(b'#ASDF')
with fits_embed.AsdfInFits.open(hdulist) as ff2:
......@@ -378,6 +384,15 @@ def test_extension_check():
with asdf.AsdfFile.open(testfile, strict_extension_check=True) as ff:
pass
def test_verify_with_astropy(tmpdir):
    """An ASDF-in-FITS file must pass astropy's strict FITS verification."""
    output = str(tmpdir.join('asdf.fits'))

    with create_asdf_in_fits() as aif:
        aif.write_to(output)

    # 'exception' makes any verification problem raise instead of warn.
    with fits.open(output) as hdulist:
        hdulist.verify('exception')
def test_dangling_file_handle(tmpdir):
"""
This tests the bug fix introduced in #533. Without the bug fix, this test
......
......@@ -91,17 +91,15 @@ def test_open2(tree, tmpdir):
path = os.path.join(str(tmpdir), 'test.asdf')
def get_write_fd():
f = generic_io.get_file(open(path, 'wb'), mode='w')
f = generic_io.get_file(open(path, 'wb'), mode='w', close=True)
assert isinstance(f, generic_io.RealFile)
assert f._uri == util.filepath_to_url(path)
f._close = True
return f
def get_read_fd():
f = generic_io.get_file(open(path, 'rb'), mode='r')
f = generic_io.get_file(open(path, 'rb'), mode='r', close=True)
assert isinstance(f, generic_io.RealFile)
assert f._uri == util.filepath_to_url(path)
f._close = True
return f
with _roundtrip(tree, get_write_fd, get_read_fd) as ff:
......@@ -151,17 +149,15 @@ def test_io_open(tree, tmpdir):
path = os.path.join(str(tmpdir), 'test.asdf')
def get_write_fd():
f = generic_io.get_file(io.open(path, 'wb'), mode='w')
f = generic_io.get_file(io.open(path, 'wb'), mode='w', close=True)
assert isinstance(f, generic_io.RealFile)
assert f._uri == util.filepath_to_url(path)
f._close = True
return f
def get_read_fd():
f = generic_io.get_file(io.open(path, 'r+b'), mode='rw')
f = generic_io.get_file(io.open(path, 'r+b'), mode='rw', close=True)
assert isinstance(f, generic_io.RealFile)
assert f._uri == util.filepath_to_url(path)
f._close = True
return f
with _roundtrip(tree, get_write_fd, get_read_fd) as ff:
......@@ -170,6 +166,22 @@ def test_io_open(tree, tmpdir):
ff.tree['science_data'][0] = 42
def test_close_underlying(tmpdir):
    """
    The ``close=True`` option of ``generic_io.get_file`` must close the
    underlying file handle when the wrapping object is closed.
    """
    path = os.path.join(str(tmpdir), 'test.asdf')

    with generic_io.get_file(open(path, 'wb'), mode='w', close=True) as ff:
        pass

    # Both the wrapper and the raw handle it owns should now be closed.
    # (Plain truthiness asserts instead of `== True` comparisons.)
    assert ff.is_closed()
    assert ff._fd.closed

    with generic_io.get_file(open(path, 'rb'), close=True) as ff2:
        pass

    assert ff2.is_closed()
    assert ff2._fd.closed
def test_bytes_io(tree):
buff = io.BytesIO()
......
......@@ -1154,6 +1154,12 @@ def test_top_level_tree(small_tree):
assert_tree_match(ff2.tree['tree'], ff2['tree'])
def test_top_level_keys(small_tree):
    """AsdfFile.keys should mirror the keys of the underlying tree."""
    af = asdf.AsdfFile({'tree': small_tree})
    assert af.keys() == af.tree.keys()
def test_tag_to_schema_resolver_deprecation():
ff = asdf.AsdfFile()
with pytest.warns(AsdfDeprecationWarning):
......
......@@ -167,6 +167,30 @@ required: [foobar]
schema.check_schema(schema_tree)
# Loads a schema from a local file path whose $ref resolves (via the default
# resolver) to another schema; checks both reference resolution and validity.
def test_load_schema_with_file_url(tmpdir):
schema_def = """
%YAML 1.1
%TAG !asdf! tag:stsci.edu:asdf/
---
$schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0"
id: "http://stsci.edu/schemas/asdf/nugatory/nugatory-1.0.0"
tag: "tag:stsci.edu:asdf/nugatory/nugatory-1.0.0"
type: object
properties:
foobar:
$ref: "{}"
required: [foobar]
...
""".format(resolver.default_resolver('tag:stsci.edu:asdf/core/ndarray-1.0.0'))
# Write the schema to disk so load_schema receives a filesystem path.
schema_path = tmpdir.join('nugatory.yaml')
schema_path.write(schema_def.encode())
# resolve_references=True forces the $ref above to be followed.
schema_tree = schema.load_schema(str(schema_path), resolve_references=True)
schema.check_schema(schema_tree)
def test_schema_caching():
# Make sure that if we request the same URL, we get the *exact
# same* object, to ensure the cache is working.
......@@ -470,6 +494,82 @@ def test_large_literals():
print(buff.getvalue())
def test_nested_array():
    """Validation of fixed-length heterogeneous arrays nested inside arrays."""
    nested_schema = {
        'type': 'object',
        'properties': {
            'stuff': {
                'type': 'array',
                'items': {
                    'type': 'array',
                    'items': [
                        {'type': 'integer'},
                        {'type': 'string'},
                        {'type': 'number'},
                    ],
                    'minItems': 3,
                    'maxItems': 3,
                },
            },
        },
    }

    # A conforming instance validates cleanly.
    good = dict(stuff=[[1, 'hello', 2], [4, 'world', 9.7]])
    schema.validate(good, schema=nested_schema)

    # Each of these violates the item types, nesting, or length constraints.
    bad_instances = [
        dict(stuff=[[1, 2, 3]]),
        dict(stuff=[12, 'dldl']),
        dict(stuff=[[12, 'dldl']]),
        dict(stuff=[[1, 'hello', 2], [4, 5]]),
        dict(stuff=[[1, 'hello', 2], [4, 5, 6]]),
    ]
    for bad in bad_instances:
        with pytest.raises(ValidationError):
            schema.validate(bad, schema=nested_schema)
def test_nested_array_yaml(tmpdir):
schema_def = """
%YAML 1.1
---
type: object
properties:
stuff:
type: array
items:
type: array
items: