Commit d94f4f6b authored by Ole Streicher

New upstream version 2.0.2

parent 4576441c
2.0.2 (2018-07-27)
------------------
- Allow serialization of broadcasted ``numpy`` arrays. [#507]
- Fix bug that caused result of ``set_array_compression`` to be overwritten by
``all_array_compression`` argument to ``write_to``. [#510]
- Add workaround for Python OSX write limit bug
(see https://bugs.python.org/issue24658). [#521]
- Fix bug with custom schema validation when using out-of-line definitions in
schema file. [#522]
2.0.1 (2018-05-08)
------------------
......
Metadata-Version: 1.2
Name: asdf
Version: 2.0.1
Version: 2.0.2
Summary: Python tools to handle ASDF files
Home-page: http://github.com/spacetelescope/asdf
Author: Erik Bray, Dan D'Avella, Michael Droettboom
......
......@@ -93,7 +93,7 @@ class AsdfFile(versioning.VersionedMixin):
"""
if custom_schema is not None:
self._custom_schema = schema.load_schema(custom_schema)
self._custom_schema = schema.load_custom_schema(custom_schema)
schema.check_schema(self._custom_schema)
else:
self._custom_schema = None
......
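In user terms, this hunk changes which loader runs when a `custom_schema` is passed to `AsdfFile` or `asdf.open`: the schema is now loaded via `load_custom_schema`, which merges it with the core ASDF schema and resolves local definitions. A minimal sketch of the calling side (the schema file name is illustrative, borrowed from the new tests later in this diff):

```python
import asdf

tree = {'thing': {'biz': 'hello', 'baz': 'world'}}

# custom_schema now goes through schema.load_custom_schema internally,
# so schemas with out-of-line definitions validate correctly.
with asdf.AsdfFile(tree, custom_schema='custom_schema_definitions.yaml') as ff:
    ff.write_to('out.asdf')
```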
......@@ -551,7 +551,10 @@ class BlockManager(object):
self.set_array_storage(block, all_array_storage)
all_array_compression = getattr(ctx, '_all_array_compression', 'input')
block.output_compression = all_array_compression
# Only override block compression algorithm if it wasn't explicitly set
# by AsdfFile.set_array_compression.
if all_array_compression != 'input':
block.output_compression = all_array_compression
auto_inline = getattr(ctx, '_auto_inline', None)
if auto_inline:
......
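The behavioral contract after this fix, stated as user code (a minimal sketch; file names are illustrative, and the API calls are the ones exercised by the compression tests further down this diff):

```python
import numpy as np
import asdf

tree = {'data': np.arange(1000)}
af = asdf.AsdfFile(tree)

# Per-array choice, as in the new test_set_array_compression test below.
af.set_array_compression(tree['data'], 'bzp2')

# The default all_array_compression='input' no longer clobbers the
# per-array setting on write.
af.write_to('per_array.asdf')

# An explicit all_array_compression still overrides every array in the file.
af.write_to('override.asdf', all_array_compression='zlib')
```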
# Licensed under a 3-clause BSD style license - see LICENSE.rst
......@@ -4,25 +4,28 @@
import os
import io
from functools import partial
import numpy as np
import pytest
from ... import AsdfFile
from .. import main, diff
from ...tests import helpers
from .. import main, diff
TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'data')
from . import data as test_data
get_test_data_path = partial(helpers.get_test_data_path, module=test_data)
def _assert_diffs_equal(filenames, result_file, minimal=False):
iostream = io.StringIO()
file_paths = ["{}/{}".format(TEST_DATA_PATH, name) for name in filenames]
file_paths = [get_test_data_path(name) for name in filenames]
diff(file_paths, minimal=minimal, iostream=iostream)
iostream.seek(0)
result_path = "{}/{}".format(TEST_DATA_PATH, result_file)
result_path = get_test_data_path(result_file)
with open(result_path, 'r') as handle:
assert handle.read() == iostream.read()
......@@ -46,10 +49,10 @@ def test_file_not_found():
# Try to open files that exist but are not valid asdf
filenames = ['frames.diff', 'blocks.diff']
with pytest.raises(RuntimeError):
diff(["{}/{}".format(TEST_DATA_PATH, name) for name in filenames], False)
diff([get_test_data_path(name) for name in filenames], False)
def test_diff_command():
filenames = ['frames0.asdf', 'frames1.asdf']
paths = ["{}/{}".format(TEST_DATA_PATH, name) for name in filenames]
paths = [get_test_data_path(name) for name in filenames]
assert main.main_from_args(['diff'] + paths) == 0
......@@ -107,7 +107,11 @@ def _array_tofile_simple(fd, write, array):
if sys.platform == 'darwin': # pragma: no cover
def _array_tofile(fd, write, array):
OSX_WRITE_LIMIT = 2 ** 32
# This value is currently set as a workaround for a known bug in Python
# on OSX. Individual writes must be less than 2GB, which necessitates
# the chunk size here if we want it to remain a power of 2.
# See https://bugs.python.org/issue24658.
OSX_WRITE_LIMIT = 2 ** 30
if fd is None or array.nbytes >= OSX_WRITE_LIMIT and array.nbytes % 4096 == 0:
return _array_tofile_chunked(write, array, OSX_WRITE_LIMIT)
return _array_tofile_simple(fd, write, array)
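`_array_tofile_chunked` itself is outside this hunk; below is a hedged sketch of the behavior the workaround relies on, assuming it simply splits the buffer so that no single `write` call reaches the limit:

```python
def _array_tofile_chunked(write, array, chunksize):
    # Assumed sketch, not the actual implementation: view the array as raw
    # bytes and emit at most `chunksize` bytes per write() call, staying
    # under the macOS single-write limit.
    buffer = array.tobytes()
    for offset in range(0, len(buffer), chunksize):
        write(buffer[offset:offset + chunksize])
```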
......@@ -372,7 +376,7 @@ class GenericFile(object):
"""
def write_array(self, array):
_array_tofile(None, self.write, array)
_array_tofile(None, self.write, np.ascontiguousarray(array))
def seek(self, offset, whence=0):
"""
......@@ -751,7 +755,7 @@ class RealFile(RandomAccessFile):
arr.flush()
self.fast_forward(len(arr.data))
else:
_array_tofile(self._fd, self._fd.write, arr)
_array_tofile(self._fd, self._fd.write, np.ascontiguousarray(arr))
def can_memmap(self):
return True
......
......@@ -305,7 +305,29 @@ HARDCODED_SCHEMA = {
@lru_cache()
def load_schema(url, resolver=None, resolve_references=False):
def load_custom_schema(url):
# Avoid circular import
from .tags.core import AsdfObject
custom = load_schema(url, resolve_local_refs=True)
core = load_schema(AsdfObject.yaml_tag)
def update(d, u):
from collections import Mapping
for k, v in u.items():
# Respect the property ordering of the core schema
if k == 'propertyOrder' and k in d:
d[k] = u[k] + d[k]
elif isinstance(v, Mapping):
d[k] = update(d.get(k, {}), v)
else:
d[k] = v
return d
return update(custom, core)
@lru_cache()
def load_schema(url, resolver=None, resolve_references=False,
resolve_local_refs=False):
"""
Load a schema from the given URL.
......@@ -322,6 +344,12 @@ def load_schema(url, resolver=None, resolve_references=False):
resolve_references : bool, optional
If `True`, resolve all `$ref` references.
resolve_local_refs : bool, optional
If `True`, resolve all `$ref` references that refer to other objects
within the same schema. This will automatically be handled when passing
`resolve_references=True`, but it may be desirable in some cases to
control local reference resolution separately.
"""
if resolver is None:
resolver = mresolver.default_resolver
......@@ -331,6 +359,20 @@ def load_schema(url, resolver=None, resolve_references=False):
else:
schema, url = loader(url)
# Resolve local references
if resolve_local_refs:
def resolve_local(node, json_id):
if isinstance(node, dict) and '$ref' in node:
ref_url = resolver(node['$ref'])
if ref_url.startswith('#'):
parts = urlparse.urlparse(ref_url)
subschema_fragment = reference.resolve_fragment(
schema, parts.fragment)
return subschema_fragment
return node
schema = treeutil.walk_and_modify(schema, resolve_local)
if resolve_references:
def resolve_refs(node, json_id):
if json_id is None:
......@@ -348,10 +390,13 @@ def load_schema(url, resolver=None, resolve_references=False):
subschema = schema
else:
subschema = load_schema(suburl_path, resolver, True)
subschema_fragment = reference.resolve_fragment(
subschema, fragment)
return subschema_fragment
return node
schema = treeutil.walk_and_modify(schema, resolve_refs)
return schema
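Together with `load_custom_schema` above, the net effect is that a custom schema using out-of-line `#/definitions/...` references now validates cleanly. A sketch of the new keyword in isolation (the path is illustrative; the assertion mirrors the existing self-reference test in this diff):

```python
from asdf import schema

# resolve_local_refs inlines fragments such as "#/definitions/bizbaz"
# while leaving external references untouched.
s = schema.load_schema('custom_schema_definitions.yaml',
                       resolve_local_refs=True)
assert '$ref' not in repr(s)
```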
......@@ -420,9 +465,7 @@ def validate_large_literals(instance):
"literal in ASDF".format(instance))
def validate(instance, ctx=None, schema={},
validators=None,
*args, **kwargs):
def validate(instance, ctx=None, schema={}, validators=None, *args, **kwargs):
"""
Validate the given instance (which must be a tagged tree) against
the appropriate schema. The schema itself is located using the
......
......@@ -386,14 +386,19 @@ class NDArrayType(AsdfType):
@classmethod
def to_tree(cls, data, ctx):
base = util.get_array_base(data)
block = ctx.blocks.find_or_create_block_for_array(data, ctx)
shape = data.shape
dtype = data.dtype
offset = data.ctypes.data - base.ctypes.data
if data.flags[b'C_CONTIGUOUS']:
strides = None
else:
strides = data.strides
strides = None
if not data.flags.c_contiguous:
# We do not want to encode strides for broadcasted arrays
if not all(data.strides):
data = np.ascontiguousarray(data)
else:
strides = data.strides
block = ctx.blocks.find_or_create_block_for_array(data, ctx)
result = {}
......
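The reason broadcast views need this special case: `numpy` implements broadcasting with zero strides, which cannot be encoded as a regular strided block. A quick illustration (exact stride values depend on the platform's integer size):

```python
import numpy as np

a, b = np.broadcast_arrays(np.array([10, 20]), np.array(10))
print(b.strides)             # (0,): the scalar is repeated via a zero stride
print(b.flags.c_contiguous)  # False, and not all(b.strides) is True
print(np.ascontiguousarray(b).strides)  # real data again, e.g. (8,)
```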
# Licensed under a 3-clause BSD style license - see LICENSE.rst
......@@ -21,8 +21,8 @@ from asdf import util
from asdf.tests import helpers, CustomTestType
from asdf.tags.core import ndarray
TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'data')
from . import data as test_data
TEST_DATA_PATH = helpers.get_test_data_path('', module=test_data)
# These custom types and the custom extension are here purely for the purpose
......@@ -803,3 +803,9 @@ def test_tagged_object_array(tmpdir):
objdata.flat[i] = Quantity(i, 'angstrom')
helpers.assert_roundtrip_tree({'bizbaz': objdata}, tmpdir)
def test_broadcasted_array(tmpdir):
attrs = np.broadcast_arrays(np.array([10,20]), np.array(10), np.array(10))
tree = {'one': attrs[1] }#, 'two': attrs[1], 'three': attrs[2]}
helpers.assert_roundtrip_tree(tree, tmpdir)
# Licensed under a 3-clause BSD style license - see LICENSE.rst
......@@ -4,6 +4,7 @@
import os
import pytest
import warnings
from functools import partial
gwcs = pytest.importorskip('gwcs')
astropy = pytest.importorskip('astropy', minversion='3.0.0')
......@@ -20,8 +21,8 @@ import asdf
from asdf import AsdfFile
from asdf.tests import helpers
TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'data')
from . import data as test_data
get_test_data_path = partial(helpers.get_test_data_path, module=test_data)
@pytest.mark.parametrize('version', ['1.0.0', '1.1.0'])
......@@ -31,7 +32,7 @@ def test_read_wcs(version):
more recent than 1.1.0 since the schemas and tags have moved to Astropy and
GWCS."""
filename = os.path.join(TEST_DATA_PATH, "test_wcs-{}.asdf".format(version))
filename = get_test_data_path("test_wcs-{}.asdf".format(version))
with asdf.open(filename) as tree:
assert isinstance(tree['gw1'], wcs.WCS)
assert isinstance(tree['gw2'], wcs.WCS)
......@@ -68,7 +69,7 @@ def test_frames(tmpdir):
test any subsequent ASDF versions since the schemas and tags for those
frames have moved to Astropy and gwcs."""
filename = os.path.join(TEST_DATA_PATH, "test_frames-1.1.0.asdf")
filename = get_test_data_path("test_frames-1.1.0.asdf")
with asdf.open(filename) as tree:
for frame in tree['frames']:
assert isinstance(frame, cf.CoordinateFrame)
......
# Licensed under a 3-clause BSD style license - see LICENSE.rst
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/core/asdf-1.1.0"
tag: "tag:stsci.edu:asdf/core/asdf-1.1.0"
type: object
properties:
thing:
$ref: "#/definitions/bizbaz"
required: [thing]
additionalProperties: true
definitions:
bizbaz:
type: object
properties:
biz:
type: string
baz:
type: string
......@@ -33,8 +33,30 @@ except ImportError:
INTERNET_OFF = False
__all__ = ['assert_tree_match', 'assert_roundtrip_tree', 'yaml_to_asdf',
'get_file_sizes', 'display_warnings']
if sys.version_info >= (3, 7):
from importlib import resources
else:
try:
import importlib_resources as resources
except ImportError:
resources = None
__all__ = ['get_test_data_path', 'assert_tree_match', 'assert_roundtrip_tree',
'yaml_to_asdf', 'get_file_sizes', 'display_warnings']
def get_test_data_path(name, module=None):
if resources is None:
raise RuntimeError("The importlib_resources package is required to get"
" test data on systems with Python < 3.7")
if module is None:
from . import data as test_data
module = test_data
with resources.path(module, name) as path:
return str(path)
def assert_tree_match(old_tree, new_tree, ctx=None,
......
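Usage follows the pattern the updated tests adopt: bind the test-data module once with `functools.partial`, then resolve individual file names against it. A sketch (the absolute module path is an assumption inferred from the relative `from . import data as test_data` imports above):

```python
from functools import partial
from asdf.tests import helpers
# Assumed absolute form of the relative import used in asdf/commands/tests.
from asdf.commands.tests import data as test_data

get_test_data_path = partial(helpers.get_test_data_path, module=test_data)
path = get_test_data_path('frames0.asdf')
```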
......@@ -16,7 +16,7 @@ from asdf import versioning
from . import helpers, CustomTestType
TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'data')
TEST_DATA_PATH = str(helpers.get_test_data_path(''))
def test_custom_tag():
......
......@@ -155,21 +155,40 @@ def test_input(tmpdir):
def test_none(tmpdir):
tree = _get_large_tree()
tmpfile1 = os.path.join(str(tmpdir), 'test1.asdf')
afile = asdf.AsdfFile(tree)
afile.write_to(tmpfile1)
afile.close()
afile = asdf.AsdfFile.open(tmpfile1)
assert afile.get_array_compression(afile.tree['science_data']) is None
with asdf.AsdfFile(tree) as afile:
afile.write_to(tmpfile1)
tmpfile2 = os.path.join(str(tmpdir), 'test2.asdf')
afile.write_to(tmpfile2, all_array_compression='zlib')
assert afile.get_array_compression(afile.tree['science_data']) == 'zlib'
afile.close()
afile = asdf.AsdfFile.open(tmpfile2)
afile.write_to(tmpfile1, all_array_compression=None)
afile.close()
afile = asdf.AsdfFile.open(tmpfile1)
helpers.assert_tree_match(tree, afile.tree)
assert afile.get_array_compression(afile.tree['science_data']) is None
afile.close()
with asdf.AsdfFile.open(tmpfile1) as afile:
assert afile.get_array_compression(afile.tree['science_data']) is None
afile.write_to(tmpfile2, all_array_compression='zlib')
assert afile.get_array_compression(afile.tree['science_data']) == 'zlib'
with asdf.AsdfFile.open(tmpfile2) as afile:
afile.write_to(tmpfile1, all_array_compression=None)
with asdf.AsdfFile.open(tmpfile1) as afile:
helpers.assert_tree_match(tree, afile.tree)
assert afile.get_array_compression(afile.tree['science_data']) is None
def test_set_array_compression(tmpdir):
tmpfile = os.path.join(str(tmpdir), 'compressed.asdf')
zlib_data = np.array([x for x in range(1000)])
bzp2_data = np.array([x for x in range(1000)])
tree = dict(zlib_data=zlib_data, bzp2_data=bzp2_data)
with asdf.AsdfFile(tree) as af_out:
af_out.set_array_compression(zlib_data, 'zlib')
af_out.set_array_compression(bzp2_data, 'bzp2')
af_out.write_to(tmpfile)
with asdf.open(tmpfile) as af_in:
assert af_in.get_array_compression(af_in.tree['zlib_data']) == 'zlib'
assert af_in.get_array_compression(af_in.tree['bzp2_data']) == 'bzp2'
......@@ -18,10 +18,7 @@ import asdf
from asdf import fits_embed
from asdf import open as asdf_open
from .helpers import assert_tree_match, display_warnings
TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'data')
from .helpers import assert_tree_match, display_warnings, get_test_data_path
def create_asdf_in_fits():
......@@ -282,7 +279,7 @@ def test_asdf_open(tmpdir):
compare_asdfs(asdf_in_fits, ff)
def test_open_gzipped():
testfile = os.path.join(TEST_DATA_PATH, 'asdf.fits.gz')
testfile = get_test_data_path('asdf.fits.gz')
# Opening as an HDU should work
with fits.open(testfile) as ff:
......@@ -308,7 +305,7 @@ def test_bad_input(tmpdir):
@pytest.mark.skipif(sys.platform.startswith('win'),
reason='Avoid path manipulation on Windows')
def test_version_mismatch_file():
testfile = os.path.join(TEST_DATA_PATH, 'version_mismatch.fits')
testfile = str(get_test_data_path('version_mismatch.fits'))
with pytest.warns(None) as w:
with asdf.AsdfFile.open(testfile,
......@@ -360,7 +357,7 @@ def test_serialize_table(tmpdir):
assert data._source.startswith('fits:')
def test_extension_check():
testfile = os.path.join(TEST_DATA_PATH, 'extension_check.fits')
testfile = get_test_data_path('extension_check.fits')
with pytest.warns(None) as warnings:
with asdf.AsdfFile.open(testfile) as ff:
......
......@@ -25,9 +25,6 @@ from asdf import yamlutil
from asdf.tests import helpers
TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'data')
class CustomExtension:
"""
This is the base class that is used for extensions for custom tag
......@@ -45,7 +42,7 @@ class CustomExtension:
@property
def url_mapping(self):
return [('http://nowhere.org/schemas/custom/',
util.filepath_to_url(TEST_DATA_PATH) +
util.filepath_to_url(helpers.get_test_data_path('')) +
'/{url_suffix}.yaml')]
......@@ -95,7 +92,7 @@ def test_read_json_schema():
This was known to fail on Python 3.5 See issue #314 at
https://github.com/spacetelescope/asdf/issues/314 for more details.
"""
json_schema = os.path.join(TEST_DATA_PATH, 'example_schema.json')
json_schema = helpers.get_test_data_path('example_schema.json')
schema_tree = schema.load_schema(json_schema, resolve_references=True)
schema.check_schema(schema_tree)
......@@ -443,7 +440,7 @@ custom: !<tag:nowhere.org:custom/foreign_tag_reference-1.0.0>
def test_self_reference_resolution():
r = resolver.Resolver(CustomExtension().url_mapping, 'url')
s = schema.load_schema(
os.path.join(TEST_DATA_PATH, 'self_referencing-1.0.0.yaml'),
helpers.get_test_data_path('self_referencing-1.0.0.yaml'),
resolver=r,
resolve_references=True)
assert '$ref' not in repr(s)
......@@ -536,7 +533,7 @@ def test_assert_roundtrip_with_extension(tmpdir):
def test_custom_validation_bad(tmpdir):
custom_schema_path = os.path.join(TEST_DATA_PATH, 'custom_schema.yaml')
custom_schema_path = helpers.get_test_data_path('custom_schema.yaml')
asdf_file = os.path.join(str(tmpdir), 'out.asdf')
# This tree does not conform to the custom schema
......@@ -562,7 +559,7 @@ def test_custom_validation_bad(tmpdir):
def test_custom_validation_good(tmpdir):
custom_schema_path = os.path.join(TEST_DATA_PATH, 'custom_schema.yaml')
custom_schema_path = helpers.get_test_data_path('custom_schema.yaml')
asdf_file = os.path.join(str(tmpdir), 'out.asdf')
# This tree conforms to the custom schema
......@@ -576,3 +573,47 @@ def test_custom_validation_good(tmpdir):
with asdf.open(asdf_file, custom_schema=custom_schema_path) as ff:
pass
def test_custom_validation_with_definitions_good(tmpdir):
custom_schema_path = helpers.get_test_data_path('custom_schema_definitions.yaml')
asdf_file = os.path.join(str(tmpdir), 'out.asdf')
# This tree conforms to the custom schema
tree = {
'thing': { 'biz': 'hello', 'baz': 'world' }
}
with asdf.AsdfFile(tree, custom_schema=custom_schema_path) as ff:
ff.write_to(asdf_file)
with asdf.open(asdf_file, custom_schema=custom_schema_path) as ff:
pass
def test_custom_validation_with_definitions_bad(tmpdir):
custom_schema_path = helpers.get_test_data_path('custom_schema_definitions.yaml')
asdf_file = os.path.join(str(tmpdir), 'out.asdf')
# This tree does NOT conform to the custom schema
tree = {
'forb': { 'biz': 'hello', 'baz': 'world' }
}
# Creating file without custom schema should pass
with asdf.AsdfFile(tree) as ff:
ff.write_to(asdf_file)
# Creating file with custom schema should fail
with pytest.raises(ValidationError):
with asdf.AsdfFile(tree, custom_schema=custom_schema_path) as ff:
pass
# Opening file without custom schema should pass
with asdf.open(asdf_file) as ff:
pass
# Opening file with custom schema should fail
with pytest.raises(ValidationError):
with asdf.open(asdf_file, custom_schema=custom_schema_path) as ff:
pass
# Autogenerated by Astropy-affiliated package asdf's setup.py on 2018-05-08 14:49:44
# Autogenerated by Astropy-affiliated package asdf's setup.py on 2018-07-30 12:30:24
from __future__ import unicode_literals
import datetime
......@@ -187,8 +187,8 @@ def _get_repo_path(pathname, levels=None): # pragma: no cover
_packagename = "asdf"
_last_generated_version = "2.0.1"
_last_githash = "e82a4aadeced38a1c7fa518896bf99cd8d9d7628"
_last_generated_version = "2.0.2"
_last_githash = "9b856b85cb57f6bbb26f810ed2eae8b01bc29788"
# Determine where the source code for this module
# lives. If __file__ is not a filesystem path then
......@@ -206,10 +206,10 @@ else:
major = 2
minor = 0
bugfix = 1
bugfix = 2
release = True
timestamp = datetime.datetime(2018, 5, 8, 14, 49, 44)
timestamp = datetime.datetime(2018, 7, 30, 12, 30, 24)
debug = False
try:
......
......@@ -31,7 +31,7 @@ URL = metadata.get('url', '')
builtins._PACKAGE_NAME_ = PACKAGE_NAME
# VERSION should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386)
VERSION = '2.0.1'
VERSION = '2.0.2'
# Indicates if this version is a release version
RELEASE = 'dev' not in VERSION
......