Commit d1c6f519 authored by Ole Streicher's avatar Ole Streicher

New upstream version 1.3.1

parent 70668bd7
1.3.1 (2017-11-02)
------------------
- Relax requirement on ``semantic_version`` version to 2.3.1. [#361]
- Fix bug when retrieving file format version from new ASDF file. [#365]
- Fix bug when duplicating inline arrays. [#370]
- Allow tag references using the tag URI scheme to be resolved in schema files.
[#371]
1.3.0 (2017-10-24)
------------------
......
Metadata-Version: 1.0
Metadata-Version: 1.2
Name: asdf
Version: 1.3.0
Version: 1.3.1
Summary: Python tools to handle ASDF files
Home-page: http://github.com/spacetelescope/asdf
Author: Erik Bray, Michael Droettboom
Author-email: mdroe@stsci.edu
License: BSD
Description: UNKNOWN
Description-Content-Type: UNKNOWN
Description: Advanced Scientific Data Format (ASDF) is a next generation interchange format for scientific data
Platform: UNKNOWN
Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*
......@@ -95,6 +95,8 @@ class AsdfFile(versioning.VersionedMixin):
self._ignore_version_mismatch = ignore_version_mismatch
self._ignore_unrecognized_tag = ignore_unrecognized_tag
self._file_format_version = None
self._fd = None
self._external_asdf_by_uri = {}
self._blocks = block.BlockManager(self, copy_arrays=copy_arrays)
......@@ -139,7 +141,7 @@ class AsdfFile(versioning.VersionedMixin):
@property
def file_format_version(self):
if self._file_format_version is None:
return versioning.AsdfVersion(self.versionspec['FILE_FORMAT'])
return versioning.AsdfVersion(self.version_map['FILE_FORMAT'])
else:
return self._file_format_version
......
......@@ -645,6 +645,10 @@ class BlockManager(object):
block = asdffile.blocks._internal_blocks[0]
self.set_array_storage(block, 'external')
# Handle the case of inline data
elif isinstance(source, list):
block = Block(data=np.array(source), array_storage='inline')
else:
raise TypeError("Unknown source '{0}'".format(source))
......
......@@ -104,7 +104,11 @@ DEFAULT_URL_MAPPING = [
(constants.STSCI_SCHEMA_URI_BASE,
util.filepath_to_url(
os.path.join(SCHEMA_PATH, 'stsci.edu')) +
'/{url_suffix}.yaml')]
'/{url_suffix}.yaml'),
('tag:stsci.edu:asdf/',
util.filepath_to_url(
os.path.join(SCHEMA_PATH, 'stsci.edu')) +
'/asdf/{url_suffix}.yaml')]
default_url_mapping = Resolver(DEFAULT_URL_MAPPING, 'url')
......@@ -361,7 +361,7 @@ def load_schema(url, resolver=None, resolve_references=False):
if json_id is None:
json_id = url
if isinstance(node, dict) and '$ref' in node:
suburl = generic_io.resolve_uri(json_id, node['$ref'])
suburl = generic_io.resolve_uri(json_id, resolver(node['$ref']))
parts = urlparse.urlparse(suburl)
fragment = parts.fragment
if len(fragment):
......
......@@ -211,6 +211,21 @@ def test_auto_inline_recursive(tmpdir):
tree, tmpdir, check_asdf, None, {'auto_inline': 64})
def test_copy_inline():
    # Regression test for duplicating inline arrays (#370, fixed in 1.3.1):
    # assigning the same inline ndarray node to two keys of a tree and then
    # writing the file must not raise.
    yaml = """
x0: !core/ndarray-1.0.0
  data: [-1.0, 1.0]
    """
    buff = helpers.yaml_to_asdf(yaml)

    with asdf.AsdfFile.open(buff) as infile:
        with asdf.AsdfFile() as f:
            # 'a' and 'b' reference the same inline array object.
            f.tree['a'] = infile.tree['x0']
            f.tree['b'] = f.tree['a']
            f.write_to(io.BytesIO())
def test_table(tmpdir):
table = np.array(
[(0, 1, (2, 3)), (4, 5, (6, 7))],
......
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import io
import os
import re
import yaml
import pytest
import numpy as np
import astropy
from astropy.tests.helper import catch_warnings
import asdf
from asdf.tests import helpers, CustomTestType
from asdf import asdftypes
from asdf import block
from asdf import extension
from asdf import schema
from asdf import treeutil
from asdf import util
from asdf import versioning
try:
import gwcs
HAS_GWCS = True
except ImportError:
HAS_GWCS = False
class LabelMapperTestType(CustomTestType):
    """Placeholder tag type for ``transform/label_mapper``.

    The real implementation lives outside ASDF; this stand-in lets the
    schema validation tests run without warnings (see TestExtension).
    """
    version = '1.0.0'
    name = 'transform/label_mapper'
class RegionsSelectorTestType(CustomTestType):
    """Placeholder tag type for ``transform/regions_selector``.

    The real implementation lives outside ASDF; this stand-in lets the
    schema validation tests run without warnings (see TestExtension).
    """
    version = '1.0.0'
    name = 'transform/regions_selector'
class TestExtension(extension.BuiltinExtension):
    """Extension supplying placeholder implementations for tags whose real
    classes currently live in other repositories (such as GWCS) while their
    schemas are still shipped with ASDF.

    Without this extension, validating those schemas would emit warnings
    about unrecognized tag classes.  Eventually the schemas may move out of
    ASDF, or ASDF may provide abstract base classes for the tags, making
    this workaround unnecessary.
    """

    @property
    def types(self):
        # The placeholder tag classes defined above in this module.
        placeholder_types = [LabelMapperTestType, RegionsSelectorTestType]
        return placeholder_types

    @property
    def tag_mapping(self):
        # Map stsci.edu tag URIs onto the corresponding schema URLs.
        mapping = ('tag:stsci.edu:asdf',
                   'http://stsci.edu/schemas/asdf{tag_suffix}')
        return [mapping]
def generate_schema_list():
    """Yield the path of every schema file shipped with the package."""
    # Schema files that are deliberately excluded from validation testing.
    excluded = ('draft-01', 'asdf-schema-1.0.0')
    schema_root = os.path.join(os.path.dirname(__file__), '../schemas')
    for dirpath, _dirnames, filenames in os.walk(schema_root):
        for filename in filenames:
            if not filename.endswith('.yaml'):
                continue
            if os.path.splitext(filename)[0] in excluded:
                continue
            yield os.path.join(dirpath, filename)
def _assert_warnings(_warnings):
    """Assert that only the expected warnings were collected.

    With astropy older than 1.3.3, a single 'Failed to convert' warning
    mentioning the gwcs/astropy requirement is tolerated; with newer
    astropy no warnings are allowed at all.

    Parameters
    ----------
    _warnings : list
        Warning records captured by ``catch_warnings``.
    """
    def _version_tuple(version_string):
        # Numeric prefix of each dot-separated component.  The original
        # plain string comparison would misorder versions such as
        # '1.10.0' < '1.3.3'.
        parts = []
        for component in version_string.split('.'):
            match = re.match(r'\d+', component)
            if match is None:
                # Stop at the first non-numeric component (e.g. 'dev').
                break
            parts.append(int(match.group()))
        return tuple(parts)

    if _version_tuple(astropy.__version__) < (1, 3, 3):
        # Make sure at most only one warning occurred
        assert len(_warnings) <= 1, helpers.display_warnings(_warnings)
        # Make sure the warning was the one we expected
        if len(_warnings) == 1:
            message = str(_warnings[0].message)
            target_string = "gwcs and astropy-1.3.3 packages are required"
            assert message.startswith('Failed to convert'), \
                helpers.display_warnings(_warnings)
            assert target_string in str(_warnings[0].message), \
                helpers.display_warnings(_warnings)
    else:
        assert len(_warnings) == 0, helpers.display_warnings(_warnings)
def _find_standard_version(filename):
    """Return the first supported ASDF standard version whose version map
    lists the schema at *filename*, or the default version if none does.
    """
    # Slice from just past the first character of 'schemas': component [1]
    # is then the organization and the remainder the schema path, which
    # together form the tag URI.
    tail = filename[filename.find('schemas') + 1:]
    components = tail.split(os.path.sep)
    tag_uri = 'tag:{}:{}'.format(components[1], '/'.join(components[2:]))
    name, version = asdftypes.split_tag_version(tag_uri.replace('.yaml', ''))

    for standard_version in versioning.supported_versions:
        mapped = versioning.get_version_map(standard_version)['tags'].get(name)
        if mapped is not None and mapped == version:
            return standard_version

    return versioning.default_version
def generate_example_schemas():
    """Yield ``(schema_path, example)`` for every example in every schema."""

    def find_examples_in_schema(path):
        """Yield each example string found in the schema at *path*."""
        with open(path, 'rb') as fd:
            # NOTE(review): loader-less yaml.load is deprecated in
            # PyYAML >= 5.1; these are the package's own schema files,
            # so this is not an untrusted-input concern here.
            schema_tree = yaml.load(fd)

        for node in treeutil.iter_tree(schema_tree):
            if not isinstance(node, dict):
                continue
            examples = node.get('examples')
            if isinstance(examples, list):
                # Each example is a (description, example-source) pair.
                for _description, example in examples:
                    yield example

    for schema_path in generate_schema_list():
        for example in find_examples_in_schema(schema_path):
            yield (schema_path, example)
def pytest_generate_tests(metafunc):
    """Programmatically parametrize schema validation tests.

    This pytest hook parametrizes:

        test_validate_schema
        test_schema_example

    The 'yield' test functionality in pytest has been deprecated, and the
    @pytest.mark.parametrize decorator is not powerful enough for the kind
    of programmatic parametrization required here, hence this hook.
    """
    def schema_name(schema_path):
        """Informative tail (last three components) of a schema path."""
        normalized = os.path.normpath(schema_path)
        return os.path.sep.join(normalized.split(os.path.sep)[-3:])

    def schema_example_id(argval):
        """Test ID for one argument of test_schema_example."""
        # NOTE(review): detecting the path argument via a leading '/'
        # assumes POSIX-style paths -- confirm if Windows matters here.
        if argval[0] == '/':
            # First argument: identify the case by its schema name.
            return schema_name(argval)
        # Second argument: pytest produces IDs of the form
        # SCHEMA_NAME-example, numbering duplicates automatically when a
        # schema contains several examples.
        return "example"

    if metafunc.function is test_validate_schema:
        # Use the short schema name, not the full path, as the test ID.
        metafunc.parametrize('schema_path',
                             generate_schema_list(),
                             ids=schema_name)
    elif metafunc.function is test_schema_example:
        metafunc.parametrize('filename,example',
                             generate_example_schemas(),
                             ids=schema_example_id)
def test_validate_schema(schema_path):
    """Check that the schema file at *schema_path* is itself valid.

    Called once per schema file via the parametrization set up in
    pytest_generate_tests.
    """
    # Load with references resolved, then validate the schema itself.
    resolved_tree = schema.load_schema(schema_path, resolve_references=True)
    schema.check_schema(resolved_tree)
def test_schema_example(filename, example):
    """Check that one example embedded in a schema file validates.

    Parameters
    ----------
    filename : str
        Path of the schema file containing the example.
    example : str
        YAML source of the example itself.

    Called once per (schema, example) pair via the parametrization set up
    in pytest_generate_tests.
    """
    if not HAS_GWCS and re.search(r'frame-\d\.\d\.\d\.yaml', filename):
        # Frame examples need gwcs.  The original `return pytest.skip`
        # returned the uncalled function, silently passing the test
        # instead of skipping it; pytest.skip must be called to raise.
        pytest.skip('gwcs is not installed')

    standard_version = _find_standard_version(filename)

    # Make sure that the examples in the schema files (and thus the
    # ASDF standard document) are valid.
    buff = helpers.yaml_to_asdf(
        'example: ' + example.strip(), standard_version=standard_version)
    ff = asdf.AsdfFile(
        uri=util.filepath_to_url(os.path.abspath(filename)),
        extensions=TestExtension())

    # Fake an external file
    ff2 = asdf.AsdfFile({'data': np.empty((1024*1024*8), dtype=np.uint8)})

    ff._external_asdf_by_uri[
        util.filepath_to_url(
            os.path.abspath(
                os.path.join(
                    os.path.dirname(filename), 'external.asdf')))] = ff2

    # Add some dummy blocks so that the ndarray examples work
    for i in range(3):
        b = block.Block(np.zeros((1024*1024*8), dtype=np.uint8))
        b._used = True
        ff.blocks.add(b)
    # Mark the last dummy block as streamed -- presumably because only the
    # final block of an ASDF file may be streamed; confirm against the
    # block-manager implementation.
    b._array_storage = "streamed"

    try:
        with catch_warnings() as w:
            ff._open_impl(ff, buff)
        # Do not tolerate any warnings that occur during schema validation,
        # other than a few that we expect to occur under certain circumstances
        _assert_warnings(w)
    except Exception:
        # Identify the offending schema before re-raising.  (Narrowed from
        # a bare `except:` so BaseExceptions pass through untouched.)
        print("From file:", filename)
        raise

    # Just test we can write it out. A roundtrip test
    # wouldn't always yield the correct result, so those have
    # to be covered by "real" unit tests.
    if b'external.asdf' not in buff.getvalue():
        buff = io.BytesIO()
        ff.write_to(buff)
......@@ -19,6 +19,7 @@ from .. import block
from .. import constants
from .. import generic_io
from .. import treeutil
from .. import versioning
from ..tests.helpers import assert_tree_match
......@@ -1147,6 +1148,15 @@ foo : bar
assert version.patch == 0
def test_default_version():
    # See https://github.com/spacetelescope/asdf/issues/364
    version_map = versioning.get_version_map(versioning.default_version)
    ff = asdf.AsdfFile()
    # A freshly created file must report the FILE_FORMAT entry of the
    # default standard's version map (regression test for #365).
    assert ff.file_format_version == version_map['FILE_FORMAT']
def test_fd_not_seekable():
data = np.ones(1024)
b = block.Block(data=data)
......
This diff is collapsed.
......@@ -4,13 +4,7 @@
from __future__ import absolute_import, division, unicode_literals, print_function
import io
try:
import astropy
except ImportError:
HAS_ASTROPY = False
else:
HAS_ASTROPY = True
from collections import OrderedDict
import numpy as np
......@@ -20,8 +14,6 @@ import six
import yaml
from collections import OrderedDict
from .. import asdf
from .. import tagged
from .. import treeutil
......@@ -129,23 +121,19 @@ def test_tags_removed_after_load(tmpdir):
helpers.assert_roundtrip_tree(tree, tmpdir, check_asdf)
@pytest.mark.skipif('not HAS_ASTROPY')
def test_explicit_tags():
yaml = """#ASDF 1.0.0
yaml = """#ASDF {}
%YAML 1.1
--- !<tag:stsci.edu:asdf/core/asdf-1.0.0>
unit: !<tag:stsci.edu:asdf/unit/unit-1.0.0> m
foo: !<tag:stsci.edu:asdf/core/ndarray-1.0.0> [1, 2, 3]
...
"""
from astropy import units as u
# Check that fully-qualified explicit tags work
""".format(asdf.versioning.default_version)
# Check that fully qualified explicit tags work
buff = helpers.yaml_to_asdf(yaml, yaml_headers=False)
ff = asdf.AsdfFile.open(buff)
assert isinstance(ff.tree['unit'], u.UnitBase)
with asdf.AsdfFile.open(buff) as ff:
assert all(ff.tree['foo'] == [1, 2, 3])
def test_yaml_internal_reference(tmpdir):
......
# Autogenerated by Astropy-affiliated package asdf's setup.py on 2017-10-24 16:36:57
# Autogenerated by Astropy-affiliated package asdf's setup.py on 2017-11-03 15:55:17
from __future__ import unicode_literals
import datetime
......@@ -186,8 +186,8 @@ def _get_repo_path(pathname, levels=None): # pragma: no cover
return None
_packagename = "asdf"
_last_generated_version = "1.3.0"
_last_githash = "5d88fca400d2c7adb85248f0dabd6dc000aef19b"
_last_generated_version = "1.3.1"
_last_githash = "93d796bb44d3df8b6131d8560aeaca32471b8719"
# Determine where the source code for this module
# lives. If __file__ is not a filesystem path then
......@@ -205,10 +205,10 @@ else:
major = 1
minor = 3
bugfix = 0
bugfix = 1
release = True
timestamp = datetime.datetime(2017, 10, 24, 16, 36, 57)
timestamp = datetime.datetime(2017, 11, 3, 15, 55, 17)
debug = False
try:
......
......@@ -51,7 +51,7 @@ LONG_DESCRIPTION = package.__doc__
builtins._PACKAGE_NAME_ = 'asdf'
# VERSION should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386)
VERSION = '1.3.0'
VERSION = '1.3.1'
# Indicates if this version is a release version
RELEASE = 'dev' not in VERSION
......@@ -120,7 +120,7 @@ setup(name=PACKAGENAME,
scripts=scripts,
python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*',
install_requires=[
'semantic_version>=2.6.0',
'semantic_version>=2.3.1',
'pyyaml>=3.10',
'jsonschema>=2.3.0',
'six>=1.9.0',
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment