Skip to content
Commits on Source (18)
......@@ -9,11 +9,16 @@ include tests/wf/*
include tests/override/*
include tests/checker_wf/*
include tests/subgraph/*
include tests/trs/*
include cwltool/schemas/v1.0/*.yml
include cwltool/schemas/v1.0/*.yml
include cwltool/schemas/v1.0/*.md
include cwltool/schemas/v1.0/salad/schema_salad/metaschema/*.yml
include cwltool/schemas/v1.0/salad/schema_salad/metaschema/*.md
include cwltool/schemas/v1.1/*.yml
include cwltool/schemas/v1.1/*.md
include cwltool/schemas/v1.1/salad/schema_salad/metaschema/*.yml
include cwltool/schemas/v1.1/salad/schema_salad/metaschema/*.md
include cwltool/schemas/v1.1.0-dev1/*.yml
include cwltool/schemas/v1.1.0-dev1/*.md
include cwltool/schemas/v1.1.0-dev1/salad/schema_salad/metaschema/*.yml
......
......@@ -27,11 +27,11 @@ MODULE=cwltool
# `[[` conditional expressions.
PYSOURCES=$(wildcard ${MODULE}/**.py tests/*.py) setup.py
DEVPKGS=pycodestyle diff_cover autopep8 pylint coverage pydocstyle flake8 \
pytest pytest-xdist isort
pytest-xdist==1.27.0 isort wheel -rtest-requirements.txt
DEBDEVPKGS=pep8 python-autopep8 pylint python-coverage pydocstyle sloccount \
python-flake8 python-mock shellcheck
VERSION=1.0.$(shell date +%Y%m%d%H%M%S --utc --date=`git log --first-parent \
--max-count=1 --format=format:%cI`)
VERSION=1.0.$(shell TZ=UTC git log --first-parent --max-count=1 \
--format=format:%cd --date=format-local:%Y%m%d%H%M%S)
mkfile_dir := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
UNAME_S=$(shell uname -s)
ifeq ($(UNAME_S),Linux)
......@@ -51,7 +51,7 @@ help: Makefile
## install-dep : install most of the development dependencies via pip
install-dep:
pip install --upgrade $(DEVPKGS) -rtest-requirements.txt
pip install --upgrade $(DEVPKGS)
## install-deb-dep: install most of the dev dependencies via apt-get
install-deb-dep:
......@@ -100,13 +100,13 @@ diff_pycodestyle_report: pycodestyle_report.txt
pep257: pydocstyle
## pydocstyle : check Python code style
pydocstyle: $(PYSOURCES)
pydocstyle --ignore=D100,D101,D102,D103 $^ || true
pydocstyle --add-ignore=D100,D101,D102,D103 $^ || true
pydocstyle_report.txt: $(PYSOURCES)
pydocstyle setup.py $^ > $@ 2>&1 || true
diff_pydocstyle_report: pydocstyle_report.txt
diff-quality --violations=pycodestyle $^
diff-quality --violations=pycodestyle --fail-under=100 $^
## autopep8 : fix most Python code indentation and formatting
autopep8: $(PYSOURCES)
......@@ -175,9 +175,6 @@ mypy2: ${PYSOURCES}
rm -Rf typeshed/2and3/ruamel/yaml
ln -s $(shell python -c 'from __future__ import print_function; import ruamel.yaml; import os.path; print(os.path.dirname(ruamel.yaml.__file__))') \
typeshed/2and3/ruamel/yaml
rm -Rf typeshed/2and3/schema_salad
ln -s $(shell python -c 'from __future__ import print_function; import schema_salad; import os.path; print(os.path.dirname(schema_salad.__file__))') \
typeshed/2and3/schema_salad
MYPYPATH=$$MYPYPATH:typeshed/2.7:typeshed/2and3 mypy --py2 --disallow-untyped-calls \
--warn-redundant-casts \
cwltool
......@@ -186,9 +183,6 @@ mypy3: ${PYSOURCES}
rm -Rf typeshed/2and3/ruamel/yaml
ln -s $(shell python3 -c 'from __future__ import print_function; import ruamel.yaml; import os.path; print(os.path.dirname(ruamel.yaml.__file__))') \
typeshed/2and3/ruamel/yaml
rm -Rf typeshed/2and3/schema_salad
ln -s $(shell python3 -c 'from __future__ import print_function; import schema_salad; import os.path; print(os.path.dirname(schema_salad.__file__))') \
typeshed/2and3/schema_salad
MYPYPATH=$$MYPYPATH:typeshed/3:typeshed/2and3 mypy --disallow-untyped-calls \
--warn-redundant-casts \
cwltool
......
Metadata-Version: 2.1
Name: cwltool
Version: 1.0.20181217162649
Version: 1.0.20190815141648
Summary: Common workflow language reference implementation
Home-page: https://github.com/common-workflow-language/cwltool
Author: Common workflow language working group
......@@ -11,7 +11,7 @@ Description: ==================================================================
Common Workflow Language tool description reference implementation
==================================================================
CWL conformance tests: |Conformance Status| |Linux Status| |Windows Status| |Coverage Status|
CWL conformance tests: |Conformance Status| |Linux Status| |Windows Status| |Coverage Status| |Downloads|
.. |Conformance Status| image:: https://ci.commonwl.org/buildStatus/icon?job=cwltool-conformance
......@@ -26,12 +26,15 @@ Description: ==================================================================
.. |Coverage Status| image:: https://img.shields.io/codecov/c/github/common-workflow-language/cwltool.svg
:target: https://codecov.io/gh/common-workflow-language/cwltool
.. |Downloads| image:: https://pepy.tech/badge/cwltool/month
:target: https://pepy.tech/project/cwltool
This is the reference implementation of the Common Workflow Language. It is
intended to be feature complete and provide comprehensive validation of CWL
files as well as provide other tools related to working with CWL.
This is written and tested for
`Python <https://www.python.org/>`_ ``2.7 and 3.x {x = 4, 5, 6, 7}``
`Python <https://www.python.org/>`_ ``2.7 and 3.x {x = 5, 6, 7}``
The reference implementation consists of two packages. The ``cwltool`` package
is the primary Python module containing the reference implementation in the
......@@ -44,7 +47,21 @@ Description: ==================================================================
Install
-------
It is highly recommended to setup virtual environment before installing `cwltool`:
Your operating system may offer cwltool directly. For `Debian <https://tracker.debian.org/pkg/cwltool>`_ or `Ubuntu <https://launchpad.net/ubuntu/+source/cwltool>`_ try
.. code:: bash
apt-get install cwltool
For MacOS X, other UNIXes, or Windows, use the packages prepared by the Bioconda project. Please follow the Bioconda installation instructions (https://bioconda.github.io/), then perform:
.. code:: bash
conda install -c bioconda cwltool
Under the hood, conda sets up virtual environments before installing `cwltool` to
avoid conflicting versions of the same library. When installing cwltool directly,
it is recommended to do the same manually:
.. code:: bash
......@@ -130,10 +147,14 @@ Description: ==================================================================
cwltool --user-space-docker-cmd=dx-docker https://raw.githubusercontent.com/common-workflow-language/common-workflow-language/master/v1.0/v1.0/test-cwl-out2.cwl https://github.com/common-workflow-language/common-workflow-language/blob/master/v1.0/v1.0/empty.json
``cwltool`` can use `Singularity <http://singularity.lbl.gov/>`_ as a Docker container runtime, an experimental feature.
Singularity will run software containers specified in ``DockerRequirement`` and therefore works with Docker images only,
native Singularity images are not supported.
To use Singularity as the Docker container runtime, provide ``--singularity`` command line option to ``cwltool``.
``cwltool`` can use `Singularity <http://singularity.lbl.gov/>`_ version 2.6.1
or later as a Docker container runtime.
``cwltool`` with Singularity will run software containers specified in
``DockerRequirement`` and therefore works with Docker images only, native
Singularity images are not supported. To use Singularity as the Docker container
runtime, provide ``--singularity`` command line option to ``cwltool``.
With Singularity, ``cwltool`` can pass all CWL v1.0 conformance tests, except
those involving Docker container ENTRYPOINTs.
.. code:: bash
......@@ -504,7 +525,7 @@ Description: ==================================================================
- Running basic tests ``(/tests)``:
To run the basis tests after installing `cwltool` execute the following:
To run the basic tests after installing `cwltool` execute the following:
.. code:: bash
......@@ -730,7 +751,6 @@ Classifier: Operating System :: Microsoft :: Windows :: Windows 8.1
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
......
......@@ -2,7 +2,7 @@
Common Workflow Language tool description reference implementation
==================================================================
CWL conformance tests: |Conformance Status| |Linux Status| |Windows Status| |Coverage Status|
CWL conformance tests: |Conformance Status| |Linux Status| |Windows Status| |Coverage Status| |Downloads|
.. |Conformance Status| image:: https://ci.commonwl.org/buildStatus/icon?job=cwltool-conformance
......@@ -17,12 +17,15 @@ CWL conformance tests: |Conformance Status| |Linux Status| |Windows Status| |Cov
.. |Coverage Status| image:: https://img.shields.io/codecov/c/github/common-workflow-language/cwltool.svg
:target: https://codecov.io/gh/common-workflow-language/cwltool
.. |Downloads| image:: https://pepy.tech/badge/cwltool/month
:target: https://pepy.tech/project/cwltool
This is the reference implementation of the Common Workflow Language. It is
intended to be feature complete and provide comprehensive validation of CWL
files as well as provide other tools related to working with CWL.
This is written and tested for
`Python <https://www.python.org/>`_ ``2.7 and 3.x {x = 4, 5, 6, 7}``
`Python <https://www.python.org/>`_ ``2.7 and 3.x {x = 5, 6, 7}``
The reference implementation consists of two packages. The ``cwltool`` package
is the primary Python module containing the reference implementation in the
......@@ -35,7 +38,21 @@ default CWL interpreter installed on a host.
Install
-------
It is highly recommended to setup virtual environment before installing `cwltool`:
Your operating system may offer cwltool directly. For `Debian <https://tracker.debian.org/pkg/cwltool>`_ or `Ubuntu <https://launchpad.net/ubuntu/+source/cwltool>`_ try
.. code:: bash
apt-get install cwltool
For MacOS X, other UNIXes, or Windows, use the packages prepared by the Bioconda project. Please follow the Bioconda installation instructions (https://bioconda.github.io/), then perform:
.. code:: bash
conda install -c bioconda cwltool
Under the hood, conda sets up virtual environments before installing `cwltool` to
avoid conflicting versions of the same library. When installing cwltool directly,
it is recommended to do the same manually:
.. code:: bash
......@@ -121,10 +138,14 @@ or
cwltool --user-space-docker-cmd=dx-docker https://raw.githubusercontent.com/common-workflow-language/common-workflow-language/master/v1.0/v1.0/test-cwl-out2.cwl https://github.com/common-workflow-language/common-workflow-language/blob/master/v1.0/v1.0/empty.json
``cwltool`` can use `Singularity <http://singularity.lbl.gov/>`_ as a Docker container runtime, an experimental feature.
Singularity will run software containers specified in ``DockerRequirement`` and therefore works with Docker images only,
native Singularity images are not supported.
To use Singularity as the Docker container runtime, provide ``--singularity`` command line option to ``cwltool``.
``cwltool`` can use `Singularity <http://singularity.lbl.gov/>`_ version 2.6.1
or later as a Docker container runtime.
``cwltool`` with Singularity will run software containers specified in
``DockerRequirement`` and therefore works with Docker images only, native
Singularity images are not supported. To use Singularity as the Docker container
runtime, provide ``--singularity`` command line option to ``cwltool``.
With Singularity, ``cwltool`` can pass all CWL v1.0 conformance tests, except
those involving Docker container ENTRYPOINTs.
.. code:: bash
......@@ -495,7 +516,7 @@ Running tests locally
- Running basic tests ``(/tests)``:
To run the basis tests after installing `cwltool` execute the following:
To run the basic tests after installing `cwltool` execute the following:
.. code:: bash
......
Metadata-Version: 2.1
Name: cwltool
Version: 1.0.20181217162649
Version: 1.0.20190815141648
Summary: Common workflow language reference implementation
Home-page: https://github.com/common-workflow-language/cwltool
Author: Common workflow language working group
......@@ -11,7 +11,7 @@ Description: ==================================================================
Common Workflow Language tool description reference implementation
==================================================================
CWL conformance tests: |Conformance Status| |Linux Status| |Windows Status| |Coverage Status|
CWL conformance tests: |Conformance Status| |Linux Status| |Windows Status| |Coverage Status| |Downloads|
.. |Conformance Status| image:: https://ci.commonwl.org/buildStatus/icon?job=cwltool-conformance
......@@ -26,12 +26,15 @@ Description: ==================================================================
.. |Coverage Status| image:: https://img.shields.io/codecov/c/github/common-workflow-language/cwltool.svg
:target: https://codecov.io/gh/common-workflow-language/cwltool
.. |Downloads| image:: https://pepy.tech/badge/cwltool/month
:target: https://pepy.tech/project/cwltool
This is the reference implementation of the Common Workflow Language. It is
intended to be feature complete and provide comprehensive validation of CWL
files as well as provide other tools related to working with CWL.
This is written and tested for
`Python <https://www.python.org/>`_ ``2.7 and 3.x {x = 4, 5, 6, 7}``
`Python <https://www.python.org/>`_ ``2.7 and 3.x {x = 5, 6, 7}``
The reference implementation consists of two packages. The ``cwltool`` package
is the primary Python module containing the reference implementation in the
......@@ -44,7 +47,21 @@ Description: ==================================================================
Install
-------
It is highly recommended to setup virtual environment before installing `cwltool`:
Your operating system may offer cwltool directly. For `Debian <https://tracker.debian.org/pkg/cwltool>`_ or `Ubuntu <https://launchpad.net/ubuntu/+source/cwltool>`_ try
.. code:: bash
apt-get install cwltool
For MacOS X, other UNIXes, or Windows, use the packages prepared by the Bioconda project. Please follow the Bioconda installation instructions (https://bioconda.github.io/), then perform:
.. code:: bash
conda install -c bioconda cwltool
Under the hood, conda sets up virtual environments before installing `cwltool` to
avoid conflicting versions of the same library. When installing cwltool directly,
it is recommended to do the same manually:
.. code:: bash
......@@ -130,10 +147,14 @@ Description: ==================================================================
cwltool --user-space-docker-cmd=dx-docker https://raw.githubusercontent.com/common-workflow-language/common-workflow-language/master/v1.0/v1.0/test-cwl-out2.cwl https://github.com/common-workflow-language/common-workflow-language/blob/master/v1.0/v1.0/empty.json
``cwltool`` can use `Singularity <http://singularity.lbl.gov/>`_ as a Docker container runtime, an experimental feature.
Singularity will run software containers specified in ``DockerRequirement`` and therefore works with Docker images only,
native Singularity images are not supported.
To use Singularity as the Docker container runtime, provide ``--singularity`` command line option to ``cwltool``.
``cwltool`` can use `Singularity <http://singularity.lbl.gov/>`_ version 2.6.1
or later as a Docker container runtime.
``cwltool`` with Singularity will run software containers specified in
``DockerRequirement`` and therefore works with Docker images only, native
Singularity images are not supported. To use Singularity as the Docker container
runtime, provide ``--singularity`` command line option to ``cwltool``.
With Singularity, ``cwltool`` can pass all CWL v1.0 conformance tests, except
those involving Docker container ENTRYPOINTs.
.. code:: bash
......@@ -504,7 +525,7 @@ Description: ==================================================================
- Running basic tests ``(/tests)``:
To run the basis tests after installing `cwltool` execute the following:
To run the basic tests after installing `cwltool` execute the following:
.. code:: bash
......@@ -730,7 +751,6 @@ Classifier: Operating System :: Microsoft :: Windows :: Windows 8.1
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
......
......@@ -96,18 +96,29 @@ cwltool/schemas/v1.0/salad/schema_salad/metaschema/vocab_res.yml
cwltool/schemas/v1.0/salad/schema_salad/metaschema/vocab_res_proc.yml
cwltool/schemas/v1.0/salad/schema_salad/metaschema/vocab_res_schema.yml
cwltool/schemas/v1.0/salad/schema_salad/metaschema/vocab_res_src.yml
cwltool/schemas/v1.1/CODE_OF_CONDUCT.md
cwltool/schemas/v1.1/CONFORMANCE_TESTS.md
cwltool/schemas/v1.1/CommandLineTool-standalone.yml
cwltool/schemas/v1.1/CommandLineTool.yml
cwltool/schemas/v1.1/CommonWorkflowLanguage.yml
cwltool/schemas/v1.1/Process.yml
cwltool/schemas/v1.1/README.md
cwltool/schemas/v1.1/Workflow.yml
cwltool/schemas/v1.1/concepts.md
cwltool/schemas/v1.1/contrib.md
cwltool/schemas/v1.1/index.md
cwltool/schemas/v1.1/intro.md
cwltool/schemas/v1.1/invocation.md
cwltool/schemas/v1.1.0-dev1/CommandLineTool-standalone.yml
cwltool/schemas/v1.1.0-dev1/CommandLineTool.yml
cwltool/schemas/v1.1.0-dev1/CommonWorkflowLanguage.yml
cwltool/schemas/v1.1.0-dev1/Process.yml
cwltool/schemas/v1.1.0-dev1/README.md
cwltool/schemas/v1.1.0-dev1/UserGuide.yml
cwltool/schemas/v1.1.0-dev1/Workflow.yml
cwltool/schemas/v1.1.0-dev1/concepts.md
cwltool/schemas/v1.1.0-dev1/contrib.md
cwltool/schemas/v1.1.0-dev1/intro.md
cwltool/schemas/v1.1.0-dev1/invocation.md
cwltool/schemas/v1.1.0-dev1/userguide-intro.md
cwltool/schemas/v1.1.0-dev1/salad/schema_salad/metaschema/field_name.yml
cwltool/schemas/v1.1.0-dev1/salad/schema_salad/metaschema/field_name_proc.yml
cwltool/schemas/v1.1.0-dev1/salad/schema_salad/metaschema/field_name_schema.yml
......@@ -136,13 +147,45 @@ cwltool/schemas/v1.1.0-dev1/salad/schema_salad/metaschema/vocab_res.yml
cwltool/schemas/v1.1.0-dev1/salad/schema_salad/metaschema/vocab_res_proc.yml
cwltool/schemas/v1.1.0-dev1/salad/schema_salad/metaschema/vocab_res_schema.yml
cwltool/schemas/v1.1.0-dev1/salad/schema_salad/metaschema/vocab_res_src.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/field_name.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/field_name_proc.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/field_name_schema.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/field_name_src.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/ident_res.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/ident_res_proc.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/ident_res_schema.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/ident_res_src.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/import_include.md
cwltool/schemas/v1.1/salad/schema_salad/metaschema/link_res.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/link_res_proc.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/link_res_schema.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/link_res_src.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/map_res.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/map_res_proc.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/map_res_schema.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/map_res_src.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/metaschema.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/metaschema_base.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/salad.md
cwltool/schemas/v1.1/salad/schema_salad/metaschema/typedsl_res.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/typedsl_res_proc.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/typedsl_res_schema.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/typedsl_res_src.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/vocab_res.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/vocab_res_proc.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/vocab_res_schema.yml
cwltool/schemas/v1.1/salad/schema_salad/metaschema/vocab_res_src.yml
tests/2.fasta
tests/2.fastq
tests/__init__.py
tests/bundle-context.jsonld
tests/debian_image_id.cwl
tests/echo-job.yaml
tests/echo-position-expr-job.yml
tests/echo-position-expr.cwl
tests/echo.cwl
tests/echo_broken_outputs.cwl
tests/env.cwl
tests/listing-job.yml
tests/listing2-job.yml
tests/non_portable.cwl
......@@ -151,10 +194,14 @@ tests/portable.cwl
tests/random_lines.cwl
tests/random_lines_job.json
tests/random_lines_mapping.cwl
tests/secondary-files-job.yml
tests/secondary-files-string-v1.cwl
tests/secondary-files.cwl
tests/seqtk_seq.cwl
tests/seqtk_seq_job.json
tests/seqtk_seq_with_docker.cwl
tests/seqtk_seq_wrong_name.cwl
tests/sing_pullfolder_test.cwl
tests/test_anon_types.py
tests/test_bad_outputs_wf.cwl
tests/test_check.py
......@@ -175,6 +222,7 @@ tests/test_fetch.py
tests/test_http_input.py
tests/test_iwdr.py
tests/test_js_sandbox.py
tests/test_load_tool.py
tests/test_override.py
tests/test_pack.py
tests/test_parallel.py
......@@ -188,12 +236,14 @@ tests/test_singularity.py
tests/test_subgraph.py
tests/test_target.py
tests/test_toolargparse.py
tests/test_trs.py
tests/test_udocker.py
tests/test_validate_js.py
tests/utf_doc_example.cwl
tests/util.py
tests/checker_wf/broken-wf.cwl
tests/checker_wf/broken-wf2.cwl
tests/checker_wf/broken-wf3.cwl
tests/checker_wf/cat.cwl
tests/checker_wf/echo.cwl
tests/checker_wf/functional-wf.cwl
......@@ -207,6 +257,7 @@ tests/override/env-tool_cwl-requirement_override.yaml
tests/override/env-tool_cwl-requirement_override_default.yaml
tests/override/env-tool_cwl-requirement_override_default_wrongver.yaml
tests/override/env-tool_v1.1.0-dev1.cwl
tests/override/env-tool_v1.1.cwl
tests/override/ov.yml
tests/override/ov2.yml
tests/override/ov3.yml
......@@ -231,6 +282,9 @@ tests/tmp4/alpha/charlie
tests/tmp4/alpha/delta
tests/tmp4/alpha/echo
tests/tmp4/alpha/foxtrot
tests/trs/Dockstore.cwl
tests/trs/md5sum-tool.cwl
tests/trs/md5sum-workflow.cwl
tests/wf/1st-workflow.cwl
tests/wf/910.cwl
tests/wf/arguments.cwl
......@@ -254,6 +308,7 @@ tests/wf/formattest.cwl
tests/wf/hello-workflow.cwl
tests/wf/hello.txt
tests/wf/hello_single_tool.cwl
tests/wf/iwdr-empty.cwl
tests/wf/iwdr-entry.cwl
tests/wf/iwdr_permutations.cwl
tests/wf/iwdr_permutations_inplace.yml
......@@ -273,6 +328,7 @@ tests/wf/nested.cwl
tests/wf/networkaccess-fail.cwl
tests/wf/networkaccess.cwl
tests/wf/no-parameters-echo.cwl
tests/wf/optional-numerical-output-0.cwl
tests/wf/override-no-secrets.yml
tests/wf/parseInt-tool.cwl
tests/wf/revsort-job.json
......
setuptools
requests>=2.6.1
ruamel.yaml<=0.15.77,>=0.12.4
ruamel.yaml<=0.15.97,>=0.12.4
rdflib<4.3.0,>=4.2.2
shellescape<3.5,>=3.4.1
schema-salad<3.1,>=3.0
schema-salad<5,>=4.5
mypy-extensions
six>=1.9.0
psutil
......@@ -11,15 +11,15 @@ scandir
prov==1.5.1
bagit>=1.6.4
typing-extensions
coloredlogs
future
pathlib2!=2.3.1
[:os.name=="posix" and python_version<"3.5"]
subprocess32>=3.5.0
[:python_version<"3"]
pathlib2==2.3.2
[:python_version<"3.6"]
typing>=3.5.3
[deps]
galaxy-lib>=17.09.9
galaxy-lib<=18.9.2,>=17.09.9
"""Reference implementation of the CWL standards."""
from __future__ import absolute_import
import warnings
import sys
__author__ = 'pamstutz@veritasgenetics.com'
class CWLToolDeprecationWarning(Warning):
pass
# Hate this?
# set PYTHONWARNINGS=ignore:DEPRECATION::cwltool.__init__
if sys.version_info < (3, 0):
warnings.warn("""
DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020.
Please upgrade your Python as the Python 2.7 version of cwltool won't be
maintained after that date.
""", category=CWLToolDeprecationWarning)
......@@ -57,7 +57,7 @@ def arg_parser(): # type: () -> argparse.ArgumentParser
cidgroup.add_argument(
"--cidfile-dir", type=Text, help="Store the Docker "
"container ID into a file in the specifed directory.",
"container ID into a file in the specified directory.",
default=None, dest="cidfile_dir")
cidgroup.add_argument(
......@@ -105,10 +105,10 @@ def arg_parser(): # type: () -> argparse.ArgumentParser
exgroup = parser.add_mutually_exclusive_group()
exgroup.add_argument("--enable-pull", default=True, action="store_true",
help="Try to pull Docker images", dest="enable_pull")
help="Try to pull Docker images", dest="pull_image")
exgroup.add_argument("--disable-pull", default=True, action="store_false",
help="Do not try to pull Docker images", dest="enable_pull")
help="Do not try to pull Docker images", dest="pull_image")
parser.add_argument("--rdf-serializer",
help="Output RDF serialization format used by --print-rdf (one of turtle (default), n3, nt, xml)",
......@@ -211,7 +211,7 @@ def arg_parser(): # type: () -> argparse.ArgumentParser
dockergroup.add_argument("--singularity", action="store_true",
default=False, help="[experimental] Use "
"Singularity runtime for running containers. "
"Requires Singularity v2.3.2+ and Linux with kernel "
"Requires Singularity v2.6.1+ and Linux with kernel "
"version v3.18+ or with overlayfs support "
"backported.")
dockergroup.add_argument("--no-container", action="store_false",
......@@ -250,6 +250,12 @@ def arg_parser(): # type: () -> argparse.ArgumentParser
help="Enable loading and running cwltool extensions "
"to CWL spec.", default=False)
exgroup = parser.add_mutually_exclusive_group()
exgroup.add_argument("--enable-color", action="store_true",
help="Enable colored logging (default true)", default=True)
exgroup.add_argument("--disable-color", action="store_false", dest="enable_color",
help="Disable colored logging (default false)")
parser.add_argument("--default-container",
help="Specify a default docker container that will be used if the workflow fails to specify one.")
parser.add_argument("--no-match-user", action="store_true",
......@@ -318,9 +324,7 @@ def arg_parser(): # type: () -> argparse.ArgumentParser
def get_default_args():
# type: () -> Dict[str, Any]
"""
Get default values of cwltool's command line options
"""
"""Get default values of cwltool's command line options."""
ap = arg_parser()
args = ap.parse_args([])
return vars(args)
......@@ -331,14 +335,19 @@ class FSAction(argparse.Action):
def __init__(self, option_strings, dest, nargs=None, **kwargs):
# type: (List[Text], Text, Any, **Any) -> None
"""Fail if nargs is used."""
if nargs is not None:
raise ValueError("nargs not allowed")
super(FSAction, self).__init__(option_strings, dest, **kwargs)
def __call__(self, parser, namespace, values, option_string=None):
# type: (argparse.ArgumentParser, argparse.Namespace, Union[AnyStr, Sequence[Any], None], AnyStr) -> None
def __call__(self,
parser, # type: argparse.ArgumentParser
namespace, # type: argparse.Namespace
values, # type: Union[AnyStr, Sequence[Any], None]
option_string=None # type: Optional[Text]
): # type: (...) -> None
setattr(namespace,
self.dest, # type: ignore
self.dest,
{"class": self.objclass,
"location": file_uri(str(os.path.abspath(cast(AnyStr, values))))})
......@@ -348,20 +357,21 @@ class FSAppendAction(argparse.Action):
def __init__(self, option_strings, dest, nargs=None, **kwargs):
# type: (List[Text], Text, Any, **Any) -> None
"""Initialize."""
if nargs is not None:
raise ValueError("nargs not allowed")
super(FSAppendAction, self).__init__(option_strings, dest, **kwargs)
def __call__(self, parser, namespace, values, option_string=None):
# type: (argparse.ArgumentParser, argparse.Namespace, Union[AnyStr, Sequence[Any], None], AnyStr) -> None
g = getattr(namespace,
self.dest # type: ignore
)
def __call__(self,
parser, # type: argparse.ArgumentParser
namespace, # type: argparse.Namespace
values, # type: Union[AnyStr, Sequence[Any], None]
option_string=None # type: Optional[Text]
): # type: (...) -> None
g = getattr(namespace, self.dest)
if not g:
g = []
setattr(namespace,
self.dest, # type: ignore
g)
setattr(namespace, self.dest, g)
g.append(
{"class": self.objclass,
"location": file_uri(str(os.path.abspath(cast(AnyStr, values))))})
......
from __future__ import absolute_import
import copy
import os
import logging
from typing import (Any, Callable, Dict, List, MutableMapping, MutableSequence,
Optional, Set, Tuple, Union)
from typing_extensions import Text, Type, TYPE_CHECKING # pylint: disable=unused-import
# move to a regular typing import when Python 3.3-3.6 is no longer supported
from rdflib import Graph, URIRef # pylint: disable=unused-import
from rdflib.namespace import OWL, RDFS
from ruamel.yaml.comments import CommentedMap
......@@ -12,7 +16,10 @@ from schema_salad import validate
from schema_salad.schema import Names, convert_to_dict
from schema_salad.avro.schema import make_avsc_object, Schema
from schema_salad.sourceline import SourceLine
from schema_salad.ref_resolver import uri_file_path
from six import iteritems, string_types
from future.utils import raise_from
from typing import IO
from typing_extensions import (TYPE_CHECKING, # pylint: disable=unused-import
Text, Type)
# move to a regular typing import when Python 3.3-3.6 is no longer supported
......@@ -22,7 +29,7 @@ from .errors import WorkflowException
from .loghandler import _logger
from .mutation import MutationManager # pylint: disable=unused-import
from .pathmapper import PathMapper # pylint: disable=unused-import
from .pathmapper import get_listing, normalizeFilesDirs, visit_class
from .pathmapper import CONTENT_LIMIT, get_listing, normalizeFilesDirs, visit_class
from .stdfsaccess import StdFsAccess # pylint: disable=unused-import
from .utils import aslist, docker_windows_path_adjust, json_dumps, onWindows
......@@ -30,18 +37,31 @@ from .utils import aslist, docker_windows_path_adjust, json_dumps, onWindows
if TYPE_CHECKING:
from .provenance import ProvenanceProfile # pylint: disable=unused-import
CONTENT_LIMIT = 64 * 1024
def content_limit_respected_read_bytes(f): # type: (IO[bytes]) -> bytes
contents = f.read(CONTENT_LIMIT + 1)
if len(contents) > CONTENT_LIMIT:
raise WorkflowException("loadContents handling encountered buffer that is exceeds maximum lenght of %d bytes" % CONTENT_LIMIT)
return contents
def content_limit_respected_read(f): # type: (IO[bytes]) -> Text
return content_limit_respected_read_bytes(f).decode("utf-8")
def substitute(value, replace): # type: (Text, Text) -> Text
if replace[0] == "^":
if replace.startswith("^"):
try:
return substitute(value[0:value.rindex('.')], replace[1:])
except ValueError:
# No extension to remove
return value + replace.lstrip("^")
return value + replace
def formatSubclassOf(fmt, cls, ontology, visited):
# type: (Text, Text, Optional[Graph], Set[Text]) -> bool
"""Determine if `fmt` is a subclass of `cls`."""
if URIRef(fmt) == URIRef(cls):
return True
......@@ -72,11 +92,11 @@ def formatSubclassOf(fmt, cls, ontology, visited):
return False
def check_format(actual_file, # type: Union[Dict[Text, Any], List, Text]
def check_format(actual_file, # type: Union[Dict[Text, Any], List[Dict[Text, Any]], Text]
input_formats, # type: Union[List[Text], Text]
ontology # type: Optional[Graph]
): # type: (...) -> None
""" Confirms that the format present is valid for the allowed formats."""
"""Confirm that the format present is valid for the allowed formats."""
for afile in aslist(actual_file):
if not afile:
continue
......@@ -93,9 +113,10 @@ def check_format(actual_file, # type: Union[Dict[Text, Any], List, Text]
json_dumps(afile, indent=4)))
class HasReqsHints(object):
def __init__(self):
self.requirements = [] # List[Dict[Text, Any]]
self.hints = [] # List[Dict[Text, Any]]
def __init__(self): # type: () -> None
"""Initialize this reqs decorator."""
self.requirements = [] # type: List[Dict[Text, Any]]
self.hints = [] # type: List[Dict[Text, Any]]
def get_requirement(self,
feature # type: Text
......@@ -110,7 +131,7 @@ class HasReqsHints(object):
class Builder(HasReqsHints):
def __init__(self,
job, # type: Dict[Text, Union[Dict[Text, Any], List, Text, None]]
job, # type: Dict[Text, expression.JSON]
files, # type: List[Dict[Text, Text]]
bindings, # type: List[Dict[Text, Any]]
schemaDefs, # type: Dict[Text, Dict[Text, Any]]
......@@ -130,9 +151,9 @@ class Builder(HasReqsHints):
loadListing, # type: Text
outdir, # type: Text
tmpdir, # type: Text
stagedir, # type: Text
stagedir # type: Text
): # type: (...) -> None
"""Initialize this Builder."""
self.job = job
self.files = files
self.bindings = bindings
......@@ -187,15 +208,20 @@ class Builder(HasReqsHints):
lead_pos = []
bindings = [] # type: List[MutableMapping[Text, Text]]
binding = None # type: Optional[MutableMapping[Text,Any]]
binding = {} # type: Union[MutableMapping[Text, Text], CommentedMap]
value_from_expression = False
if "inputBinding" in schema and isinstance(schema["inputBinding"], MutableMapping):
binding = CommentedMap(schema["inputBinding"].items())
assert binding is not None
bp = list(aslist(lead_pos))
if "position" in binding:
bp.extend(aslist(binding["position"]))
position = binding["position"]
if isinstance(position, str): # no need to test the CWL Version
# the schema for v1.0 only allow ints
binding['position'] = self.do_eval(position, context=datum)
bp.append(binding['position'])
else:
bp.extend(aslist(binding['position']))
else:
bp.append(0)
bp.extend(aslist(tail_pos))
......@@ -216,7 +242,6 @@ class Builder(HasReqsHints):
avsc = self.names.get_name(t["name"], "")
if not avsc:
avsc = make_avsc_object(convert_to_dict(t), self.names)
assert avsc is not None
if validate.validate(avsc, datum):
schema = copy.deepcopy(schema)
schema["type"] = t
......@@ -229,8 +254,7 @@ class Builder(HasReqsHints):
raise validate.ValidationException(u"'%s' is not a valid union %s" % (datum, schema["type"]))
elif isinstance(schema["type"], MutableMapping):
st = copy.deepcopy(schema["type"])
if binding is not None\
and "inputBinding" not in st\
if binding and "inputBinding" not in st\
and "type" in st\
and st["type"] == "array"\
and "itemSeparator" not in binding:
......@@ -256,7 +280,7 @@ class Builder(HasReqsHints):
if schema["type"] == "array":
for n, item in enumerate(datum):
b2 = None
if binding is not None:
if binding:
b2 = copy.deepcopy(binding)
b2["datum"] = item
itemschema = {
......@@ -268,9 +292,9 @@ class Builder(HasReqsHints):
itemschema[k] = schema[k]
bindings.extend(
self.bind_input(itemschema, item, lead_pos=n, tail_pos=tail_pos, discover_secondaryFiles=discover_secondaryFiles))
binding = None
binding = {}
def _capture_files(f):
def _capture_files(f): # type: (Dict[Text, Text]) -> Dict[Text, Text]
self.files.append(f)
return f
......@@ -278,17 +302,26 @@ class Builder(HasReqsHints):
self.files.append(datum)
if (binding and binding.get("loadContents")) or schema.get("loadContents"):
with self.fs_access.open(datum["location"], "rb") as f:
datum["contents"] = f.read(CONTENT_LIMIT).decode("utf-8")
datum["contents"] = content_limit_respected_read(f)
if "secondaryFiles" in schema:
if "secondaryFiles" not in datum:
datum["secondaryFiles"] = []
for sf in aslist(schema["secondaryFiles"]):
if isinstance(sf, MutableMapping) or "$(" in sf or "${" in sf:
sfpath = self.do_eval(sf, context=datum)
if 'required' in sf:
sf_required = self.do_eval(sf['required'], context=datum)
else:
sf_required = True
if "$(" in sf["pattern"] or "${" in sf["pattern"]:
sfpath = self.do_eval(sf["pattern"], context=datum)
else:
sfpath = substitute(datum["basename"], sf)
sfpath = substitute(datum["basename"], sf["pattern"])
for sfname in aslist(sfpath):
if not sfname:
continue
found = False
for d in datum["secondaryFiles"]:
if not d.get("basename"):
......@@ -296,14 +329,15 @@ class Builder(HasReqsHints):
if d["basename"] == sfname:
found = True
if not found:
sf_location = datum["location"][0:datum["location"].rindex("/")+1]+sfname
if isinstance(sfname, MutableMapping):
datum["secondaryFiles"].append(sfname)
elif discover_secondaryFiles:
elif discover_secondaryFiles and self.fs_access.exists(sf_location):
datum["secondaryFiles"].append({
"location": datum["location"][0:datum["location"].rindex("/")+1]+sfname,
"location": sf_location,
"basename": sfname,
"class": "File"})
else:
elif sf_required:
raise WorkflowException("Missing required secondary file '%s' from file object: %s" % (
sfname, json_dumps(datum, indent=4)))
......@@ -314,14 +348,14 @@ class Builder(HasReqsHints):
check_format(datum, self.do_eval(schema["format"]),
self.formatgraph)
except validate.ValidationException as ve:
raise WorkflowException(
raise_from(WorkflowException(
"Expected value of '%s' to have format %s but\n "
" %s" % (schema["name"], schema["format"], ve))
" %s" % (schema["name"], schema["format"], ve)), ve)
visit_class(datum.get("secondaryFiles", []), ("File", "Directory"), _capture_files)
if schema["type"] == "Directory":
ll = self.loadListing or (binding and binding.get("loadListing"))
ll = schema.get("loadListing") or self.loadListing
if ll and ll != "no_listing":
get_listing(self.fs_access, datum, (ll == "deep_listing"))
self.files.append(datum)
......@@ -330,14 +364,14 @@ class Builder(HasReqsHints):
visit_class(datum, ("File", "Directory"), _capture_files)
# Position to front of the sort key
if binding is not None:
if binding:
for bi in bindings:
bi["position"] = binding["position"] + bi["position"]
bindings.append(binding)
return bindings
def tostr(self, value): # type: (Any) -> Text
def tostr(self, value): # type: (Union[MutableMapping[Text, Text], Any]) -> Text
if isinstance(value, MutableMapping) and value.get("class") in ("File", "Directory"):
if "path" not in value:
raise WorkflowException(u"%s object missing \"path\": %s" % (value["class"], value))
......@@ -348,7 +382,6 @@ class Builder(HasReqsHints):
# docker_req is none only when there is no dockerRequirement
# mentioned in hints and Requirement
path = docker_windows_path_adjust(value["path"])
assert path is not None
return path
return value["path"]
else:
......@@ -393,8 +426,7 @@ class Builder(HasReqsHints):
if sep:
args.extend([prefix, self.tostr(j)])
else:
assert prefix is not None
args.append(prefix + self.tostr(j))
args.append("" if not prefix else prefix + self.tostr(j))
return [a for a in args if a is not None]
......
......@@ -23,9 +23,11 @@ def _get_type(tp):
def check_types(srctype, sinktype, linkMerge, valueFrom):
# type: (Any, Any, Optional[Text], Optional[Text]) -> Text
"""Check if the source and sink types are "pass", "warning", or "exception".
"""
Check if the source and sink types are correct.
Acceptable types are "pass", "warning", or "exception".
"""
if valueFrom is not None:
return "pass"
if linkMerge is None:
......@@ -44,9 +46,7 @@ def check_types(srctype, sinktype, linkMerge, valueFrom):
def merge_flatten_type(src):
# type: (Any) -> Any
"""Return the merge flattened type of the source type
"""
"""Return the merge flattened type of the source type."""
if isinstance(src, MutableSequence):
return [merge_flatten_type(t) for t in src]
if isinstance(src, MutableMapping) and src.get("type") == "array":
......@@ -55,7 +55,8 @@ def merge_flatten_type(src):
def can_assign_src_to_sink(src, sink, strict=False): # type: (Any, Any, bool) -> bool
"""Check for identical type specifications, ignoring extra keys like inputBinding.
"""
Check for identical type specifications, ignoring extra keys like inputBinding.
src: admissible source types
sink: admissible sink types
......@@ -63,7 +64,6 @@ def can_assign_src_to_sink(src, sink, strict=False): # type: (Any, Any, bool) -
In non-strict comparison, at least one source type must match one sink type.
In strict comparison, all source types must match at least one sink type.
"""
if src == "Any" or sink == "Any":
return True
if isinstance(src, MutableMapping) and isinstance(sink, MutableMapping):
......@@ -95,17 +95,17 @@ def can_assign_src_to_sink(src, sink, strict=False): # type: (Any, Any, bool) -
if can_assign_src_to_sink(src, this_sink):
return True
return False
return src == sink
return bool(src == sink)
def _compare_records(src, sink, strict=False):
# type: (MutableMapping[Text, Any], MutableMapping[Text, Any], bool) -> bool
"""Compare two records, ensuring they have compatible fields.
"""
Compare two records, ensuring they have compatible fields.
This handles normalizing record names, which will be relative to workflow
step, so that they can be compared.
"""
def _rec_fields(rec): # type: (MutableMapping[Text, Any]) -> MutableMapping[Text, Any]
out = {}
for field in rec["fields"]:
......@@ -126,11 +126,16 @@ def _compare_records(src, sink, strict=False):
return False
return True
def missing_subset(fullset, subset):  # type: (List[Any], List[Any]) -> List[Any]
    """Return the members of subset that are absent from fullset."""
    return [item for item in subset if item not in fullset]
def static_checker(workflow_inputs, workflow_outputs, step_inputs, step_outputs, param_to_step):
# type: (List[Dict[Text, Any]], List[Dict[Text, Any]], List[Dict[Text, Any]], List[Dict[Text, Any]], Dict[Text, Dict[Text, Any]]) -> None
"""Check if all source and sink types of a workflow are compatible before run time.
"""
"""Check if all source and sink types of a workflow are compatible before run time."""
# source parameters: workflow_inputs and step_outputs
# sink parameters: step_inputs and workflow_outputs
......@@ -152,19 +157,18 @@ def static_checker(workflow_inputs, workflow_outputs, step_inputs, step_outputs,
src = warning.src
sink = warning.sink
linkMerge = warning.linkMerge
if sink.get("secondaryFiles") and sorted(
sink.get("secondaryFiles", [])) != sorted(src.get("secondaryFiles", [])):
msg1 = "Sink '%s'" % (shortname(sink["id"]))
msg2 = SourceLine(sink.get("_tool_entry", sink), "secondaryFiles").makeError(
"expects secondaryFiles: %s but" % (sink.get("secondaryFiles")))
if "secondaryFiles" in src:
msg3 = SourceLine(src, "secondaryFiles").makeError(
"source '%s' has secondaryFiles %s." % (shortname(src["id"]), src.get("secondaryFiles")))
else:
sinksf = sorted([p["pattern"] for p in sink.get("secondaryFiles", []) if p.get("required", True)])
srcsf = sorted([p["pattern"] for p in src.get("secondaryFiles", [])])
# Every secondaryFile required by the sink, should be declared
# by the source
missing = missing_subset(srcsf, sinksf)
if missing:
msg1 = "Parameter '%s' requires secondaryFiles %s but" % (shortname(sink["id"]), missing)
msg3 = SourceLine(src, "id").makeError(
"source '%s' does not include secondaryFiles." % (shortname(src["id"])))
msg4 = SourceLine(src, "id").makeError("To fix, add secondaryFiles: %s to definition of '%s'." % (sink.get("secondaryFiles"), shortname(src["id"])))
msg = SourceLine(sink).makeError("%s\n%s" % (msg1, bullets([msg2, msg3, msg4], " ")))
"source '%s' does not provide those secondaryFiles." % (shortname(src["id"])))
msg4 = SourceLine(src.get("_tool_entry", src), "secondaryFiles").makeError("To resolve, add missing secondaryFiles patterns to definition of '%s' or" % (shortname(src["id"])))
msg5 = SourceLine(sink.get("_tool_entry", sink), "secondaryFiles").makeError("mark missing secondaryFiles in definition of '%s' as optional." % shortname(sink["id"]))
msg = SourceLine(sink).makeError("%s\n%s" % (msg1, bullets([msg3, msg4, msg5], " ")))
elif sink.get("not_connected"):
msg = SourceLine(sink, "type").makeError(
"'%s' is not an input parameter of %s, expected %s"
......@@ -218,10 +222,11 @@ SrcSink = namedtuple("SrcSink", ["src", "sink", "linkMerge"])
def check_all_types(src_dict, sinks, sourceField):
# type: (Dict[Text, Any], List[Dict[Text, Any]], Text) -> Dict[Text, List[SrcSink]]
# sourceField is either "source" or "outputSource"
"""Given a list of sinks, check if their types match with the types of their sources.
"""
Given a list of sinks, check if their types match with the types of their sources.
sourceField is either "source" or "outputSource"
"""
validation = {"warning": [], "exception": []} # type: Dict[Text, List[SrcSink]]
for sink in sinks:
if sourceField in sink:
......
This diff is collapsed.
......@@ -24,8 +24,8 @@ if TYPE_CHECKING:
ProvenanceProfile)
class ContextBase(object):
def __init__(self, kwargs=None):
# type: (Optional[Dict[str, Any]]) -> None
def __init__(self, kwargs=None): # type: (Optional[Dict[str, Any]]) -> None
"""Initialize."""
if kwargs:
for k, v in kwargs.items():
if hasattr(self, k):
......@@ -41,12 +41,12 @@ default_make_tool = make_tool_notimpl # type: Callable[[MutableMapping[Text, An
class LoadingContext(ContextBase):
def __init__(self, kwargs=None):
# type: (Optional[Dict[str, Any]]) -> None
def __init__(self, kwargs=None): # type: (Optional[Dict[str, Any]]) -> None
"""Initialize the LoadingContext from the kwargs."""
self.debug = False # type: bool
self.metadata = {} # type: Dict[Text, Any]
self.requirements = None
self.hints = None
self.requirements = None # type: Optional[List[Dict[Text, Any]]]
self.hints = None # type: Optional[List[Dict[Text, Any]]]
self.overrides_list = [] # type: List[Dict[Text, Any]]
self.loader = None # type: Optional[Loader]
self.avsc_names = None # type: Optional[schema.Names]
......@@ -64,6 +64,8 @@ class LoadingContext(ContextBase):
self.host_provenance = False # type: bool
self.user_provenance = False # type: bool
self.prov_obj = None # type: Optional[ProvenanceProfile]
self.do_update = None # type: Optional[bool]
self.jobdefaults = None # type: Optional[MutableMapping[Text, Any]]
super(LoadingContext, self).__init__(kwargs)
......@@ -72,8 +74,8 @@ class LoadingContext(ContextBase):
return copy.copy(self)
class RuntimeContext(ContextBase):
def __init__(self, kwargs=None):
# type: (Optional[Dict[str, Any]]) -> None
def __init__(self, kwargs=None): # type: (Optional[Dict[str, Any]]) -> None
"""Initializet the RuntimeContext from the kwargs."""
select_resources_callable = Callable[ # pylint: disable=unused-variable
[Dict[str, int], RuntimeContext], Dict[str, int]]
self.user_space_docker_cmd = "" # type: Text
......
from __future__ import absolute_import
from typing import IO, Any, Dict
from typing import cast, IO, Any, Dict, MutableMapping
from rdflib import Graph
from schema_salad.jsonld_context import makerdf
......@@ -15,27 +15,26 @@ from .process import Process
def gather(tool, ctx): # type: (Process, ContextType) -> Graph
g = Graph()
def visitor(t):
def visitor(t): # type: (MutableMapping[Text, Any]) -> None
makerdf(t["id"], t, ctx, graph=g)
tool.visit(visitor)
return g
def printrdf(wflow, ctx, style): # type: (Process, ContextType, Text) -> Text
def printrdf(wflow, ctx, style): # type: (Process, ContextType, str) -> Text
"""Serialize the CWL document into a string, ready for printing."""
rdf = gather(wflow, ctx).serialize(format=style, encoding='utf-8')
if not rdf:
return u""
assert rdf is not None
return rdf.decode('utf-8')
return cast(Text, rdf.decode('utf-8'))
def lastpart(uri): # type: (Any) -> Text
uri = Text(uri)
if "/" in uri:
return uri[uri.rindex("/") + 1:]
return uri
uri2 = Text(uri)
if "/" in uri2:
return uri2[uri2.rindex("/") + 1:]
return uri2
def dot_with_parameters(g, stdout): # type: (Graph, IO[Any]) -> None
......
......@@ -155,7 +155,7 @@ class DockerCommandLineJob(ContainerCommandLineJob):
else:
loadproc = subprocess.Popen(cmd, stdin=subprocess.PIPE,
stdout=sys.stderr)
assert loadproc.stdin is not None
assert loadproc.stdin is not None # nosec
_logger.info(u"Sending GET request to %s", docker_requirement["dockerLoad"])
req = requests.get(docker_requirement["dockerLoad"], stream=True)
size = 0
......@@ -229,9 +229,12 @@ class DockerCommandLineJob(ContainerCommandLineJob):
if host_outdir_tgt:
# shortcut, just copy to the output directory
# which is already going to be mounted
if not os.path.exists(os.path.dirname(host_outdir_tgt)):
os.makedirs(os.path.dirname(host_outdir_tgt))
shutil.copy(volume.resolved, host_outdir_tgt)
else:
tmpdir = tempfile.mkdtemp(dir=tmpdir_prefix)
tmp_dir, tmp_prefix = os.path.split(tmpdir_prefix)
tmpdir = tempfile.mkdtemp(prefix=tmp_prefix, dir=tmp_dir)
file_copy = os.path.join(
tmpdir, os.path.basename(volume.resolved))
shutil.copy(volume.resolved, file_copy)
......@@ -249,20 +252,22 @@ class DockerCommandLineJob(ContainerCommandLineJob):
if volume.resolved.startswith("_:"):
# Synthetic directory that needs creating first
if not host_outdir_tgt:
tmp_dir, tmp_prefix = os.path.split(tmpdir_prefix)
new_dir = os.path.join(
tempfile.mkdtemp(dir=tmpdir_prefix),
tempfile.mkdtemp(prefix=tmp_prefix, dir=tmp_dir),
os.path.basename(volume.target))
self.append_volume(runtime, new_dir, volume.target,
writable=True)
elif not os.path.exists(host_outdir_tgt):
os.makedirs(host_outdir_tgt, 0o0755)
os.makedirs(host_outdir_tgt)
else:
if self.inplace_update:
self.append_volume(runtime, volume.resolved, volume.target,
writable=True)
else:
if not host_outdir_tgt:
tmpdir = tempfile.mkdtemp(dir=tmpdir_prefix)
tmp_dir, tmp_prefix = os.path.split(tmpdir_prefix)
tmpdir = tempfile.mkdtemp(prefix=tmp_prefix, dir=tmp_dir)
new_dir = os.path.join(
tmpdir, os.path.basename(volume.resolved))
shutil.copytree(volume.resolved, new_dir)
......@@ -291,7 +296,8 @@ class DockerCommandLineJob(ContainerCommandLineJob):
runtime = [u"docker", u"run", u"-i"]
self.append_volume(runtime, os.path.realpath(self.outdir),
self.builder.outdir, writable=True)
self.append_volume(runtime, os.path.realpath(self.tmpdir), "/tmp",
tmpdir = "/tmp" # nosec
self.append_volume(runtime, os.path.realpath(self.tmpdir), tmpdir,
writable=True)
self.add_volumes(self.pathmapper, runtime, any_path_okay=True,
secret_store=runtimeContext.secret_store,
......@@ -355,7 +361,8 @@ class DockerCommandLineJob(ContainerCommandLineJob):
"please check it first")
exit(2)
else:
cidfile_dir = tempfile.mkdtemp(dir=runtimeContext.tmpdir_prefix)
tmp_dir, tmp_prefix = os.path.split(runtimeContext.tmpdir_prefix)
cidfile_dir = tempfile.mkdtemp(prefix=tmp_prefix, dir=tmp_dir)
cidfile_name = datetime.datetime.now().strftime("%Y%m%d%H%M%S-%f") + ".cid"
if runtimeContext.cidfile_prefix is not None:
......
"""Helper functions for docker."""
from __future__ import absolute_import, print_function
from typing import List, Optional, Tuple # pylint: disable=unused-import
from typing import List, Optional, Tuple, cast # pylint: disable=unused-import
from typing_extensions import Text # pylint: disable=unused-import
# move to a regular typing import when Python 3.3-3.6 is no longer supported
......@@ -11,7 +11,7 @@ from .utils import subprocess
def docker_vm_id(): # type: () -> Tuple[Optional[int], Optional[int]]
"""
Returns the User ID and Group ID of the default docker user inside the VM
Return the User ID and Group ID of the default docker user inside the VM.
When a host is using boot2docker or docker-machine to run docker with
boot2docker.iso (As on Mac OS X), the UID that mounts the shared filesystem
......@@ -28,14 +28,15 @@ def docker_vm_id(): # type: () -> Tuple[Optional[int], Optional[int]]
def check_output_and_strip(cmd): # type: (List[Text]) -> Optional[Text]
"""
Passes a command list to subprocess.check_output, returning None
if an expected exception is raised
Pass a command list to subprocess.check_output.
Returning None if an expected exception is raised
:param cmd: The command to execute
:return: Stripped string output of the command, or None if error
"""
try:
result = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
return result.strip()
result = subprocess.check_output(cmd, stderr=subprocess.STDOUT, universal_newlines=True)
return cast(str, result).strip()
except (OSError, subprocess.CalledProcessError, TypeError, AttributeError):
# OSError is raised if command doesn't exist
# CalledProcessError is raised if command returns nonzero
......@@ -45,7 +46,8 @@ def check_output_and_strip(cmd): # type: (List[Text]) -> Optional[Text]
def docker_machine_name(): # type: () -> Optional[Text]
"""
Get the machine name of the active docker-machine machine
Get the machine name of the active docker-machine machine.
:return: Name of the active machine or None if error
"""
return check_output_and_strip(['docker-machine', 'active'])
......@@ -54,7 +56,8 @@ def docker_machine_name(): # type: () -> Optional[Text]
def cmd_output_matches(check_cmd, expected_status):
# type: (List[Text], Text) -> bool
"""
Runs a command and compares output to expected
Run a command and compares output to expected.
:param check_cmd: Command list to execute
:param expected_status: Expected output, e.g. "Running" or "poweroff"
:return: Boolean value, indicating whether or not command result matched
......@@ -64,7 +67,8 @@ def cmd_output_matches(check_cmd, expected_status):
def boot2docker_running(): # type: () -> bool
"""
Checks if boot2docker CLI reports that boot2docker vm is running
Check if boot2docker CLI reports that boot2docker vm is running.
:return: True if vm is running, False otherwise
"""
return cmd_output_matches(['boot2docker', 'status'], 'running')
......@@ -72,7 +76,8 @@ def boot2docker_running(): # type: () -> bool
def docker_machine_running(): # type: () -> bool
"""
Asks docker-machine for active machine and checks if its VM is running
Ask docker-machine for the active machine and checks if its VM is running.
:return: True if vm is running, False otherwise
"""
machine_name = docker_machine_name()
......@@ -83,7 +88,8 @@ def docker_machine_running(): # type: () -> bool
def cmd_output_to_int(cmd): # type: (List[Text]) -> Optional[int]
"""
Runs the provided command and returns the integer value of the result
Run the provided command and returns the integer value of the result.
:param cmd: The command to run
:return: Integer value of result, or None if an error occurred
"""
......@@ -99,7 +105,8 @@ def cmd_output_to_int(cmd): # type: (List[Text]) -> Optional[int]
def boot2docker_id(): # type: () -> Tuple[Optional[int], Optional[int]]
"""
Gets the UID and GID of the docker user inside a running boot2docker vm
Get the UID and GID of the docker user inside a running boot2docker vm.
:return: Tuple (UID, GID), or (None, None) if error (e.g. boot2docker not present or stopped)
"""
uid = cmd_output_to_int(['boot2docker', 'ssh', 'id', '-u'])
......@@ -108,7 +115,8 @@ def boot2docker_id(): # type: () -> Tuple[Optional[int], Optional[int]]
def docker_machine_id(): # type: () -> Tuple[Optional[int], Optional[int]]
"""
Asks docker-machine for active machine and gets the UID of the docker user
Ask docker-machine for active machine and gets the UID of the docker user.
inside the vm
:return: tuple (UID, GID), or (None, None) if error (e.g. docker-machine not present or stopped)
"""
......
......@@ -4,12 +4,15 @@ import datetime
import os
import tempfile
import threading
import logging
from threading import Lock
from abc import ABCMeta, abstractmethod
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
import psutil
from six import string_types, with_metaclass
from typing_extensions import Text # pylint: disable=unused-import
from future.utils import raise_from
from schema_salad.validate import ValidationException
from .builder import Builder # pylint: disable=unused-import
......@@ -24,6 +27,9 @@ from .process import cleanIntermediate, relocateOutputs
from .provenance import ProvenanceProfile
from .utils import DEFAULT_TMP_PREFIX
from .workflow import Workflow, WorkflowJob, WorkflowJobStep
from .command_line_tool import CallbackJob
TMPDIR_LOCK = Lock()
class JobExecutor(with_metaclass(ABCMeta, object)):
......@@ -31,14 +37,15 @@ class JobExecutor(with_metaclass(ABCMeta, object)):
def __init__(self):
# type: (...) -> None
self.final_output = [] # type: List
self.final_status = [] # type: List
self.output_dirs = set() # type: Set
"""Initialize."""
self.final_output = [] # type: List[Union[Dict[Text, Any], List[Dict[Text, Any]]]]
self.final_status = [] # type: List[Text]
self.output_dirs = set() # type: Set[Text]
def __call__(self, *args, **kwargs):
def __call__(self, *args, **kwargs): # type: (*Any, **Any) -> Any
return self.execute(*args, **kwargs)
def output_callback(self, out, process_status):
def output_callback(self, out, process_status): # type: (Dict[Text, Any], Text) -> None
"""Collect the final status and outputs."""
self.final_status.append(process_status)
self.final_output.append(out)
......@@ -47,7 +54,7 @@ class JobExecutor(with_metaclass(ABCMeta, object)):
def run_jobs(self,
process, # type: Process
job_order_object, # type: Dict[Text, Any]
logger,
logger, # type: logging.Logger
runtime_context # type: RuntimeContext
): # type: (...) -> None
"""Execute the jobs for the given Process."""
......@@ -56,10 +63,9 @@ class JobExecutor(with_metaclass(ABCMeta, object)):
process, # type: Process
job_order_object, # type: Dict[Text, Any]
runtime_context, # type: RuntimeContext
logger=_logger,
): # type: (...) -> Tuple[Optional[Dict[Text, Any]], Text]
logger=_logger, # type: logging.Logger
): # type: (...) -> Tuple[Optional[Union[Dict[Text, Any], List[Dict[Text, Any]]]], Text]
"""Execute the process."""
if not runtime_context.basedir:
raise WorkflowException("Must provide 'basedir' in runtimeContext")
......@@ -68,31 +74,30 @@ class JobExecutor(with_metaclass(ABCMeta, object)):
if isinstance(original_outdir, string_types):
finaloutdir = os.path.abspath(original_outdir)
runtime_context = runtime_context.copy()
runtime_context.outdir = tempfile.mkdtemp(
outdir = tempfile.mkdtemp(
prefix=getdefault(runtime_context.tmp_outdir_prefix, DEFAULT_TMP_PREFIX))
self.output_dirs.add(runtime_context.outdir)
self.output_dirs.add(outdir)
runtime_context.outdir = outdir
runtime_context.mutation_manager = MutationManager()
runtime_context.toplevel = True
runtime_context.workflow_eval_lock = threading.Condition(threading.RLock())
job_reqs = None
if "https://w3id.org/cwl/cwl#requirements" in job_order_object:
if process.metadata["cwlVersion"] == 'v1.0':
if process.metadata.get("http://commonwl.org/cwltool#original_cwlVersion") == 'v1.0':
raise WorkflowException(
"`cwl:requirements` in the input object is not part of CWL "
"v1.0. You can adjust to use `cwltool:overrides` instead; or you "
"can set the cwlVersion to v1.1.0-dev1 or greater and re-run with "
"--enable-dev.")
"can set the cwlVersion to v1.1")
job_reqs = job_order_object["https://w3id.org/cwl/cwl#requirements"]
elif ("cwl:defaults" in process.metadata
and "https://w3id.org/cwl/cwl#requirements"
in process.metadata["cwl:defaults"]):
if process.metadata["cwlVersion"] == 'v1.0':
if process.metadata.get("http://commonwl.org/cwltool#original_cwlVersion") == 'v1.0':
raise WorkflowException(
"`cwl:requirements` in the input object is not part of CWL "
"v1.0. You can adjust to use `cwltool:overrides` instead; or you "
"can set the cwlVersion to v1.1.0-dev1 or greater and re-run with "
"--enable-dev.")
"can set the cwlVersion to v1.1")
job_reqs = process.metadata["cwl:defaults"]["https://w3id.org/cwl/cwl#requirements"]
if job_reqs is not None:
for req in job_reqs:
......@@ -134,10 +139,11 @@ class JobExecutor(with_metaclass(ABCMeta, object)):
class SingleJobExecutor(JobExecutor):
"""Default single-threaded CWL reference executor."""
def run_jobs(self,
process, # type: Process
job_order_object, # type: Dict[Text, Any]
logger,
logger, # type: logging.Logger
runtime_context # type: RuntimeContext
): # type: (...) -> None
......@@ -167,16 +173,16 @@ class SingleJobExecutor(JobExecutor):
self.output_dirs.add(job.outdir)
if runtime_context.research_obj is not None:
if not isinstance(process, Workflow):
runtime_context.prov_obj = process.provenance_object
prov_obj = process.provenance_object
else:
runtime_context.prov_obj = job.prov_obj
assert runtime_context.prov_obj
runtime_context.prov_obj.evaluate(
prov_obj = job.prov_obj
if prov_obj:
runtime_context.prov_obj = prov_obj
prov_obj.evaluate(
process, job, job_order_object,
runtime_context.research_obj)
process_run_id =\
runtime_context.prov_obj.record_process_start(
process, job)
prov_obj.record_process_start(process, job)
runtime_context = runtime_context.copy()
runtime_context.process_run_id = process_run_id
job.run(runtime_context)
......@@ -187,7 +193,7 @@ class SingleJobExecutor(JobExecutor):
raise
except Exception as err:
logger.exception("Got workflow error")
raise WorkflowException(Text(err))
raise_from(WorkflowException(Text(err)), err)
class MultithreadedJobExecutor(JobExecutor):
......@@ -200,13 +206,14 @@ class MultithreadedJobExecutor(JobExecutor):
"""
def __init__(self): # type: () -> None
"""Initialize."""
super(MultithreadedJobExecutor, self).__init__()
self.threads = set() # type: Set[threading.Thread]
self.exceptions = [] # type: List[WorkflowException]
self.pending_jobs = [] # type: List[Union[JobBase, WorkflowJob]]
self.pending_jobs_lock = threading.Lock()
self.max_ram = psutil.virtual_memory().available / 2**20
self.max_ram = int(psutil.virtual_memory().available / 2**20)
self.max_cores = psutil.cpu_count()
self.allocated_ram = 0
self.allocated_cores = 0
......@@ -231,37 +238,12 @@ class MultithreadedJobExecutor(JobExecutor):
return result
def run_job(self,
job, # type: Union[JobBase, WorkflowJob]
runtime_context # type: RuntimeContext
): # type: (...) -> None
""" Execute a single Job in a seperate thread. """
if job is not None:
with self.pending_jobs_lock:
self.pending_jobs.append(job)
while self.pending_jobs:
with self.pending_jobs_lock:
job = self.pending_jobs[0]
if isinstance(job, JobBase) \
and \
((self.allocated_ram + job.builder.resources["ram"])
> self.max_ram
or (self.allocated_cores + job.builder.resources["cores"])
> self.max_cores):
_logger.warning(
'Job "%s" requested more resources (%s) than are '
'available (max ram is %f, max cores is %f)',
job.name, job.builder.resources, self.max_ram,
self.max_cores)
return
self.pending_jobs.remove(job)
def runner():
def _runner(self, job, runtime_context, TMPDIR_LOCK):
# type: (Union[JobBase, WorkflowJob, CallbackJob], RuntimeContext, threading.Lock) -> None
"""Job running thread."""
try:
job.run(runtime_context)
_logger.debug("job: {}, runtime_context: {}, TMPDIR_LOCK: {}".format(job, runtime_context, TMPDIR_LOCK))
job.run(runtime_context, TMPDIR_LOCK)
except WorkflowException as err:
_logger.exception("Got workflow error")
self.exceptions.append(err)
......@@ -269,6 +251,7 @@ class MultithreadedJobExecutor(JobExecutor):
_logger.exception("Got workflow error")
self.exceptions.append(WorkflowException(Text(err)))
finally:
if runtime_context.workflow_eval_lock:
with runtime_context.workflow_eval_lock:
self.threads.remove(threading.current_thread())
if isinstance(job, JobBase):
......@@ -276,14 +259,59 @@ class MultithreadedJobExecutor(JobExecutor):
self.allocated_cores -= job.builder.resources["cores"]
runtime_context.workflow_eval_lock.notifyAll()
thread = threading.Thread(target=runner)
def run_job(self,
            job,  # type: Union[JobBase, WorkflowJob, None]
            runtime_context  # type: RuntimeContext
           ):  # type: (...) -> None
    """Queue the given Job, then launch every pending job that fits.

    Jobs whose resource request can never be satisfied on this host are
    dropped with an error; jobs that merely cannot run *yet* stay queued.
    Each launched job runs in a separate daemon thread via self._runner.
    """
    if job is not None:
        with self.pending_jobs_lock:
            self.pending_jobs.append(job)
    with self.pending_jobs_lock:
        n = 0
        # Greedy scheduler: walk the queue in order, starting every job
        # whose resource request fits into what is currently unallocated.
        while (n + 1) <= len(self.pending_jobs):
            job = self.pending_jobs[n]
            if isinstance(job, JobBase):
                if ((job.builder.resources["ram"])
                        > self.max_ram
                        or (job.builder.resources["cores"])
                        > self.max_cores):
                    # This request exceeds the host's total capacity, so it
                    # can never run; drop it and stop scheduling.
                    # (Fixed: message previously had 4 placeholders for 6
                    # arguments and a missing closing parenthesis.)
                    _logger.error(
                        'Job "%s" cannot be run, requests more resources (%s) '
                        'than available on this host (max ram %d, max cores %d)',
                        job.name, job.builder.resources,
                        self.max_ram,
                        self.max_cores)
                    self.pending_jobs.remove(job)
                    return
                if ((self.allocated_ram + job.builder.resources["ram"])
                        > self.max_ram
                        or (self.allocated_cores + job.builder.resources["cores"])
                        > self.max_cores):
                    # Not enough free resources right now; leave it queued
                    # and look at the next pending job.
                    _logger.debug(
                        'Job "%s" cannot run yet, resources (%s) are not '
                        'available (already allocated ram is %d, allocated cores is %d, '
                        'max ram %d, max cores %d)',
                        job.name, job.builder.resources,
                        self.allocated_ram,
                        self.allocated_cores,
                        self.max_ram,
                        self.max_cores)
                    n += 1
                    continue
            thread = threading.Thread(target=self._runner, args=(job, runtime_context, TMPDIR_LOCK))
            thread.daemon = True  # do not block interpreter exit on job threads
            self.threads.add(thread)
            if isinstance(job, JobBase):
                # Reserve resources before the thread starts; _runner
                # releases them when the job completes.
                self.allocated_ram += job.builder.resources["ram"]
                self.allocated_cores += job.builder.resources["cores"]
            thread.start()
            self.pending_jobs.remove(job)
def wait_for_next_completion(self, runtime_context):
# type: (RuntimeContext) -> None
......@@ -296,7 +324,7 @@ class MultithreadedJobExecutor(JobExecutor):
def run_jobs(self,
process, # type: Process
job_order_object, # type: Dict[Text, Any]
logger,
logger, # type: logging.Logger
runtime_context # type: RuntimeContext
): # type: (...) -> None
......@@ -324,6 +352,9 @@ class MultithreadedJobExecutor(JobExecutor):
logger.error("Workflow cannot make any more progress.")
break
self.run_job(None, runtime_context)
while self.threads:
self.wait_for_next_completion(runtime_context)
self.run_job(None, runtime_context)
runtime_context.workflow_eval_lock.release()
......@@ -3,11 +3,12 @@ from __future__ import absolute_import
import copy
import re
from typing import (Any, Dict, List, MutableMapping, MutableSequence, Optional,
from typing import (Any, Dict, List, Mapping, MutableMapping, MutableSequence, Optional,
Union)
import six
from six import string_types, u
from future.utils import raise_from
from typing_extensions import Text # pylint: disable=unused-import
# move to a regular typing import when Python 3.3-3.6 is no longer supported
......@@ -119,7 +120,7 @@ def scanner(scan): # type: (Text) -> List[int]
return []
def next_seg(parsed_string, remaining_string, current_value): # type: (Text, Text, Any) -> Any
def next_seg(parsed_string, remaining_string, current_value): # type: (Text, Text, JSON) -> JSON
if remaining_string:
m = segment_re.match(remaining_string)
if not m:
......@@ -143,16 +144,24 @@ def next_seg(parsed_string, remaining_string, current_value): # type: (Text, Te
try:
key = int(next_segment_str[1:-1])
except ValueError as v:
raise WorkflowException(u(str(v)))
raise_from(WorkflowException(u(str(v))), v)
if not isinstance(current_value, MutableSequence):
raise WorkflowException("%s is a %s, cannot index on int '%s'" % (parsed_string, type(current_value).__name__, key))
if key >= len(current_value):
raise WorkflowException("%s list index %i out of range" % (parsed_string, key))
if isinstance(current_value, Mapping):
try:
return next_seg(parsed_string + remaining_string, remaining_string[m.end(0):], current_value[key])
except KeyError:
raise WorkflowException("%s doesn't have property %s" % (parsed_string, key))
elif isinstance(current_value, list) and isinstance(key, int):
try:
return next_seg(parsed_string + remaining_string, remaining_string[m.end(0):], current_value[key])
except KeyError:
raise WorkflowException("%s doesn't have property %s" % (parsed_string, key))
else:
raise WorkflowException("%s doesn't have property %s" % (parsed_string, key))
else:
return current_value
......@@ -246,8 +255,8 @@ def needs_parsing(snippet): # type: (Any) -> bool
return isinstance(snippet, string_types) \
and ("$(" in snippet or "${" in snippet)
def do_eval(ex, # type: Union[Text, Dict]
jobinput, # type: Dict[Text, Union[Dict, List, Text, None]]
def do_eval(ex, # type: Union[Text, Dict[Text, Text]]
jobinput, # type: Dict[Text, JSON]
requirements, # type: List[Dict[Text, Any]]
outdir, # type: Optional[Text]
tmpdir, # type: Optional[Text]
......@@ -261,8 +270,8 @@ def do_eval(ex, # type: Union[Text, Dict]
): # type: (...) -> Any
runtime = copy.deepcopy(resources) # type: Dict[str, Any]
runtime["tmpdir"] = docker_windows_path_adjust(tmpdir)
runtime["outdir"] = docker_windows_path_adjust(outdir)
runtime["tmpdir"] = docker_windows_path_adjust(tmpdir) if tmpdir else None
runtime["outdir"] = docker_windows_path_adjust(outdir) if outdir else None
rootvars = {
u"inputs": jobinput,
......@@ -274,8 +283,7 @@ def do_eval(ex, # type: Union[Text, Dict]
if six.PY3:
rootvars = bytes2str_in_dicts(rootvars) # type: ignore
if needs_parsing(ex):
assert isinstance(ex, string_types)
if isinstance(ex, string_types) and needs_parsing(ex):
fullJS = False
jslib = u""
for r in reversed(requirements):
......@@ -296,6 +304,6 @@ def do_eval(ex, # type: Union[Text, Dict]
strip_whitespace=strip_whitespace)
except Exception as e:
raise WorkflowException("Expression evaluation error:\n%s" % e)
raise_from(WorkflowException("Expression evaluation error:\n%s" % Text(e)), e)
else:
return ex
......@@ -2,7 +2,7 @@ from __future__ import absolute_import
import os
from typing import Callable as tCallable # pylint: disable=unused-import
from typing import Any, Dict, Tuple, Union
from typing import Any, Dict, Optional, Tuple, Union
from typing_extensions import Text # pylint: disable=unused-import
# move to a regular typing import when Python 3.3-3.6 is no longer supported
......@@ -16,6 +16,7 @@ from .process import Process
class WorkflowStatus(Exception):
    """Signaling exception carrying a Workflow's outputs and final status."""

    def __init__(self, out, status):
        # type: (Dict[Text, Any], Text) -> None
        """Record the workflow's output object and its completion status."""
        super(WorkflowStatus, self).__init__("Completed %s" % status)
        self.status = status
        self.out = out
......@@ -23,6 +24,7 @@ class WorkflowStatus(Exception):
class Callable(object):
def __init__(self, t, factory): # type: (Process, Factory) -> None
"""Initialize."""
self.t = t
self.factory = factory
......@@ -38,10 +40,11 @@ class Callable(object):
class Factory(object):
def __init__(self,
executor=None, # type: tCallable[...,Tuple[Dict[Text,Any], Text]]
loading_context=None, # type: LoadingContext
runtime_context=None # type: RuntimeContext
executor=None, # type: Optional[tCallable[...,Tuple[Dict[Text,Any], Text]]]
loading_context=None, # type: Optional[LoadingContext]
runtime_context=None # type: Optional[RuntimeContext]
): # type: (...) -> None
"""Easy way to load a CWL document for execution."""
if executor is None:
executor = SingleJobExecutor()
self.executor = executor
......@@ -53,7 +56,7 @@ class Factory(object):
else:
self.runtime_context = runtime_context
def make(self, cwl):
def make(self, cwl): # type: (Union[Text, Dict[Text, Any]]) -> Callable
"""Instantiate a CWL object from a CWl document."""
load = load_tool.load_tool(cwl, self.loading_context)
if isinstance(load, int):
......
from __future__ import absolute_import
from typing import Any, Callable, List, cast
from typing import Any, Callable, List, Text, cast
# http://rightfootin.blogspot.com/2006/09/more-on-python-flatten.html
......@@ -24,4 +24,4 @@ def flatten(l, ltypes=(list, tuple)):
else:
l[i:i + 1] = l[i]
i += 1
return cast(Callable[[Any], List], ltype)(l)
return cast(Callable[[Any], List[Any]], ltype)(l)