Commits on Source (2)

cherry pick py3 test fixes from upstream · 3b215029
Michael R. Crusoe authored Jan 07, 2019
In preparation for eventually running the tests here

update copyright · bda14bcc
Michael R. Crusoe authored Jan 13, 2019
debian/control
View file @ bda14bcc
...
@@ -11,6 +11,7 @@ Build-Depends: debhelper (>= 11~),
python3-docker,
python3-dill,
python3-future,
python3-pytest-xdist,
cwltool,
# documentation
python3-urllib3,
...
debian/copyright
View file @ bda14bcc
...
@@ -5,10 +5,65 @@ Files-Excluded: src/toil.egg-info/*
Files: *
Copyright: Benedict Paten <benedict@soe.ucsc.edu>
-           2015-2016 Regents of the University of California
-           2015 UCSC Computational Genomics Lab
+           2015-2018 Regents of the University of California
+           2015-2018 UCSC Computational Genomics Lab
License: Apache-2.0
Files: ./src/toil/test/cwl/cwlTest.py
Copyright: 2015, Curoverse, Inc
License: Apache-2.0
Files: ./src/toil/batchSystems/slurm.py
Copyright: 2016, Duke Center for Genomic and Computational Biology
License: Apache-2.0
Files: ./src/toil/batchSystems/htcondor.py
Copyright: 2018, HTCondor Team, Computer Sciences Department,
License: Apache-2.0
Files: ./docs/vendor/sphinxcontrib/fulltoc.py
Copyright: 2012, New Dream Network, LLC (DreamHost)
License: Apache-2.0
Files: ./src/toil/wdl/wdl_parser.py
Copyright: 2015, Broad Institute, Inc.
License: BSD-3-clause
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
.
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
.
* Neither the name Broad Institute, Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Files: ./src/toil/batchSystems/lsfHelper.py
Copyright: 2013-2017 "Rory Kirchner" <rory.kirchner@gmail.com> and contributors
License: Expat
Files: ./contrib/azure/LICENSE
Copyright: 2015, Microsoft Azure
License: Expat
Files: ./src/toil/batchSystems/lsf.py
Copyright: 2013, Thomas Keane <tk2@sanger.ac.uk>
License: Expat
Files: debian/*
Copyright: 2017 Steffen Moeller <moeller@debian.org>
License: Apache-2.0
...
@@ -28,3 +83,22 @@ License: Apache-2.0
.
On Debian systems, the complete text of the Apache version 2.0 license
can be found in "/usr/share/common-licenses/Apache-2.0".
License: Expat
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
.
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
debian/patches/fix_tests
0 → 100644
View file @ bda14bcc
From: Michael R. Crusoe <michael.crusoe@gmail.com>
Subject: cherry pick py3 testing fixes from upstream
--- toil.orig/src/toil/lib/docker.py
+++ toil/src/toil/lib/docker.py
@@ -20,7 +20,6 @@
from docker.errors import APIError
from docker.errors import NotFound
from docker.errors import DockerException
-from docker.utils.types import LogConfig
from docker.api.container import ContainerApiMixin
from toil.lib.retry import retry
--- toil.orig/src/toil/test/jobStores/jobStoreTest.py
+++ toil/src/toil/test/jobStores/jobStoreTest.py
@@ -31,11 +31,11 @@
import logging
import threading
import os
+import sys
import shutil
import tempfile
import time
import uuid
-from stubserver import FTPStubServer
from abc import abstractmethod, ABCMeta
from itertools import chain, islice, count
from threading import Thread
@@ -59,7 +59,6 @@
from toil.job import Job, JobNode
from toil.jobStores.abstractJobStore import (NoSuchJobException,
NoSuchFileException)
-from toil.jobStores.googleJobStore import googleRetry
from toil.jobStores.fileJobStore import FileJobStore
from toil.test import (ToilTest,
needs_aws,
@@ -102,7 +101,7 @@
@classmethod
@memoize
def __new__(cls, *args):
- return super(AbstractJobStoreTest.Test, cls).__new__(*args)
+ return super(AbstractJobStoreTest.Test, cls).__new__(cls)
def _createConfig(self):
return Config()
@@ -403,18 +402,22 @@
"""Tests the sharing of files."""
jobstore1 = self.jobstore_initialized
jobstore2 = self.jobstore_resumed_noconfig
+
+ bar = 'bar'
+ if sys.version_info >= (3, 0):
+ bar = b'bar'
with jobstore1.writeSharedFileStream('foo') as f:
- f.write('bar')
+ f.write(bar)
# ... read that file on worker, ...
with jobstore2.readSharedFileStream('foo') as f:
- self.assertEquals('bar', f.read())
+ self.assertEquals(bar, f.read())
# ... and read it again on jobstore1.
with jobstore1.readSharedFileStream('foo') as f:
- self.assertEquals('bar', f.read())
+ self.assertEquals(bar, f.read())
with jobstore1.writeSharedFileStream('nonEncrypted', isProtected=False) as f:
- f.write('bar')
+ f.write(bar)
self.assertUrl(jobstore1.getSharedPublicUrl('nonEncrypted'))
self.assertRaises(NoSuchFileException, jobstore1.getSharedPublicUrl, 'missing')
@@ -435,12 +438,19 @@
# Check file exists
self.assertTrue(jobstore2.fileExists(fileOne))
self.assertTrue(jobstore1.fileExists(fileOne))
+ one = 'one'
+ two = 'two'
+ three = 'three'
+ if sys.version_info >= (3, 0):
+ one = b'one'
+ two = b'two'
+ three = b'three'
# ... write to the file on jobstore2, ...
with jobstore2.updateFileStream(fileOne) as f:
- f.write('one')
+ f.write(one)
# ... read the file as a stream on the jobstore1, ....
with jobstore1.readFileStream(fileOne) as f:
- self.assertEquals(f.read(), 'one')
+ self.assertEquals(f.read(), one)
# ... and copy it to a temporary physical file on the jobstore1.
fh, path = tempfile.mkstemp()
@@ -452,27 +462,27 @@
shutil.copyfile(tmpPath, path)
finally:
os.unlink(tmpPath)
- with open(path, 'r+') as f:
- self.assertEquals(f.read(), 'one')
+ with open(path, 'rb+') as f:
+ self.assertEquals(f.read(), one)
# Write a different string to the local file ...
f.seek(0)
f.truncate(0)
- f.write('two')
+ f.write(two)
# ... and create a second file from the local file.
fileTwo = jobstore1.writeFile(path, jobOnJobStore1.jobStoreID)
with jobstore2.readFileStream(fileTwo) as f:
- self.assertEquals(f.read(), 'two')
+ self.assertEquals(f.read(), two)
# Now update the first file from the local file ...
jobstore1.updateFile(fileOne, path)
with jobstore2.readFileStream(fileOne) as f:
- self.assertEquals(f.read(), 'two')
+ self.assertEquals(f.read(), two)
finally:
os.unlink(path)
# Create a third file to test the last remaining method.
with jobstore2.writeFileStream(jobOnJobStore1.jobStoreID) as (f, fileThree):
- f.write('three')
+ f.write(three)
with jobstore1.readFileStream(fileThree) as f:
- self.assertEquals(f.read(), 'three')
+ self.assertEquals(f.read(), three)
# Delete a file explicitly but leave files for the implicit deletion through the parent
jobstore2.deleteFile(fileOne)
@@ -510,22 +520,28 @@
# Collects stats and logging messages.
stats = set()
-
+ one = 'one'
+ two = 'two'
+ three = 'three'
+ if sys.version_info >= (3, 0):
+ one = b'one'
+ two = b'two'
+ three = b'three'
# No stats or logging added yet. Expect nothing.
self.assertEquals(0, jobstore1.readStatsAndLogging(callback))
self.assertEquals(set(), stats)
# Test writing and reading.
- jobstore2.writeStatsAndLogging('1')
+ jobstore2.writeStatsAndLogging(one)
self.assertEquals(1, jobstore1.readStatsAndLogging(callback))
- self.assertEquals({'1'}, stats)
+ self.assertEquals({one}, stats)
self.assertEquals(0, jobstore1.readStatsAndLogging(callback)) # readStatsAndLogging purges saved stats etc
- jobstore2.writeStatsAndLogging('1')
- jobstore2.writeStatsAndLogging('2')
+ jobstore2.writeStatsAndLogging(one)
+ jobstore2.writeStatsAndLogging(two)
stats = set()
self.assertEquals(2, jobstore1.readStatsAndLogging(callback))
- self.assertEquals({'1', '2'}, stats)
+ self.assertEquals({one, two}, stats)
largeLogEntry = os.urandom(self._largeLogEntrySize())
stats = set()
@@ -634,7 +650,7 @@
@classmethod
def makeImportExportTests(cls):
- testClasses = [FileJobStoreTest, AWSJobStoreTest, AzureJobStoreTest, GoogleJobStoreTest]
+ testClasses = [FileJobStoreTest, AWSJobStoreTest, AzureJobStoreTest]
activeTestClassesByName = {testCls.__name__: testCls
for testCls in testClasses
@@ -714,7 +730,13 @@
assignedPort = http.server_address[1]
url = 'http://localhost:%d' % assignedPort
with self.jobstore_initialized.readFileStream(self.jobstore_initialized.importFile(url)) as readable:
- self.assertEqual(readable.read(), StubHttpRequestHandler.fileContents)
+ f1 = readable.read()
+ f2 = StubHttpRequestHandler.fileContents
+ if isinstance(f1, bytes) and not isinstance(f2, bytes):
+ f1 = f1.decode()
+ if isinstance(f2, bytes) and not isinstance(f1, bytes):
+ f1 = f1.encode()
+ self.assertEqual(f1, f2)
finally:
http.shutdown()
httpThread.join()
@@ -723,6 +745,7 @@
def testImportFtpFile(self):
'''Test importing a file over FTP'''
+ from stubserver import FTPStubServer
file = {'name':'foo', 'content':'foo bar baz qux'}
ftp = FTPStubServer(0)
ftp.run()
@@ -785,7 +808,7 @@
checksumThread = Thread(target=checksumThreadFn)
checksumThread.start()
try:
- with open(random_device) as readable:
+ with open(random_device, 'rb') as readable:
with self.jobstore_initialized.writeFileStream(job.jobStoreID) as (writable, fileId):
for i in range(int(partSize * partsPerFile / bufSize)):
buf = readable.read(bufSize)
@@ -813,8 +836,8 @@
checksum = hashlib.md5()
fh, path = tempfile.mkstemp()
try:
- with os.fdopen(fh, 'r+') as writable:
- with open(random_device) as readable:
+ with os.fdopen(fh, 'wb+') as writable:
+ with open(random_device, 'rb') as readable:
for i in range(int(partSize * partsPerFile / bufSize)):
buf = readable.read(bufSize)
writable.write(buf)
@@ -842,11 +865,11 @@
job = self.jobstore_initialized.create(self.arbitraryJob)
nullFile = self.jobstore_initialized.writeFile('/dev/null', job.jobStoreID)
with self.jobstore_initialized.readFileStream(nullFile) as f:
- self.assertEquals(f.read(), "")
+ assert not f.read()
with self.jobstore_initialized.writeFileStream(job.jobStoreID) as (f, nullStream):
pass
with self.jobstore_initialized.readFileStream(nullStream) as f:
- self.assertEquals(f.read(), "")
+ assert not f.read()
self.jobstore_initialized.delete(job.jobStoreID)
@slow
@@ -856,7 +879,7 @@
dirPath = self._createTempDir()
filePath = os.path.join(dirPath, 'large')
hashIn = hashlib.md5()
- with open(filePath, 'w') as f:
+ with open(filePath, 'wb') as f:
for i in range(0, 10):
buf = os.urandom(self._partSize())
f.write(buf)
@@ -874,7 +897,7 @@
# Reread the file to confirm success.
hashOut = hashlib.md5()
- with open(filePath, 'r') as f:
+ with open(filePath, 'rb') as f:
while True:
buf = f.read(self._partSize())
if not buf:
@@ -962,11 +985,15 @@
# will get blocked on the write. Technically anything
# greater than the pipe buffer size plus the libc
# buffer size (64K + 4K(?)) should trigger this bug,
- # but this gives us a lot of extra room just to be
- # sure.
- f.write('a' * 300000)
+ # but this gives us a lot of extra room just to be sure.
+
+ # python 3 requires self.fileContents to be a bytestring
+ a = 'a'
+ if sys.version_info >= (3, 0):
+ a = b'a'
+ f.write(a * 300000)
with self.jobstore_initialized.readFileStream(fileID) as f:
- self.assertEquals(f.read(1), "a")
+ self.assertEquals(f.read(1), a)
# If it times out here, there's a deadlock
@abstractmethod
@@ -1091,14 +1118,14 @@
return url
else:
content = os.urandom(size)
- with open(localFilePath, 'w') as writable:
+ with open(localFilePath, 'wb') as writable:
writable.write(content)
return url, hashlib.md5(content).hexdigest()
def _hashTestFile(self, url):
localFilePath = FileJobStore._extractPathFromUrl(urlparse.urlparse(url))
- with open(localFilePath, 'r') as f:
+ with open(localFilePath, 'rb') as f:
return hashlib.md5(f.read()).hexdigest()
def _createExternalStore(self):
@@ -1119,54 +1146,6 @@
os.unlink(path)
-@needs_google
-class GoogleJobStoreTest(AbstractJobStoreTest.Test):
- projectID = os.getenv('TOIL_GOOGLE_PROJECTID')
- headers = {"x-goog-project-id": projectID}
-
- def _createJobStore(self):
- from toil.jobStores.googleJobStore import GoogleJobStore
- return GoogleJobStore(GoogleJobStoreTest.projectID + ":" + self.namePrefix)
-
- def _corruptJobStore(self):
- # The Google job store has only one resource, the bucket, so we can't corrupt it without
- # fully deleting it.
- pass
-
- def _prepareTestFile(self, bucket, size=None):
- from toil.jobStores.googleJobStore import GoogleJobStore
- fileName = 'testfile_%s' % uuid.uuid4()
- url = 'gs://%s/%s' % (bucket.name, fileName)
- if size is None:
- return url
- with open('/dev/urandom', 'r') as readable:
- contents = readable.read(size)
- GoogleJobStore._writeToUrl(StringIO(contents), urlparse.urlparse(url))
- return url, hashlib.md5(contents).hexdigest()
-
- def _hashTestFile(self, url):
- from toil.jobStores.googleJobStore import GoogleJobStore
- contents = GoogleJobStore._getBlobFromURL(urlparse.urlparse(url)).download_as_string()
- return hashlib.md5(contents).hexdigest()
-
- @googleRetry
- def _createExternalStore(self):
- from google.cloud import storage
- bucketName = ("import-export-test-" + str(uuid.uuid4()))
- storageClient = storage.Client()
- return storageClient.create_bucket(bucketName)
-
- @googleRetry
- def _cleanUpExternalStore(self, bucket):
- # this is copied from googleJobStore.destroy
- try:
- bucket.delete(force=True)
- # throws ValueError if bucket has more than 256 objects. Then we must delete manually
- except ValueError:
- bucket.delete_blobs(bucket.list_blobs)
- bucket.delete()
-
-
@needs_aws
class AWSJobStoreTest(AbstractJobStoreTest.Test):
@@ -1448,6 +1427,9 @@
self.send_header("Content-type", "text/plain")
self.send_header("Content-length", len(self.fileContents))
self.end_headers()
+ # python 3 requires self.fileContents to be a bytestring
+ if sys.version_info >= (3, 0):
+ self.fileContents = self.fileContents.encode('utf-8')
self.wfile.write(self.fileContents)
--- toil.orig/src/toil/test/src/fileStoreTest.py
+++ toil/src/toil/test/src/fileStoreTest.py
@@ -1338,10 +1338,13 @@
"""
for name, kind, clazz, value in inspect.classify_class_attrs(cls):
if kind == 'static method':
- method = value.__func__
- args = inspect.getargspec(method).args
- if args and args[0] == 'job':
- globals()[name] = method
+ method = value
+ try:
+ args = inspect.getargspec(method).args
+ if args and args[0] == 'job':
+ globals()[name] = method
+ except TypeError:
+ pass
_exportStaticMethodAsGlobalFunctions(hidden.AbstractFileStoreTest)
--- toil.orig/src/toil/test/src/jobFileStoreTest.py
+++ toil/src/toil/test/src/jobFileStoreTest.py
@@ -134,13 +134,24 @@
with open(tempFile, 'w') as fH:
fH.write(testString)
#Write a local copy of the file using the local file
- outputFileStoreIds.append(job.fileStore.writeGlobalFile(tempFile))
+ fileStoreID = job.fileStore.writeGlobalFile(tempFile)
+
+ # Make sure it returned a valid and correct FileID with the right size
+ assert isinstance(fileStoreID, FileID)
+ assert fileStoreID.size == len(testString.encode('utf-8'))
+
+ outputFileStoreIds.append(fileStoreID)
else:
#Use the writeGlobalFileStream method to write the file
with job.fileStore.writeGlobalFileStream() as (fH, fileStoreID):
fH.write(testString.encode('utf-8'))
outputFileStoreIds.append(fileStoreID)
+
+ #Make sure it returned a valid and correct FileID with the right size
+ assert isinstance(fileStoreID, FileID)
+ assert fileStoreID.size == len(testString.encode('utf-8'))
+
if chainLength > 0:
#Make a child that will read these files and check it gets the same results
job.addChildJobFn(fileTestJob, outputFileStoreIds, testStrings, chainLength-1)
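The hunks above repeat one pattern: test fixtures are wrapped in a sys.version_info check so that job-store streams receive bytes under Python 3 and str under Python 2. A minimal standalone sketch of that pattern follows; the payload() helper and the BytesIO-backed stream are illustrative stand-ins, not Toil APIs.

    import io
    import sys

    def payload(text):
        # Job-store streams expect raw bytes on Python 3; on Python 2, str already is bytes.
        if sys.version_info >= (3, 0):
            return text.encode('utf-8')
        return text

    # Without the guard, assertions such as assertEquals('bar', f.read()) fail on
    # Python 3, because f.read() returns b'bar' rather than 'bar'.
    stream = io.BytesIO()
    stream.write(payload('bar'))
    stream.seek(0)
    assert stream.read() == payload('bar')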
debian/patches/series
View file @ bda14bcc
...
@@ -5,3 +5,4 @@ no_galaxy_lib
debianize_docs
spelling
adjust_to_newer_cwltool
fix_tests
debian/rules
View file @ bda14bcc
...
@@ -19,6 +19,13 @@ override_dh_auto_install:
dh_auto_install
find $(CURDIR)/debian -name cwltoil -delete
# more py3 test fixes need to be cherry-picked from upstream
# override_dh_auto_test:
# PYBUILD_SYSTEM=custom \
# PYBUILD_TEST_ARGS='TOIL_SKIP_DOCKER=True {interpreter} -m pytest -vv \
# -W ignore \
# -k "not test_bioconda and not test_run_conformance and not testImportFtpFile" -n auto\
# {dir}/src/toil/test' dh_auto_test --buildsystem pybuild
# If you need to rebuild the Sphinx documentation
# Add sphinxdoc to the dh --with line
...
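The commented-out override_dh_auto_test block above is why python3-pytest-xdist was added to Build-Depends in debian/control: the planned invocation passes -n auto, which the xdist plugin provides to spread tests across all available CPUs. A rough Python-level equivalent of that invocation is sketched below; the deselected tests and the src/toil/test path are copied from the rules file, and running it presupposes a built toil source tree, so treat it as illustrative only.

    import os
    import pytest

    # Mirror TOIL_SKIP_DOCKER=True from debian/rules to skip Docker-dependent tests.
    os.environ["TOIL_SKIP_DOCKER"] = "True"

    exit_code = pytest.main([
        "-vv",
        "-W", "ignore",
        "-k", "not test_bioconda and not test_run_conformance and not testImportFtpFile",
        "-n", "auto",  # parallel test workers; requires pytest-xdist
        "src/toil/test",
    ])
    raise SystemExit(exit_code)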