Skip to content
Commits on Source (8)
......@@ -10,6 +10,10 @@ services:
branches:
except:
- "/^feature.*$/"
addons:
apt:
packages:
- libgnutls-dev
install:
- pip install -r requirements.txt
- pip install coverage nose
......
3.0.27:
Fix previous release broken with a bug in direct protocols
3.0.26:
Change default download timeout to 1h
#12 Allow FTPS protocol
#14 Add mechanism for protocol specific options
3.0.25:
Allow to use hardlinks in LocalDownload
3.0.24:
Remove debug logs
3.0.23:
Support spaces in remote file names
3.0.22:
Fix **/* remote.files parsing
3.0.21:
Fix traefik labels
3.0.20:
......
# About
[![PyPI version](https://badge.fury.io/py/biomaj-download.svg)](https://badge.fury.io/py/biomaj-download)
Microservice to manage the downloads of biomaj.
A protobuf interface is available in biomaj_download/message/message_pb2.py to exchange messages between BioMAJ and the download service.
......@@ -9,7 +11,7 @@ Messages go through RabbitMQ (to be installed).
To compile protobuf, in biomaj_download/message:
protoc --python_out=. message.proto
protoc --python_out=. downmessage.proto
# Development
......
import pycurl
import re
import os
from datetime import datetime
import time
from datetime import datetime
import stat
import hashlib
import ftputil
from biomaj_core.utils import Utils
from biomaj_download.download.interface import DownloadInterface
......@@ -13,6 +15,48 @@ try:
except ImportError:
from StringIO import StringIO as BytesIO
# We use stat.filemode to convert from a mode octal value to a string.
# In Python < 3.3, stat.filemode is not defined.
# This code is copied from the current implementation of stat.filemode.
if 'filemode' not in stat.__dict__:
    # Each inner tuple lists (bit pattern, character); the first pattern
    # fully contained in the mode wins, "-" is used when none matches.
    _filemode_table = (
        ((stat.S_IFLNK, "l"),    # noqa: E241
         (stat.S_IFREG, "-"),    # noqa: E241
         (stat.S_IFBLK, "b"),    # noqa: E241
         (stat.S_IFDIR, "d"),    # noqa: E241
         (stat.S_IFCHR, "c"),    # noqa: E241
         (stat.S_IFIFO, "p")),   # noqa: E241
        ((stat.S_IRUSR, "r"),),  # noqa: E241
        ((stat.S_IWUSR, "w"),),  # noqa: E241
        ((stat.S_IXUSR | stat.S_ISUID, "s"),  # noqa: E241
         (stat.S_ISUID, "S"),    # noqa: E241
         (stat.S_IXUSR, "x")),   # noqa: E241
        ((stat.S_IRGRP, "r"),),  # noqa: E241
        ((stat.S_IWGRP, "w"),),  # noqa: E241
        ((stat.S_IXGRP | stat.S_ISGID, "s"),  # noqa: E241
         (stat.S_ISGID, "S"),    # noqa: E241
         (stat.S_IXGRP, "x")),   # noqa: E241
        ((stat.S_IROTH, "r"),),  # noqa: E241
        ((stat.S_IWOTH, "w"),),  # noqa: E241
        ((stat.S_IXOTH | stat.S_ISVTX, "t"),  # noqa: E241
         (stat.S_ISVTX, "T"),    # noqa: E241
         (stat.S_IXOTH, "x"))    # noqa: E241
    )

    def _filemode(mode):
        """Convert a file's mode to a string of the form '-rwxrwxrwx'."""
        perm = []
        for table in _filemode_table:
            for bit, char in table:
                if mode & bit == bit:
                    perm.append(char)
                    break
            else:
                perm.append("-")
        return "".join(perm)

    stat.filemode = _filemode
class FTPDownload(DownloadInterface):
'''
......@@ -25,6 +69,12 @@ class FTPDownload(DownloadInterface):
remote.files=^alu.*\\.gz$
'''
# Utilities to parse ftp listings: UnixParser is the more common hence we
# put it first
ftp_listing_parsers = [
ftputil.stat.UnixParser(),
ftputil.stat.MSParser(),
]
def __init__(self, protocol, host, rootdir):
DownloadInterface.__init__(self)
......@@ -34,6 +84,25 @@ class FTPDownload(DownloadInterface):
self.rootdir = rootdir
self.url = url
self.headers = {}
# Initialize options
# Should we skip SSL verification (cURL -k/--insecure option)
self.ssl_verifyhost = True
self.ssl_verifypeer = True
# Path to the certificate of the server (cURL --cacert option; PEM format)
self.ssl_server_cert = None
# Keep alive
self.tcp_keepalive = 0
def set_options(self, protocol_options):
super(FTPDownload, self).set_options(protocol_options)
if "ssl_verifyhost" in protocol_options:
self.ssl_verifyhost = Utils.to_bool(protocol_options["ssl_verifyhost"])
if "ssl_verifypeer" in protocol_options:
self.ssl_verifypeer = Utils.to_bool(protocol_options["ssl_verifypeer"])
if "ssl_server_cert" in protocol_options:
self.ssl_server_cert = protocol_options["ssl_server_cert"]
if "tcp_keepalive" in protocol_options:
self.tcp_keepalive = Utils.to_int(protocol_options["tcp_keepalive"])
def match(self, patterns, file_list, dir_list=None, prefix='', submatch=False):
'''
......@@ -63,19 +132,21 @@ class FTPDownload(DownloadInterface):
if subdir == '^':
subdirs_pattern = subdirs_pattern[1:]
subdir = subdirs_pattern[0]
for direlt in dir_list:
subdir = direlt['name']
self.logger.debug('Download:File:Subdir:Check:' + subdir)
# If getting all, get all files
if pattern == '**/*':
(subfile_list, subdirs_list) = self.list(prefix + '/' + subdir + '/')
self.match([pattern], subfile_list, subdirs_list, prefix + '/' + subdir, True)
for rfile in file_list:
if pattern == '**/*' or re.match(pattern, rfile['name']):
rfile['root'] = self.rootdir
if prefix != '':
rfile['name'] = prefix + '/' + rfile['name']
self.files_to_download.append(rfile)
self.logger.debug('Download:File:MatchRegExp:' + rfile['name'])
for direlt in dir_list:
subdir = direlt['name']
self.logger.debug('Download:File:Subdir:Check:' + subdir)
if pattern == '**/*':
(subfile_list, subdirs_list) = self.list(prefix + '/' + subdir + '/')
self.match([pattern], subfile_list, subdirs_list, prefix + '/' + subdir, True)
else:
if re.match(subdirs_pattern[0], subdir):
self.logger.debug('Download:File:Subdir:Match:' + subdir)
......@@ -101,6 +172,27 @@ class FTPDownload(DownloadInterface):
while(error is True and nbtry < 3):
fp = open(file_path, "wb")
curl = pycurl.Curl()
# Configure TCP keepalive
if self.tcp_keepalive:
curl.setopt(pycurl.TCP_KEEPALIVE, True)
curl.setopt(pycurl.TCP_KEEPIDLE, self.tcp_keepalive * 2)
curl.setopt(pycurl.TCP_KEEPINTVL, self.tcp_keepalive)
# Configure SSL verification (on some platforms, disabling
# SSL_VERIFYPEER implies disabling SSL_VERIFYHOST so we set
# SSL_VERIFYPEER after)
curl.setopt(pycurl.SSL_VERIFYHOST, 2 if self.ssl_verifyhost else 0)
curl.setopt(pycurl.SSL_VERIFYPEER, 1 if self.ssl_verifypeer else 0)
if self.ssl_server_cert:
# cacert is the name of the option for the curl command. The
# corresponding cURL option is CURLOPT_CAINFO.
# See https://curl.haxx.se/libcurl/c/CURLOPT_CAINFO.html
# This is inspired by that https://curl.haxx.se/docs/sslcerts.html
# (section "Certificate Verification", option 2) but the option
# CURLOPT_CAPATH is for a directory of certificates.
curl.setopt(pycurl.CAINFO, self.ssl_server_cert)
try:
curl.setopt(pycurl.URL, file_to_download)
except Exception:
......@@ -133,8 +225,7 @@ class FTPDownload(DownloadInterface):
nbtry += 1
curl.close()
fp.close()
skip_check_uncompress = os.environ.get('UNCOMPRESS_SKIP_CHECK', None)
if not error and skip_check_uncompress is None:
if not error and not self.skip_check_uncompress:
archive_status = Utils.archive_check(file_path)
if not archive_status:
self.logger.error('Archive is invalid or corrupted, deleting file and retrying download')
......@@ -231,6 +322,18 @@ class FTPDownload(DownloadInterface):
'''
self.logger.debug('Download:List:' + self.url + self.rootdir + directory)
# Configure TCP keepalive
if self.tcp_keepalive:
self.crl.setopt(pycurl.TCP_KEEPALIVE, True)
self.crl.setopt(pycurl.TCP_KEEPIDLE, self.tcp_keepalive * 2)
self.crl.setopt(pycurl.TCP_KEEPINTVL, self.tcp_keepalive)
# See the corresponding lines in method:`curl_download`
self.crl.setopt(pycurl.SSL_VERIFYHOST, 2 if self.ssl_verifyhost else 0)
self.crl.setopt(pycurl.SSL_VERIFYPEER, 1 if self.ssl_verifypeer else 0)
if self.ssl_server_cert:
self.crl.setopt(pycurl.CAINFO, self.ssl_server_cert)
try:
self.crl.setopt(pycurl.URL, self.url + self.rootdir + directory)
except Exception:
......@@ -252,6 +355,7 @@ class FTPDownload(DownloadInterface):
# Abort if the operation takes longer than the configured timeout
self.crl.setopt(pycurl.TIMEOUT, self.timeout)
self.crl.setopt(pycurl.NOSIGNAL, 1)
try:
self.crl.perform()
except Exception as e:
......@@ -282,40 +386,42 @@ class FTPDownload(DownloadInterface):
rdirs = []
for line in lines:
rfile = {}
# lets print each part separately
parts = line.split()
# the individual fields in this list of parts
if not parts:
# Skip empty lines (usually the last)
if not line:
continue
rfile['permissions'] = parts[0]
rfile['group'] = parts[2]
rfile['user'] = parts[3]
rfile['size'] = int(parts[4])
rfile['month'] = Utils.month_to_num(parts[5])
rfile['day'] = int(parts[6])
rfile['hash'] = hashlib.md5(line.encode('utf-8')).hexdigest()
# Parse the line
for i, parser in enumerate(self.ftp_listing_parsers, 1):
try:
rfile['year'] = int(parts[7])
except Exception:
# specific ftp case issues at getting date info
curdate = datetime.now()
rfile['year'] = curdate.year
# Year not specified; a month greater than the current one means previous year
if rfile['month'] > curdate.month:
rfile['year'] = curdate.year - 1
# Same month but later day => previous year
if rfile['month'] == curdate.month and rfile['day'] > curdate.day:
rfile['year'] = curdate.year - 1
rfile['name'] = parts[8]
if len(parts) >= 10 and parts[9] == '->':
# Symlink, add to files AND dirs as we don't know the type of the link
rdirs.append(rfile)
is_dir = False
if re.match('^d', rfile['permissions']):
is_dir = True
stats = parser.parse_line(line)
break
except ftputil.error.ParserError:
# If it's the last parser, re-raise the exception
if i == len(self.ftp_listing_parsers):
raise
else:
continue
# Put stats in a dict
rfile = {}
rfile['name'] = stats._st_name
# Reparse mode to a string
rfile['permissions'] = stat.filemode(stats.st_mode)
rfile['group'] = stats.st_gid
rfile['user'] = stats.st_uid
rfile['size'] = stats.st_size
mtime = time.localtime(stats.st_mtime)
rfile['year'] = mtime.tm_year
rfile['month'] = mtime.tm_mon
rfile['day'] = mtime.tm_mday
rfile['hash'] = hashlib.md5(line.encode('utf-8')).hexdigest()
is_link = stat.S_ISLNK(stats.st_mode)
is_dir = stat.S_ISDIR(stats.st_mode)
# Append links to dirs and files since we don't know what the
# target is
if is_link:
rfiles.append(rfile)
rdirs.append(rfile)
else:
if not is_dir:
rfiles.append(rfile)
else:
......
......@@ -4,6 +4,8 @@ import datetime
import time
import re
from biomaj_core.utils import Utils
class _FakeLock(object):
'''
......@@ -39,7 +41,7 @@ class DownloadInterface(object):
self.kill_received = False
self.proxy = None
# 24h timeout
self.timeout = 3600 * 24
self.timeout = 3600
# Optional save target for single file downloaders
self.save_as = None
self.logger = logging.getLogger('biomaj')
......@@ -48,6 +50,9 @@ class DownloadInterface(object):
self.protocol = None
self.server = None
self.offline_dir = None
# Options
self.protocol_options = {}
self.skip_check_uncompress = False
    def set_offline_dir(self, offline_dir):
        """Set the offline directory used for downloads.

        :param offline_dir: path to the local offline directory
        """
        self.offline_dir = offline_dir
......@@ -266,6 +271,17 @@ class DownloadInterface(object):
'''
self.credentials = userpwd
def set_options(self, protocol_options):
"""
Set protocol specific options.
Subclasses that override this method must call the
parent implementation.
"""
self.protocol_options = protocol_options
if "skip_check_uncompress" in protocol_options:
self.skip_check_uncompress = Utils.to_bool(protocol_options["skip_check_uncompress"])
def close(self):
'''
Close connection
......
......@@ -18,10 +18,11 @@ class LocalDownload(DownloadInterface):
'''
def __init__(self, rootdir):
def __init__(self, rootdir, use_hardlinks=False):
DownloadInterface.__init__(self)
self.logger.debug('Download')
self.rootdir = rootdir
self.use_hardlinks = use_hardlinks
def download(self, local_dir):
'''
......@@ -32,7 +33,9 @@ class LocalDownload(DownloadInterface):
:return: list of downloaded files
'''
self.logger.debug('Local:Download')
Utils.copy_files(self.files_to_download, local_dir, lock=self.mkdir_lock)
Utils.copy_files(self.files_to_download, local_dir,
use_hardlinks=self.use_hardlinks,
lock=self.mkdir_lock)
for rfile in self.files_to_download:
rfile['download_time'] = 0
......
......@@ -80,7 +80,8 @@ class DownloadClient(DownloadService):
result = r.json()
return (result['progress'], result['errors'])
except Exception:
logging.exception('Failed to connect to the download proxy: %s' % (url))
logging.exception('Failed to connect to the download proxy: %s, retrying in 2 seconds' % (url))
time.sleep(2)
raise Exception('Failed to connect to the download proxy')
def download_remote_files(self, cf, downloaders, offline_dir):
......
......@@ -28,6 +28,14 @@ app = Flask(__name__)
app_log = logging.getLogger('werkzeug')
app_log.setLevel(logging.ERROR)
# Classify protocols from downmessage.proto
# Note: those lists are based on the protocol numbers, not the protocol names
ALL_PROTOCOLS = [item for key, item in downmessage_pb2.DownloadFile.Protocol.items()]
DIRECT_PROTOCOLS = [
item for key, item in downmessage_pb2.DownloadFile.Protocol.items()
if key.startswith("DIRECT")
]
@app.route('/api/download-message')
def ping():
......@@ -122,7 +130,8 @@ class DownloadService(object):
def get_handler(self, protocol_name, server, remote_dir, remote_files=[],
credentials=None, http_parse=None, http_method=None, param=None,
proxy=None, proxy_auth='',
save_as=None, timeout_download=None, offline_dir=None):
save_as=None, timeout_download=None, offline_dir=None,
protocol_options={}):
protocol = downmessage_pb2.DownloadFile.Protocol.Value(protocol_name.upper())
downloader = None
if protocol in [0, 1]:
......@@ -133,6 +142,8 @@ class DownloadService(object):
downloader = LocalDownload(remote_dir)
if protocol == 4:
downloader = DirectFTPDownload('ftp', server, '/')
if protocol == 10:
downloader = DirectFTPDownload('ftps', server, '/')
if protocol == 5:
downloader = DirectHttpDownload('http', server, '/')
if protocol == 6:
......@@ -147,8 +158,8 @@ class DownloadService(object):
for remote_file in remote_files:
if remote_file['save_as']:
save_as = remote_file['save_as']
# For direct protocol, we only keep base name
if protocol in [4, 5, 6]:
# For direct protocols, we only keep base name
if protocol in DIRECT_PROTOCOLS:
tmp_remote = []
for remote_file in remote_files:
tmp_remote.append(remote_file['name'])
......@@ -178,6 +189,10 @@ class DownloadService(object):
downloader.set_protocol(protocol_name)
if protocol_options is not None:
self.logger.debug("Received protocol options: " + str(protocol_options))
downloader.set_options(protocol_options)
downloader.logger = self.logger
downloader.set_files_to_download(remote_files)
return downloader
......@@ -226,7 +241,9 @@ class DownloadService(object):
proxy_auth=proxy_auth,
save_as=biomaj_file_info.remote_file.save_as,
timeout_download=biomaj_file_info.timeout_download,
offline_dir=biomaj_file_info.local_dir)
offline_dir=biomaj_file_info.local_dir,
protocol_options=biomaj_file_info.protocol_options
)
def clean(self, biomaj_file_info=None):
'''
......
syntax = "proto2";
package biomaj.download;
message File {
......@@ -64,7 +66,7 @@ message DownloadFile {
enum Protocol {
FTP = 0;
SFTP = 1;
FTPS = 1;
HTTP = 2;
HTTPS = 3;
DIRECTFTP = 4;
......@@ -73,6 +75,7 @@ message DownloadFile {
LOCAL = 7;
RSYNC = 8;
IRODS = 9;
DIRECTFTPS = 10;
}
message Param {
......@@ -120,4 +123,6 @@ message DownloadFile {
optional HTTP_METHOD http_method = 8 [ default = GET];
map<string, string> protocol_options = 9;
}
biomaj3-download (3.0.21-2) UNRELEASED; urgency=medium
biomaj3-download (3.0.27-1) UNRELEASED; urgency=medium
[ TODO ]
* Remove d/patches on irods and add python3-irodsclient as
dependency on package when available in repo (in NEW queue)
[ PENDING ]
Needs python3-ftputil, in NEW queue
-- Olivier Sallou <osallou@debian.org> Sat, 09 Mar 2019 13:37:25 +0000
[ Olivier Sallou ]
* New upstream release
-- Olivier Sallou <osallou@debian.org> Wed, 16 Oct 2019 13:17:33 +0000
biomaj3-download (3.0.21-1) unstable; urgency=medium
......
......@@ -23,8 +23,9 @@ Build-Depends: debhelper (>= 12~),
python3-requests,
python3-setuptools,
python3-yaml,
python3-biomaj3-core,
python3-biomaj3-zipkin
python3-biomaj3-core (>= 3.0.19),
python3-biomaj3-zipkin,
python3-ftputil
Standards-Version: 4.3.0
Vcs-Browser: https://salsa.debian.org/med-team/biomaj3-download
Vcs-Git: https://salsa.debian.org/med-team/biomaj3-download.git
......
#! /usr/bin/make -f
export DEB_BUILD_OPTIONS=nocheck
#export DEB_BUILD_OPTIONS=nocheck
export DEB_BUILD_MAINT_OPTIONS=nocheck
export PYBUILD_NAME=biomaj-download
%:
......@@ -13,3 +14,6 @@ override_dh_auto_build:
override_dh_install:
dh_install
sed -i '1s;^;#!/usr/bin/python3\n;' debian/python3-biomaj3-download/usr/bin/biomaj_download_consumer.py
override_dh_auto_test:
nosetests3 -a !network
......@@ -17,11 +17,12 @@ with open(os.path.join(here, 'CHANGES.txt')) as f:
config = {
'description': 'BioMAJ download service',
'long_description': README + '\n\n' + CHANGES,
'long_description_content_type': 'text/markdown',
'author': 'Olivier Sallou',
'url': 'http://biomaj.genouest.org',
'download_url': 'http://biomaj.genouest.org',
'author_email': 'olivier.sallou@irisa.fr',
'version': '3.0.21',
'version': '3.0.27',
'classifiers': [
# How mature is this project? Common values are
# 3 - Alpha
......@@ -45,6 +46,7 @@ config = {
'biomaj_core',
'biomaj_zipkin',
'pycurl',
'ftputil',
'py-bcrypt',
'pika==0.13.0',
'redis',
......
......@@ -4,6 +4,7 @@ from nose.plugins.attrib import attr
import json
import shutil
import os
import sys
import tempfile
import logging
import copy
......@@ -213,6 +214,30 @@ class TestBiomajLocalDownload(unittest.TestCase):
locald.close()
self.assertTrue(len(locald.files_to_download) == 1)
def test_local_download_hardlinks(self):
"""
Test download with hardlinks: we download a file from conf/ to data_dir.
This should work unless /tmp don't accept hardlinks so the last assert is
optional.
"""
test_file = "conf/global.properties"
locald = LocalDownload(self.utils.test_dir, use_hardlinks=True)
(file_list, dir_list) = locald.list()
locald.match([r'^/' + test_file + '$'], file_list, dir_list)
locald.download(self.utils.data_dir)
locald.close()
self.assertTrue(len(locald.files_to_download) == 1)
# Test if data/conf/global.properties is a hard link to
# conf/global.properties
local_global_properties = os.path.join(self.utils.test_dir, test_file)
copy_global_properties = os.path.join(self.utils.data_dir, test_file)
try:
self.assertTrue(
os.path.samefile(local_global_properties, copy_global_properties)
)
except Exception:
msg = "In %s: copy worked but hardlinks were not used." % self.id()
logging.info(msg)
@attr('network')
@attr('http')
......@@ -337,6 +362,40 @@ class TestBiomajDirectFTPDownload(unittest.TestCase):
self.assertTrue(os.path.exists(os.path.join(self.utils.data_dir,'mailing-lists.txt')))
@attr('directftps')
@attr('network')
class TestBiomajDirectFTPSDownload(unittest.TestCase):
    """
    Test DirectFTP downloader with FTPS.
    """

    def setUp(self):
        self.utils = UtilsForTest()

    def tearDown(self):
        self.utils.clean()

    def test_ftps_list(self):
        """Listing one known remote file over FTPS yields one entry."""
        downloader = DirectFTPDownload('ftps', 'test.rebex.net', '')
        downloader.set_credentials('demo:password')
        downloader.set_files_to_download(['/readme.txt'])
        (files, dirs) = downloader.list()
        downloader.close()
        self.assertTrue(len(files) == 1)

    def test_download(self):
        """A file listed over FTPS can be downloaded into data_dir."""
        downloader = DirectFTPDownload('ftps', 'test.rebex.net', '')
        downloader.set_credentials('demo:password')
        downloader.set_files_to_download(['/readme.txt'])
        (files, dirs) = downloader.list()
        downloader.download(self.utils.data_dir, False)
        downloader.close()
        self.assertTrue(os.path.exists(os.path.join(self.utils.data_dir, 'readme.txt')))
@attr('directhttp')
@attr('network')
class TestBiomajDirectHTTPDownload(unittest.TestCase):
......@@ -440,24 +499,27 @@ class TestBiomajFTPDownload(unittest.TestCase):
ftpd = FTPDownload('ftp', 'speedtest.tele2.net', '/')
(file_list, dir_list) = ftpd.list()
ftpd.match([r'^1.*KB\.zip$'], file_list, dir_list)
# This test fails because the zip file is fake. We intercept the failure
# and continue.
# See test_download_skip_uncompress_checks
try:
ftpd.download(self.utils.data_dir)
except Exception:
self.assertTrue(1==1)
else:
self.assertTrue(1==0)
# In case it works, this is the real assertion
self.assertTrue(len(ftpd.files_to_download) == 2)
ftpd.close()
# self.assertTrue(len(ftpd.files_to_download) == 2)
def test_download_skip_uncompress_checks(self):
os.environ['UNCOMPRESS_SKIP_CHECK'] = "1"
def test_download_skip_checks_uncompress(self):
# This test is similar to test_download but we skip the archive integrity check.
ftpd = FTPDownload('ftp', 'speedtest.tele2.net', '/')
ftpd.set_options(dict(skip_check_uncompress=True))
(file_list, dir_list) = ftpd.list()
ftpd.match([r'^1.*KB\.zip$'], file_list, dir_list)
ftpd.download(self.utils.data_dir)
ftpd.close()
self.assertTrue(len(ftpd.files_to_download) == 2)
del os.environ['UNCOMPRESS_SKIP_CHECK']
def test_download_in_subdir(self):
ftpd = FTPDownload('ftp', 'ftp.fr.debian.org', '/debian/')
......@@ -503,6 +565,104 @@ class TestBiomajFTPDownload(unittest.TestCase):
self.assertTrue(release['month']=='11')
self.assertTrue(release['day']=='12')
def test_ms_server(self):
ftpd = FTPDownload("ftp", "test.rebex.net", "/")
ftpd.set_credentials("demo:password")
(file_list, dir_list) = ftpd.list()
ftpd.match(["^readme.txt$"], file_list, dir_list)
ftpd.download(self.utils.data_dir)
ftpd.close()
self.assertTrue(len(ftpd.files_to_download) == 1)
def test_download_tcp_keepalive(self):
"""
Test setting tcp_keepalive (it probably doesn't change anything here but
we test that there is no obvious mistake in the code).
"""
ftpd = FTPDownload("ftp", "test.rebex.net", "/")
ftpd.set_options(dict(tcp_keepalive=10))
ftpd.set_credentials("demo:password")
(file_list, dir_list) = ftpd.list()
ftpd.match(["^readme.txt$"], file_list, dir_list)
ftpd.download(self.utils.data_dir)
ftpd.close()
self.assertTrue(len(ftpd.files_to_download) == 1)
@attr('ftps')
@attr('network')
class TestBiomajFTPSDownload(unittest.TestCase):
    """
    Test FTP downloader with FTPS.
    """
    PROTOCOL = "ftps"
    # This server is known to be misconfigured (wrong hostname in its
    # certificate), so the tests below relax SSL verification options.
    BAD_SSL_SERVER = "demo.wftpserver.com"
    BAD_SSL_DIRECTORY = "/download/"
    BAD_SSL_CREDENTIALS = "demo-user:demo-user"

    def setUp(self):
        self.utils = UtilsForTest()

    def tearDown(self):
        self.utils.clean()

    def test_ftps_list(self):
        """Listing over FTPS on a well-configured server."""
        downloader = FTPDownload(self.PROTOCOL, "test.rebex.net", "/")
        downloader.set_credentials("demo:password")
        (files, dirs) = downloader.list()
        downloader.close()
        self.assertTrue(len(files) == 1)

    def test_download(self):
        """Download over FTPS on a well-configured server."""
        downloader = FTPDownload(self.PROTOCOL, "test.rebex.net", "/")
        downloader.set_credentials("demo:password")
        (files, dirs) = downloader.list()
        downloader.match([r'^readme.txt$'], files, dirs)
        downloader.download(self.utils.data_dir)
        downloader.close()
        self.assertTrue(len(downloader.files_to_download) == 1)

    def test_ftps_list_no_ssl(self):
        # This server is misconfigured hence we disable all SSL verification
        downloader = FTPDownload(self.PROTOCOL, self.BAD_SSL_SERVER, self.BAD_SSL_DIRECTORY)
        downloader.set_options(dict(ssl_verifyhost="False", ssl_verifypeer="False"))
        downloader.set_credentials(self.BAD_SSL_CREDENTIALS)
        (files, dirs) = downloader.list()
        downloader.close()
        self.assertTrue(len(files) > 1)

    def test_download_no_ssl(self):
        # This server is misconfigured hence we disable all SSL verification
        downloader = FTPDownload(self.PROTOCOL, self.BAD_SSL_SERVER, self.BAD_SSL_DIRECTORY)
        downloader.set_options(dict(ssl_verifyhost="False", ssl_verifypeer="False"))
        downloader.set_credentials(self.BAD_SSL_CREDENTIALS)
        (files, dirs) = downloader.list()
        downloader.match([r'^manual_en.pdf$'], files, dirs)
        downloader.download(self.utils.data_dir)
        downloader.close()
        self.assertTrue(len(downloader.files_to_download) == 1)

    def test_download_ssl_certficate(self):
        # This server is misconfigured but we use its certificate.
        # The hostname is wrong so we disable host verification.
        downloader = FTPDownload(self.PROTOCOL, self.BAD_SSL_SERVER, self.BAD_SSL_DIRECTORY)
        cert_file = os.path.join(
            os.path.dirname(os.path.realpath(__file__)),
            "caert.demo.wftpserver.com.pem"
        )
        downloader.set_options(dict(ssl_verifyhost="False", ssl_server_cert=cert_file))
        downloader.set_credentials(self.BAD_SSL_CREDENTIALS)
        (files, dirs) = downloader.list()
        downloader.match([r'^manual_en.pdf$'], files, dirs)
        downloader.download(self.utils.data_dir)
        downloader.close()
        self.assertTrue(len(downloader.files_to_download) == 1)
@attr('rsync')
@attr('local')
class TestBiomajRSYNCDownload(unittest.TestCase):
......
-----BEGIN CERTIFICATE-----
MIIDszCCApugAwIBAgIBADANBgkqhkiG9w0BAQsFADCBnDEYMBYGA1UEAwwPV2lu
ZyBGVFAgU2VydmVyMQswCQYDVQQGEwJVUzELMAkGA1UECAwCTlkxCzAJBgNVBAcM
Ak5NMRgwFgYDVQQKDA9XaW5nIEZUUCBTZXJ2ZXIxGDAWBgNVBAsMD1dpbmcgRlRQ
IFNlcnZlcjElMCMGCSqGSIb3DQEJARYWc3VwcG9ydEB3ZnRwc2VydmVyLmNvbTAe
Fw0xNjEwMDcxNjI3MDZaFw0yNjEwMDUxNjI3MDZaMIGcMRgwFgYDVQQDDA9XaW5n
IEZUUCBTZXJ2ZXIxCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJOWTELMAkGA1UEBwwC
Tk0xGDAWBgNVBAoMD1dpbmcgRlRQIFNlcnZlcjEYMBYGA1UECwwPV2luZyBGVFAg
U2VydmVyMSUwIwYJKoZIhvcNAQkBFhZzdXBwb3J0QHdmdHBzZXJ2ZXIuY29tMIIB
IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4qCiFwqgJX9EFvf18bNL4aGl
lFOpzGTbyS5AKiDwdf6dEcJd0p9s8PD2So1g+wYmAMD1aUAn9yyvU11oQ5gy+T3P
ZaPes1bH4ugnq8inwzgy46wP4eN8CJzrxZvMAkdh/UbNiH8GLELR3Pex1BfrMlkN
iO9STcMz7hVA2YhH59eolEJlsqTOSCgaXbCaDcQpof/Hbz/GtLu34x2LpA6GEvtr
78gyuU8MPakISDyXAkaOr2KpJEabsq2xqvJTZUZJHAjFk3DREUYlLbY4HF0KjqH1
VZtJcerBjNszHTrgR7DMy6FIMFnlF9jG0sMkG0kAYu55dqoMEiCTXLpgQWyoEwID
AQABMA0GCSqGSIb3DQEBCwUAA4IBAQAvvnwJrqczqqow20eL77voXn9aTqbex/0C
8kSTVetrThCh8sO+GH507fW4PkyxFfulosSRY18Bj17dVOILMbh959y7PkTWcNA1
I5NxuU0lC2Ctc6sO6WtnKHh3nQaJKYix0CTwN4ZFDeBDWkbT+aqiCDzWDiAvUOaO
wgOvkWaGy+6rB8fT/mcRaK2BH7H374tk5KqPrQwlVl0d/y+lBrp0ISebC/aKV9UE
CqOXL36u0MdNINY/p/wH6aHfrcSe9EVTg7Euw5uq5wmMqrdUf9DyEtY2N18ShBZD
f2c8ZXeb7abPkgef3cbwMHrqQ8ADiQqaLngazlNXU/a7/C1M0Llx
-----END CERTIFICATE-----
......@@ -121,3 +121,13 @@ formatter = generic
[formatter_generic]
format = %(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s
#-----------------
# Protocol options
#-----------------
# Set options
options.name=skip_check_uncompress
# Don't skip the test of compressed files
options.skip_check_uncompress=1