Skip to content
Commits on Source (9)
3.1.4:
Fix #88 Unset 'last_update_session' when found in pending sessions using --remove-pending
Add formats in bank info request
Add checks for some production fields before display
Add irods download support
3.1.3:
Remove post-install step for automatic upgrades, not supported by wheel package
......
......@@ -8,7 +8,6 @@ import json
from datetime import datetime
import redis
from influxdb import InfluxDBClient
from biomaj.mongo_connector import MongoConnector
from biomaj.session import Session
......@@ -164,7 +163,7 @@ class Bank(object):
str(last_update),
str(release)])
# Bank production info header
prod_info.append(["Session", "Remote release", "Release", "Directory", "Freeze"])
prod_info.append(["Session", "Remote release", "Release", "Directory", "Freeze", "Format(s)"])
for prod in _bank['production']:
data_dir = self.config.get('data.dir')
dir_version = self.config.get('dir.version')
......@@ -172,20 +171,32 @@ class Bank(object):
data_dir = prod['data.dir']
if 'dir.version' in prod:
dir_version = prod['dir.version']
if not prod['prod_dir'] or not dir_version or not data_dir:
continue
release_dir = os.path.join(data_dir,
dir_version,
prod['prod_dir'])
date = datetime.fromtimestamp(prod['session']).strftime('%Y-%m-%d %H:%M:%S')
formats = ""
            # Check that the value exists, is not empty, and is a list.
if 'formats' in prod and prod['formats'] and isinstance(prod['formats'], list):
formats = str(','.join(prod['formats']))
prod_info.append([date,
prod['remoterelease'],
prod['release'],
release_dir,
'yes' if 'freeze' in prod and prod['freeze'] else 'no'])
'yes' if 'freeze' in prod and prod['freeze'] else 'no',
formats])
# Bank pending info header
if 'pending' in _bank and len(_bank['pending']) > 0:
pend_info.append(["Pending release", "Last run"])
for pending in _bank['pending']:
run = ""
try:
run = datetime.fromtimestamp(pending['id']).strftime('%Y-%m-%d %H:%M:%S')
except Exception as e:
logging.error('BANK:ERROR:invalid pending id: ' + str(pending['id']))
logging.error('BANK:ERROR:invalid pending id: ' + str(e))
pend_info.append([pending['release'], run])
info['info'] = bank_info
......@@ -959,6 +970,9 @@ class Bank(object):
if 'pending' not in self.bank:
return True
pendings = self.bank['pending']
last_update = None
if 'last_update_session' in self.bank:
last_update = self.bank['last_update_session']
for pending in pendings:
# Only work with pending for argument release
......@@ -979,6 +993,10 @@ class Bank(object):
logging.debug("Remove:Pending:Dir:" + session.get_full_release_directory())
shutil.rmtree(session.get_full_release_directory())
self.remove_session(pending['id'])
if last_update and last_update == pending_session_id:
self.banks.update({'name': self.name},
{'$unset': {'last_update_session': ''}})
# If no release ask for deletion, remove all pending
if not release:
self.banks.update({'name': self.name}, {'$set': {'pending': []}})
......@@ -1097,6 +1115,11 @@ class Bank(object):
'''
Send stats to Influxdb if enabled
'''
try:
from influxdb import InfluxDBClient
except Exception as e:
logging.error('Cannot load influxdb library' + str(e))
return
db_host = self.config.get('influxdb.host', default=None)
if not db_host:
return
......
......@@ -193,6 +193,10 @@ class MetaProcess(threading.Thread):
# bank_env=None, log_dir=None,
# rabbit_mq=None, rabbit_mq_port=5672, rabbit_mq_user=None, rabbit_mq_password=None, rabbit_mq_virtualhost=None,
# proxy=None, bank=None):
proxy = self.bank.config.get('micro.biomaj.proxy.process')
if not proxy:
proxy = self.bank.config.get('micro.biomaj.proxy')
use_sudo = self.bank.config.get_bool('docker.sudo', default=True)
bmaj_process = RemoteProcess(
meta + '_' + name,
......@@ -210,7 +214,7 @@ class MetaProcess(threading.Thread):
rabbit_mq_user=self.bank.config.get('micro.biomaj.rabbit_mq_user'),
rabbit_mq_password=self.bank.config.get('micro.biomaj.rabbit_mq_password'),
rabbit_mq_virtualhost=self.bank.config.get('micro.biomaj.rabbit_mq_virtualhost', default='/'),
proxy=self.bank.config.get('micro.biomaj.proxy'),
proxy=proxy,
bank=self.bank.name
)
else:
......
......@@ -549,7 +549,11 @@ class UpdateWorkflow(Workflow):
)
else:
dserv = DownloadClient()
proxy = self.bank.config.get('micro.biomaj.proxy.download')
if not proxy:
proxy = self.bank.config.get('micro.biomaj.proxy')
session = dserv.create_session(self.name, proxy)
logging.info("Workflow:wf_release:DownloadSession:" + str(session))
......@@ -586,13 +590,6 @@ class UpdateWorkflow(Workflow):
params = None
keys = cf.get('url.params')
if keys is not None:
params = {}
keys = keys.split(',')
for key in keys:
param = cf.get(key.strip() + '.value')
params[key.strip()] = param.strip()
credentials = cf.get('server.credentials')
if cf.get('release.credentials') is not None:
credentials = cf.get('release.credentials')
......@@ -600,12 +597,32 @@ class UpdateWorkflow(Workflow):
save_as = None
method = 'GET'
if protocol == 'directhttp' or protocol == 'directhttps' or protocol == 'directftp':
keys = cf.get('url.params')
if keys is not None:
params = {}
keys = keys.split(',')
for key in keys:
param = cf.get(key.strip() + '.value')
params[key.strip()] = param.strip()
save_as = cf.get('release.file')
remotes = [remote_dir]
remote_dir = '/'
method = cf.get('url.method')
if cf.get('release.url.method') is not None:
method = cf.get('release.url.method')
            # Add iRODS connection params: user, password, port, protocol and zone
if protocol == 'irods':
keys = None
keys = str(str(cf.get('irods.user')) + ',' + str(cf.get('irods.password')) + ',' + str(cf.get('irods.port')) + ',' + str(cf.get('irods.protocol')))
if keys is not None:
params = {}
keys = str(keys).split(',')
params['user'] = str(cf.get('irods.user')).strip()
params['password'] = str(cf.get('irods.password')).strip()
params['port'] = str(cf.get('irods.port')).strip()
params['protocol'] = str(cf.get('irods.protocol')).strip()
params['zone'] = str(cf.get('irods.zone')).strip()
release_downloader = dserv.get_handler(
protocol,
......@@ -938,7 +955,10 @@ class UpdateWorkflow(Workflow):
if pool_size:
dserv.set_queue_size(int(pool_size))
proxy = self.bank.config.get('micro.biomaj.proxy.download')
if not proxy:
proxy = self.bank.config.get('micro.biomaj.proxy')
session = dserv.create_session(self.name, proxy)
logging.info("Workflow:wf_download:DownloadSession:" + str(session))
......@@ -1055,14 +1075,6 @@ class UpdateWorkflow(Workflow):
server = cf.get('server')
params = None
keys = cf.get('url.params')
if keys is not None:
params = {}
keys = keys.split(',')
for key in keys:
param = cf.get(key.strip() + '.value')
params[key.strip()] = param.strip()
method = cf.get('url.method')
if method is None:
method = 'GET'
......@@ -1071,8 +1083,28 @@ class UpdateWorkflow(Workflow):
remote_dir = cf.get('remote.dir')
if protocol == 'directhttp' or protocol == 'directhttps' or protocol == 'directftp':
keys = cf.get('url.params')
if keys is not None:
params = {}
keys = keys.split(',')
for key in keys:
param = cf.get(key.strip() + '.value')
params[key.strip()] = param.strip()
remotes = [cf.get('remote.dir')[:-1]]
remote_dir = '/'
            # Add iRODS connection params: user, password, port, protocol and zone
if protocol == 'irods':
keys = None
keys = str(str(cf.get('irods.user')) + ',' + str(cf.get('irods.password')) + ',' + str(cf.get('irods.port')) + ',' + str(cf.get('irods.protocol')))
if keys is not None:
params = {}
keys = str(keys).split(',')
params['user'] = str(cf.get('irods.user')).strip()
params['password'] = str(cf.get('irods.password')).strip()
params['port'] = str(cf.get('irods.port')).strip()
params['protocol'] = str(cf.get('irods.protocol')).strip()
params['zone'] = str(cf.get('irods.zone')).strip()
save_as = cf.get('target.name')
......@@ -1311,7 +1343,10 @@ class UpdateWorkflow(Workflow):
if pool_size:
dserv.set_queue_size(int(pool_size))
proxy = self.bank.config.get('micro.biomaj.proxy.download')
if not proxy:
proxy = self.bank.config.get('micro.biomaj.proxy')
session = dserv.create_session(self.name, proxy)
logging.info("Workflow:wf_download:DownloadSession:" + str(session))
......@@ -1636,14 +1671,17 @@ class UpdateWorkflow(Workflow):
nb_prod = len(self.bank.bank['production'])
# save session during delete workflow
keep_session = self.bank.session
old_deleted = False
if nb_prod > keep:
for prod in self.bank.bank['production']:
if prod['release'] == keep_session.get('release'):
logging.info('Release %s tagged as keep_session, skipping' % (str(prod['release'])))
continue
if 'freeze' in prod and prod['freeze']:
logging.info('Release %s tagged as freezed, skipping' % (str(prod['release'])))
continue
if self.bank.bank['current'] == prod['session']:
logging.info('Release %s tagged as current, skipping' % (str(prod['release'])))
continue
if nb_prod - keep > 0:
nb_prod -= 1
......@@ -1670,10 +1708,14 @@ class UpdateWorkflow(Workflow):
res = self.bank.start_remove(session)
if not res:
logging.error('Workflow:wf_delete_old:ErrorDelete:' + prod['release'])
else:
old_deleted = True
else:
break
# Set session back
self.bank.session = keep_session
if old_deleted:
self.bank.session._session['remove'] = True
return True
......
biomaj3 (3.1.4-1) unstable; urgency=medium
* Team upload.
* New upstream version
* debhelper 11
* Point Vcs fields to salsa.debian.org
* Standards-Version: 4.1.4
* Testsuite: autopkgtest-pkg-python
* Fix bash path in example script
-- Andreas Tille <tille@debian.org> Fri, 08 Jun 2018 12:48:14 +0200
biomaj3 (3.1.3-1) unstable; urgency=low
* First packaging of biomaj (Closes: #872451).
......
Source: biomaj3
Section: python
Priority: optional
Maintainer: Debian Med Packaging Team <debian-med-packaging@lists.alioth.debian.org>
Uploaders: Olivier Sallou <osallou@debian.org>
Build-Depends: debhelper (>= 9), dh-python,
Section: python
Testsuite: autopkgtest-pkg-python
Priority: optional
Build-Depends: debhelper (>= 11~),
dh-python,
python3-all,
python3-bcrypt,
python3-influxdb,
......@@ -17,17 +19,22 @@ Build-Depends: debhelper (>= 9), dh-python,
python3-biomaj3-download,
python3-biomaj3-user,
python3-biomaj3-process
Standards-Version: 4.0.0
Standards-Version: 4.1.4
Vcs-Browser: https://salsa.debian.org/med-team/biomaj3
Vcs-Git: https://salsa.debian.org/med-team/biomaj3.git
Homepage: https://github.com/genouest/biomaj
Vcs-Browser: https://anonscm.debian.org/cgit/debian-med/biomaj3.git
Vcs-Git: https://anonscm.debian.org/git/debian-med/biomaj3.git
Package: python3-biomaj3
Architecture: all
Depends: ${misc:Depends}, ${python3:Depends}, unzip
Recommends: ${python3:Recommends}, python3-biomaj3-cli
Suggests: ${python3:Suggests}, python3-gunicorn, mongodb, redis-server
XB-Python-Egg-Name: biomaj
Depends: ${misc:Depends},
${python3:Depends},
unzip
Recommends: ${python3:Recommends},
python3-biomaj3-cli
Suggests: ${python3:Suggests},
python3-gunicorn,
mongodb,
redis-server
Description: BioMAJ workflow management library
BioMAJ downloads remote data banks, checks their status and applies
transformation workflows, with consistent state, to provide ready-to-use
......@@ -40,3 +47,4 @@ Description: BioMAJ workflow management library
This package contains the library to manage the workflow update in BioMAJ3,
it is managed via python3-biomaj3-daemon (for microservices remote operations)
or biomaj3-cli (local or remote) packages
XB-Python-Egg-Name: biomaj
#! /usr/bin/make -f
# The test suite really must be disabled during the build
# export DH_BUILD_MAINT_OPTIONS=nocheck
export DEB_BUILD_OPTIONS=nocheck
export PYBUILD_NAME=biomaj
......@@ -12,5 +14,5 @@ override_dh_install:
override_dh_installexamples:
dh_installexamples
sed -i '1s;^;#!/usr/bin/bash\n;' debian/python3-biomaj3/usr/share/doc/python3-biomaj3/examples/tools/process/concat.sh
sed -i '1s;^;#!/bin/bash\n;' debian/python3-biomaj3/usr/share/doc/python3-biomaj3/examples/tools/process/concat.sh
chmod -x debian/python3-biomaj3/usr/share/doc/python3-biomaj3/examples/global.properties.example
......@@ -52,6 +52,9 @@ influxdb.db=biomaj
#micro.biomaj.service.user=1
#micro.biomaj.service.daemon=1
## Optional
# micro.biomaj.proxy.[user,cron,release,daemon,download,process]=http://127.0.0.1:5000
auto_publish=1
########################
......
......@@ -35,7 +35,7 @@ config = {
'url': 'http://biomaj.genouest.org',
'download_url': 'http://biomaj.genouest.org',
'author_email': 'olivier.sallou@irisa.fr',
'version': '3.1.3',
'version': '3.1.4',
'classifiers': [
# How mature is this project? Common values are
# 3 - Alpha
......