Skip to content

Commit

Permalink
Moved default Q definitions to get_default_queue() and distinguished be…
Browse files Browse the repository at this point in the history
…tween production and end user settings
  • Loading branch information
userrig@lmn committed Oct 24, 2016
1 parent 69705b9 commit 6d2d431
Show file tree
Hide file tree
Showing 11 changed files with 105 additions and 111 deletions.
21 changes: 10 additions & 11 deletions bcl2fastq/bcl2fastq.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,14 @@
os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "lib"))
if LIB_PATH not in sys.path:
sys.path.insert(0, LIB_PATH)
from pipelines import get_pipeline_version, get_site
from pipelines import get_pipeline_version
from pipelines import get_site
from pipelines import get_default_queue
from pipelines import PipelineHandler
from pipelines import get_machine_run_flowcell_id, is_devel_version
from pipelines import logger as aux_logger, generate_timestamp
from pipelines import get_machine_run_flowcell_id
from pipelines import is_devel_version
from pipelines import logger as aux_logger
from pipelines import generate_timestamp
from pipelines import get_cluster_cfgfile
from generate_bcl2fastq_cfg import MUXINFO_CFG, SAMPLESHEET_CSV, USEBASES_CFG, MuxUnit

Expand All @@ -50,10 +54,6 @@
# same as folder name. also used for cluster job names
PIPELINE_NAME = "bcl2fastq"

DEFAULT_SLAVE_Q = {'GIS': None,
'NSCC': 'production'}
DEFAULT_MASTER_Q = {'GIS': None,
'NSCC': 'production'}

OUTDIR_BASE = {
'GIS': {
Expand Down Expand Up @@ -204,11 +204,10 @@ def main():
help="Give this analysis run a name (used in email and report)")
parser.add_argument('--no-mail', action='store_true',
help="Don't send mail on completion")
site = get_site()
default = DEFAULT_SLAVE_Q.get(site, None)
default = get_default_queue('slave')
parser.add_argument('-w', '--slave-q', default=default,
help="Queue to use for slave jobs (default: {})".format(default))
default = DEFAULT_MASTER_Q.get(site, None)
default = get_default_queue('master')
parser.add_argument('-m', '--master-q', default=default,
help="Queue to use for master job (default: {})".format(default))
parser.add_argument('-l', '--lanes', type=int, nargs="*",
Expand Down Expand Up @@ -385,7 +384,7 @@ def main():

pipeline_handler = PipelineHandler(
PIPELINE_NAME, PIPELINE_BASEDIR,
outdir, user_data, site=site,
outdir, user_data,
logger_cmd=mongo_update_cmd,
master_q=args.master_q,
slave_q=args.slave_q,
Expand Down
15 changes: 5 additions & 10 deletions custom/SG10K/SG10K.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
from readunits import get_readunits_from_args
from pipelines import get_pipeline_version
from pipelines import PipelineHandler
from pipelines import get_site
from pipelines import get_default_queue
from pipelines import logger as aux_logger
from pipelines import get_cluster_cfgfile

Expand All @@ -50,11 +50,6 @@
# same as folder name. also used for cluster job names
PIPELINE_NAME = "SG10K"

DEFAULT_SLAVE_Q = {'GIS': None,
'NSCC': 'production'}
DEFAULT_MASTER_Q = {'GIS': None,
'NSCC': 'production'}

# global logger
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
Expand All @@ -77,11 +72,11 @@ def main():
help="Give this analysis run a name (used in email and report)")
parser.add_argument('--no-mail', action='store_true',
help="Don't send mail on completion")
site = get_site()
default = DEFAULT_SLAVE_Q.get(site, None)
#site = get_site()
default = get_default_queue('slave')
parser.add_argument('-w', '--slave-q', default=default,
help="Queue to use for slave jobs (default: {})".format(default))
default = DEFAULT_MASTER_Q.get(site, None)
default = get_default_queue('master')
parser.add_argument('-m', '--master-q', default=default,
help="Queue to use for master job (default: {})".format(default))
parser.add_argument('-n', '--no-run', action='store_true')
Expand Down Expand Up @@ -167,7 +162,7 @@ def main():

pipeline_handler = PipelineHandler(
PIPELINE_NAME, PIPELINE_BASEDIR,
args.outdir, user_data, site=site,
args.outdir, user_data,
master_q=args.master_q,
slave_q=args.slave_q,
params_cfgfile=args.params_cfg,
Expand Down
60 changes: 51 additions & 9 deletions lib/pipelines.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,26 @@
'local': "true",# dummy command
}


# Per-site default cluster queues for slave jobs, keyed first by site
# (the value returned by get_site(), see get_default_queue()), then by
# caller class: 'production' for the production account, 'enduser' for
# everybody else.  None means "use no explicit queue".
DEFAULT_SLAVE_Q = {
    'GIS': {
        'enduser': None, 'production': None,
    },
    'NSCC': {
        'enduser': None, 'production': 'production',
    }
}

# Same structure as DEFAULT_SLAVE_Q, but for the master job.
DEFAULT_MASTER_Q = {
    'GIS': {
        'enduser': None, 'production': None,
    },
    'NSCC': {
        'enduser': None, 'production': 'production',
    }
}


# from address, i.e. users should reply to to this
# instead of rpd@gis to which we send email
RPD_MAIL = "rpd@gis.a-star.edu.sg"
Expand All @@ -81,7 +101,7 @@ class PipelineHandler(object):

# output
PIPELINE_CFGFILE = "conf.yaml"

RC_DIR = "rc"

RC_FILES = {
Expand Down Expand Up @@ -127,26 +147,26 @@ def __init__(self,
self.log_dir_rel = self.LOG_DIR_REL
self.masterlog = self.MASTERLOG
self.submissionlog = self.SUBMISSIONLOG

if params_cfgfile:
assert os.path.exists(params_cfgfile)
self.params_cfgfile = params_cfgfile

if modules_cfgfile:
assert os.path.exists(modules_cfgfile)
self.modules_cfgfile = modules_cfgfile

if refs_cfgfile:
assert os.path.exists(refs_cfgfile)
self.refs_cfgfile = refs_cfgfile

if cluster_cfgfile:
assert os.path.exists(cluster_cfgfile)
self.cluster_cfgfile = cluster_cfgfile

self.pipeline_cfgfile_out = os.path.join(
self.outdir, self.PIPELINE_CFGFILE)

# RCs
self.dk_init_file = os.path.join(
self.outdir, self.RC_FILES['DK_INIT'])
Expand Down Expand Up @@ -174,7 +194,7 @@ def __init__(self,
if self.cluster_cfgfile:
self.cluster_cfgfile_out = os.path.join(outdir, "cluster.yaml")
# else: local

# run template
self.run_template = os.path.join(
pipeline_rootdir, "lib", "run.template.{}.sh".format(self.site))
Expand Down Expand Up @@ -266,7 +286,7 @@ def write_run_template(self):
with open(self.run_out, 'w') as fh:
fh.write(templ.format(**d))


def read_cfgfiles(self):
"""parse default config and replace all RPD env vars
"""
Expand All @@ -292,7 +312,7 @@ def read_cfgfiles(self):
else:
assert cfgkey not in merged_cfg
merged_cfg[cfgkey] = cfg

# determine num_chroms needed by some pipelines
# FIXME ugly because sometimes not needed
if merged_cfg.get('references'):
Expand Down Expand Up @@ -517,6 +537,28 @@ def email_for_user():
return toaddr



def is_production_user():
    """Return True if the current OS account is the production user."""
    current_user = getuser()
    return current_user == "userrig"


def get_default_queue(master_or_slave):
    """Return the default cluster queue for master or slave jobs.

    Looks the queue up in DEFAULT_MASTER_Q / DEFAULT_SLAVE_Q, keyed by
    the current site (get_site()) and by whether the caller is the
    production user ('production') or an end user ('enduser').

    Args:
        master_or_slave: either 'master' or 'slave'.

    Returns:
        The queue name, or None if no explicit queue should be used.

    Raises:
        ValueError: if master_or_slave is neither 'master' nor 'slave'.
    """
    if is_production_user():
        user = 'production'
    else:
        user = 'enduser'
    site = get_site()
    if master_or_slave == 'master':
        return DEFAULT_MASTER_Q[site][user]
    elif master_or_slave == 'slave':
        # BUGFIX: previously returned DEFAULT_MASTER_Q here (copy/paste
        # error); slave jobs must use the slave queue map.
        return DEFAULT_SLAVE_Q[site][user]
    else:
        raise ValueError(master_or_slave)


def send_status_mail(pipeline_name, success, analysis_id, outdir,
extra_text=None, pass_exception=True):
"""
Expand Down
15 changes: 5 additions & 10 deletions mapping/BWA-MEM/BWA-MEM.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
from readunits import get_readunits_from_args
from pipelines import get_pipeline_version
from pipelines import PipelineHandler
from pipelines import get_site
from pipelines import get_default_queue
from pipelines import logger as aux_logger
from pipelines import ref_is_indexed
from pipelines import get_cluster_cfgfile
Expand All @@ -52,11 +52,6 @@
# same as folder name. also used for cluster job names
PIPELINE_NAME = "BWA-MEM"

DEFAULT_SLAVE_Q = {'GIS': None,
'NSCC': 'production'}
DEFAULT_MASTER_Q = {'GIS': None,
'NSCC': 'production'}

# global logger
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
Expand All @@ -79,11 +74,11 @@ def main():
help="Give this analysis run a name (used in email and report)")
parser.add_argument('--no-mail', action='store_true',
help="Don't send mail on completion")
site = get_site()
default = DEFAULT_SLAVE_Q.get(site, None)
#site = get_site()
default = get_default_queue('slave')
parser.add_argument('-w', '--slave-q', default=default,
help="Queue to use for slave jobs (default: {})".format(default))
default = DEFAULT_MASTER_Q.get(site, None)
default = get_default_queue('master')
parser.add_argument('-m', '--master-q', default=default,
help="Queue to use for master job (default: {})".format(default))
parser.add_argument('-n', '--no-run', action='store_true')
Expand Down Expand Up @@ -177,7 +172,7 @@ def main():

pipeline_handler = PipelineHandler(
PIPELINE_NAME, PIPELINE_BASEDIR,
args.outdir, user_data, site=site,
args.outdir, user_data,
master_q=args.master_q,
slave_q=args.slave_q,
params_cfgfile=args.params_cfg,
Expand Down
15 changes: 5 additions & 10 deletions metagenomics/essential-genes/essential-genes.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
from readunits import get_readunits_from_args
from pipelines import get_pipeline_version
from pipelines import PipelineHandler
from pipelines import get_site
from pipelines import get_default_queue
from pipelines import logger as aux_logger
from pipelines import ref_is_indexed
from pipelines import get_cluster_cfgfile
Expand All @@ -50,11 +50,6 @@
# same as folder name. also used for cluster job names
PIPELINE_NAME = "essential-genes"

DEFAULT_SLAVE_Q = {'GIS': None,
'NSCC': 'production'}
DEFAULT_MASTER_Q = {'GIS': None,
'NSCC': 'production'}

MARK_DUPS = True

# global logger
Expand All @@ -80,11 +75,11 @@ def main():
help="Give this analysis run a name (used in email and report)")
parser.add_argument('--no-mail', action='store_true',
help="Don't send mail on completion")
site = get_site()
default = DEFAULT_SLAVE_Q.get(site, None)
#site = get_site()
default = get_default_queue('slave')
parser.add_argument('-w', '--slave-q', default=default,
help="Queue to use for slave jobs (default: {})".format(default))
default = DEFAULT_MASTER_Q.get(site, None)
default = get_default_queue('master')
parser.add_argument('-m', '--master-q', default=default,
help="Queue to use for master job (default: {})".format(default))
parser.add_argument('-n', '--no-run', action='store_true')
Expand Down Expand Up @@ -186,7 +181,7 @@ def main():

pipeline_handler = PipelineHandler(
PIPELINE_NAME, PIPELINE_BASEDIR,
args.outdir, user_data, site=site,
args.outdir, user_data,
master_q=args.master_q,
slave_q=args.slave_q,
params_cfgfile=args.params_cfg,
Expand Down
15 changes: 5 additions & 10 deletions rnaseq/fluidigm-ht-c1-rnaseq/fluidigm-ht-c1-rnaseq.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
from readunits import get_readunits_from_args
from pipelines import get_pipeline_version
from pipelines import PipelineHandler
from pipelines import get_site
from pipelines import get_default_queue
from pipelines import logger as aux_logger
from pipelines import get_cluster_cfgfile

Expand All @@ -49,11 +49,6 @@
# same as folder name. also used for cluster job names
PIPELINE_NAME = "fluidigm-ht-c1-rnaseq"

DEFAULT_SLAVE_Q = {'GIS': None,
'NSCC': 'production'}
DEFAULT_MASTER_Q = {'GIS': None,
'NSCC': 'production'}

# global logger
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
Expand All @@ -76,11 +71,11 @@ def main():
help="Give this analysis run a name (used in email and report)")
parser.add_argument('--no-mail', action='store_true',
help="Don't send mail on completion")
site = get_site()
default = DEFAULT_SLAVE_Q.get(site, None)
#site = get_site()
default = get_default_queue('slave')
parser.add_argument('-w', '--slave-q', default=default,
help="Queue to use for slave jobs (default: {})".format(default))
default = DEFAULT_MASTER_Q.get(site, None)
default = get_default_queue('master')
parser.add_argument('-m', '--master-q', default=default,
help="Queue to use for master job (default: {})".format(default))
parser.add_argument('-n', '--no-run', action='store_true')
Expand Down Expand Up @@ -166,7 +161,7 @@ def main():

pipeline_handler = PipelineHandler(
PIPELINE_NAME, PIPELINE_BASEDIR,
args.outdir, user_data, site=site,
args.outdir, user_data,
master_q=args.master_q,
slave_q=args.slave_q,
params_cfgfile=args.params_cfg,
Expand Down
Loading

0 comments on commit 6d2d431

Please sign in to comment.