#! /usr/bin/python
 # ***************************************************************************
 #
 #                              KisSplice
 #      de-novo calling alternative splicing events from RNA-seq data.
 #
 # ***************************************************************************
 #
 # Copyright INRIA 
 #  contributors :  Vincent Lacroix
 #                  Pierre Peterlongo
 #                  Gustavo Sacomoto
 #                  Alice Julien-Laferriere
 #                  David Parsons
 #                  Vincent Miele
 #
 # pierre.peterlongo@inria.fr
 # vincent.lacroix@univ-lyon1.fr
 #
 # This software is a computer program whose purpose is to detect alternative
 # splicing events from RNA-seq data.
 #
 # This software is governed by the CeCILL license under French law and
 # abiding by the rules of distribution of free software. You can  use,
 # modify and/ or redistribute the software under the terms of the CeCILL
 # license as circulated by CEA, CNRS and INRIA at the following URL
 # "http://www.cecill.info".

 # As a counterpart to the access to the source code and  rights to copy,
 # modify and redistribute granted by the license, users are provided only
 # with a limited warranty  and the software's author,  the holder of the
 # economic rights,  and the successive licensors  have only  limited
 # liability.

 # In this respect, the user's attention is drawn to the risks associated
 # with loading,  using,  modifying and/or developing or reproducing the
 # software by the user in light of its specific status of free software,
 # that may mean  that it is complicated to manipulate,  and  that  also
 # therefore means  that it is reserved for developers  and  experienced
 # professionals having in-depth computer knowledge. Users are therefore
 # encouraged to load and test the software's suitability as regards their
 # requirements in conditions enabling the security of their systems and/or
 # data to be ensured and,  more generally, to use and operate it in the
 # same conditions as regards security.
 #
 # The fact that you are presently reading this means that you have had
 # knowledge of the CeCILL license and that you accept its terms.
import os
import re
import sys
import time
import shlex
import struct
import shutil
import os.path
import tempfile
import argparse  
import threading
import multiprocessing
from operator import itemgetter 
from subprocess import Popen, PIPE, STDOUT


# Default per-BCC time limit (seconds) for bubble enumeration; see --timeout.
TIMEOUT=900

# BCC ids whose bubble enumeration timed out or hit the bubble cap;
# filled by enumerate_all_bubbles and reported/saved at the end of main().
unfinished_bccs = []
# Maps BCC id (as a string) -> number of SNPs already found in that BCC;
# filled by count_alreadyfoundSNPs.
num_snps = {}
    
class Command(object): # deprecated in the future with Python3
    """Run an external command in a worker thread so it can be killed on timeout.

    The Popen handle is stored on the instance so that the main thread can
    terminate the child process if the worker thread outlives the allotted
    time, and so that callers can inspect the final returncode.
    """
    def __init__(self):
        # Popen object of the launched command; None until run() starts it.
        self.process = None
        
    def target(self, **kwargs):
        # Thread body: launch the command and forward its captured output.
        # stderr is always piped; stdout goes wherever run() decided.
        self.process = Popen(kwargs["args"], stdout=kwargs["stdout"], stderr=PIPE)
        com = self.process.communicate()
        if com[0]: print com[0]

        # Prints stderr that was piped by Popen 
        if com[1] and kwargs["verbose"]: 
            print com[1] 
        
    def run(self, command_line, out_file = "", mode = 'w', verbose = False, timeout = sys.maxint):
        # Launch command_line, optionally redirecting stdout to out_file
        # (opened with the given mode). Blocks until the command finishes or
        # 'timeout' seconds elapse, in which case the process is terminated.
        # Exits the whole program on any failure other than timeout (-15) or
        # the bubble-cap exit code (15).
        if verbose:
            print "Running "+command_line    
        args = shlex.split(command_line)
        if len(out_file):
            stdout_file = open(out_file, mode)
            kwargs = {"verbose":verbose, "args":args, "stdout":stdout_file}
        else:
            kwargs = {"verbose":verbose, "args":args, "stdout":PIPE}  
            
        # Create a Thread object that will run "self.target" with arguments kwargs
        # (given in the form of keyword argument) and start it
        thread = threading.Thread(target=self.target, kwargs=kwargs)
        thread.start()

        # Wait for end of thread or time out
        thread.join( timeout )

        # Check whether thread has ended or timed out
        # (if timed out, kill it and wait for it to actually die)
        if thread.is_alive():
            self.process.terminate()
            thread.join()

        if len(out_file):
            stdout_file.close()

        # -15 == child killed by SIGTERM (our terminate() above);
        # 15 is used by the enumeration binary when the bubble cap is reached
        # (see the message below).
        if self.process.returncode == -15:
            print >> sys.stderr, "\n\t\t *** Timeout reached! ***\n" #+ command_line
        elif self.process.returncode == 15:
            print >> sys.stderr, "\n\t\t *** Maximum number of bubbles reached! ***\n" 
        elif self.process.returncode != 0:
            print >> sys.stderr, "Problem with " + command_line  + " : this may be due to memory issues. try setting your stack size as unlimited using: ulimit -s unlimited "

            sys.exit(self.process.returncode)
        else: 
            if verbose:       
                print "Done!"
            
def mkdirTmp(tmpdir=None):
    """Create and return a unique temporary working directory.

    The directory name is prefixed with "kissplice."; it is created under
    'tmpdir' when one is given, otherwise in the system default location.
    """
    if tmpdir:
        return tempfile.mkdtemp(prefix="kissplice.", dir=tmpdir)
    return tempfile.mkdtemp(prefix="kissplice.")

def cleanTmp(workdir):
    """Recursively delete the temporary working directory and its contents."""
    shutil.rmtree(workdir)

    
def subprocessLauncher(command_line, out_file = "", mode = 'w', verbose = False, timeout = sys.maxint):
    """Run command_line through a Command instance and return its exit status.

    Negative return values mean the child was killed by a signal
    (e.g. -15 after the timeout expired).
    """
    launcher = Command()
    launcher.run(command_line, out_file, mode, verbose, timeout)
    return launcher.process.returncode

def to_file(readfiles, filename = "tmp"):
    """Write a space-separated list of read-file names to 'filename', one per line.

    readfiles -- string of file names separated by single spaces
    filename  -- output file path (default "tmp")
    """
    # 'with' guarantees the file is closed even if a write fails
    # (the original leaked the handle on error).
    with open(filename, 'w') as f:
        for r in readfiles.split(' '):
            f.write(r + "\n")

# Run debruijn graph construction
def build_graph(KISSPLICE_INSTDIR, workdir, readfiles, kval, graphfile, min_cov, genome_size, verbose = False):    
    print "Building de Bruijn graph..."
    all_read_files = workdir + "/all_read_filenames"
    to_file(readfiles, all_read_files)
    command_line = KISSPLICE_INSTDIR+"/ks_debruijn4 " + all_read_files+" "+str(kval)+" "+str(min_cov)+" "+str(genome_size)+" "+ workdir + "/graph --kissplice --4bloom"
    subprocessLauncher(command_line, verbose=verbose)
    shutil.move(workdir+"/graph.edges", graphfile + ".edges")
    shutil.move(workdir+"/graph.nodes", graphfile + ".nodes")
    shutil.move(workdir+"/graph.solid_kmers_binary_with_count", graphfile + ".solid_kmers_binary_with_count")

#Run error_removal for the graph (overwrite edge file)
def error_removal(KISSPLICE_INSTDIR, workdir, kval, graphfile, cutoff, verbose = False):
    print "\n\nRemoving sequencing errors..."

    # convert binary count file to text 
    if os.path.isfile(graphfile+".solid_kmers_binary_with_count"):
        fin = open(graphfile+".solid_kmers_binary_with_count", "rb")
        kmer_nbits = struct.unpack("i",fin.read(4))[0]
        k = struct.unpack("i",fin.read(4))[0]
        fout = open(graphfile+".counts", "w")
        try:
            while True:
                kmer_binary = struct.unpack('B'*(kmer_nbits / 8), fin.read( kmer_nbits / 8) )
                abundance = struct.unpack( 'I', fin.read(4) ) [0]
                kmer = ""
                for i in xrange(k):
                    kmer = "ACTG"[(kmer_binary[i/4] >> (2*(i%4)) ) % 4] + kmer
                print >> fout, kmer, abundance
        except:
            fin.close()
        fout.close()
    
    if os.path.isfile(graphfile+".counts"):
        command_line = KISSPLICE_INSTDIR+"/ks_error_removal "+graphfile+".edges "+graphfile+".nodes "+str(kval)+" "+graphfile+".counts "+str(cutoff)+" "+graphfile+"_C"+str(cutoff) 
        subprocessLauncher(command_line, verbose=verbose)
    else:
        print "k-mer count file not found! Skipping error removal step."


#Run the modules on the graph
def run_modules(KISSPLICE_INSTDIR, workdir, graphfile, kval, cutoff, verbose = False, output_context = False, exec_error_removal = False):
    if not os.path.exists(workdir+"/bcc"):
        os.mkdir(workdir+"/bcc")    
    print "\nFinding the BCCs..."
    
    edge_suffix = ".edges"
    if exec_error_removal:
        edge_suffix = "_C"+str(cutoff)+".edges"
    
    command_line = KISSPLICE_INSTDIR+"/ks_run_modules "+graphfile+edge_suffix+" "+graphfile+".nodes "+str(kval)+" "+workdir+"/bcc/graph"

    if output_context:
        command_line += " --output-context"
    subprocessLauncher(command_line, verbose=verbose)
  
def count_alreadyfoundSNPs(workdir):
    """Load the per-BCC SNP counts produced by the modules step.

    Reads workdir/bcc/graph_info_snp_bcc (lines of "bcc_id num_snps") and
    fills the global num_snps dictionary, keyed by the bcc id string.
    """
    global num_snps 
    info_snp_file = open(workdir+"/bcc/graph_info_snp_bcc", 'r')
    for bcc_snp in info_snp_file.readlines():
        fields = bcc_snp.split()  # format: bcc_id num_snps
        num_snps[fields[0]] = int(fields[1])
    info_snp_file.close()
 
def enumerate_all_bubbles(KISSPLICE_INSTDIR, workdir, outdir, kval, output_snps, min_edit_dist, max_cycles, UL_MAX, LL_MAX, LL_MIN, timeout, nbprocs=1, verbose = False, output_context = False, output_path = False):
    """Enumerate the bubbles of every biconnected component, in parallel.

    Runs enumerate_bubbles_core on each BCC (largest first, skipping BCCs
    with fewer than 4 nodes) over a pool of 'nbprocs' worker processes,
    then concatenates the per-worker temporary files into
    workdir/all_bcc_type0_<k> (SNPs) and workdir/all_bcc_type1234_<k>
    (other event types), plus workdir/all_paths_k<k> when output_path is
    set. BCC ids whose enumeration timed out or hit the bubble cap are
    appended to the global list unfinished_bccs.
    """
    print "Enumerating all bubbles..."

    # Start from a clean slate: drop leftover result files from a previous run.
    if os.path.isfile(workdir+"/all_bcc_type0_"+str(kval)):
      os.remove(workdir+"/all_bcc_type0_"+str(kval))
    if os.path.isfile(workdir+"/all_bcc_type1234_"+str(kval)):
        os.remove(workdir+"/all_bcc_type1234_"+str(kval))    
    # First line of graph_info_bcc is the number of BCCs (read but unused below).
    f = open(workdir+"/bcc/graph_info_bcc")
    n_bcc = int(f.readline())
    f.close()
    
    file2size = {}

    # filling num_snps
    count_alreadyfoundSNPs(workdir); 
    # ordering bcc by decreasing size and filtering if <4 nodes
    f = open( workdir+"/bcc/graph_info_bcc")
    bccnum2size = f.readlines()[2:]
    # Column 1 of each remaining line is the component's node count; keep the
    # 1-based indices of components with more than 3 nodes, largest first.
    bccnumorderedbysize = [int(e[0])+1 for e in sorted(enumerate([int(t.split()[1]) for t in  bccnum2size]), key=lambda x:x[1], reverse=True) if int(e[1])>3]
    f.close()
    if verbose:
        if len(bccnumorderedbysize) != len(bccnum2size):
            print "Less than 4 nodes, cannot contain a bubble!" 

    # multiprocessing step-  BEGIN 
    pool = multiprocessing.Pool(nbprocs)
    TASKS = []
    for i in bccnumorderedbysize:
        TASKS +=  [(enumerate_bubbles_core, i, KISSPLICE_INSTDIR, workdir, outdir, kval, output_snps, min_edit_dist, max_cycles, UL_MAX, LL_MAX, LL_MIN, timeout, verbose, output_context, output_path)]

    # eval_func_tuple applies enumerate_bubbles_core to each packed argument
    # tuple; results come back in completion order, one task per chunk.
    imap_unordered_it = pool.imap_unordered(eval_func_tuple, TASKS, 1)
    
    for x in imap_unordered_it:
        # enumerate_bubbles_core returns -1 on success, the bcc id otherwise.
        if x != -1:
            unfinished_bccs.append(x);
          
    pool.close()
    pool.join()
    # multiprocessing step - END
      
    # Merge the per-worker temporary files (one pair per pool process).
    destinationSNPS = open(workdir+"/all_bcc_type0_"+str(kval), 'wb') ## THE FILE CONTAINS SNPS
    destination1234 = open(workdir+"/all_bcc_type1234_"+str(kval), 'wb') ## THE FILE CONTAINS other bcc
    for file in os.listdir(workdir):
        if file[0:17] == "tmp_all_bcc_type0":
            shutil.copyfileobj(open(workdir+"/"+file, 'rb'), destinationSNPS)
        if file[0:20] == "tmp_all_bcc_type1234":
            shutil.copyfileobj(open(workdir+"/"+file, 'rb'), destination1234)    
    destinationSNPS.close()
    destination1234.close()

    if output_path:
        destination_paths = open(workdir+"/all_paths_k"+str(kval), 'wb')
        for file in os.listdir(workdir):
            if file[0:18] == "tmp_all_paths_bcc_":
                shutil.copyfileobj(open(workdir+"/"+file, 'rb'), destination_paths)
        destination_paths.close()

    # Report the grand total: the merged files hold 4 lines per bubble,
    # hence the division by 4.
    f = open(workdir+"/all_bcc_type0_"+str(kval))
    size0 = sum(1 for line in f)
    f.close()
    f = open(workdir+"/all_bcc_type1234_"+str(kval))
    size1234 = sum(1 for line in f) 
    f.close()
    n_bubbles = (size0 + size1234)/4
    print "Total number of bubbles found: ", n_bubbles  




def enumerate_bubbles_core(i, KISSPLICE_INSTDIR, workdir, outdir, kval, output_snps, min_edit_dist, max_cycles, UL_MAX, LL_MAX, LL_MIN, timeout, verbose = False, output_context = False, output_path = False):  
    """Enumerate the bubbles of biconnected component 'i' (pool-worker body).

    Launches ks_bubble_enumeration on the component; if it completes, the
    output is de-duplicated (ks_clean_duplicates) and appended to per-worker
    accumulation files named after multiprocessing.current_process().name,
    so concurrent workers never write to the same file.

    Returns -1 on success, or 'i' when the enumeration timed out or hit the
    maximum number of bubbles (so the caller can record it as unfinished).
    """
    if verbose:
      print "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@"
      print "Enumerating bubbles in biconnected component "+str(i)
    infofile = workdir+"/bcc/graph_info_bcc"
    contents_file_edges = workdir+"/bcc/graph_contents_edges_bcc"
    contents_file_nodes = workdir+"/bcc/graph_contents_nodes_bcc"
    basename_edges = workdir+"/bcc/graph_all_edges_bcc"
    basename_nodes = workdir+"/bcc/graph_all_nodes_bcc"

    # Contains -1 if the process finished or the bcc number if it timed out.
    flag = -1
    
    # already num_snps found - it is also the starting number from enumerating cycle
    num_snps_bcc = 0
    if str(i) in num_snps:
        num_snps_bcc = num_snps[str(i)]
    command_line = KISSPLICE_INSTDIR+"/ks_bubble_enumeration "+ infofile+" "+ contents_file_edges+" "+ contents_file_nodes+" "+ basename_edges+" "+ basename_nodes\
        +" "+str(i) \
        +" "+str(kval)+" "+workdir+"/bcc/tmp_bcc_sequences_"+str(kval)+"_"+multiprocessing.current_process().name+" "+str(min_edit_dist) \
        +" bcc_"+str(i) + " " + str(num_snps_bcc) + " -u "+str(UL_MAX) \
        +" -L "+str(LL_MAX)+" -l "+str(LL_MIN)+" -M "+str(max_cycles)
    # Forward the optional flags: -s (skip SNP output), -c (output context),
    # -p (output the node paths).
    if not output_snps:
      command_line += " -s"
    if output_context:
      command_line += " -c"
    if output_path:
      command_line += " -p"

    process_returncode = subprocessLauncher(command_line, verbose=verbose, timeout=timeout)

    # Store the bcc number if it timed out OR the maximum number of bubbles was reached 
    if process_returncode == -15 or process_returncode == 15:
        flag = i
    # Only append the results if the enumeration is complete.
    else: 
        command_line_type0 = KISSPLICE_INSTDIR+"/ks_clean_duplicates " + workdir + "/bcc/tmp_bcc_sequences_" + str(kval) +"_"+multiprocessing.current_process().name+ "_type0.fa"
        command_line_type1234 = KISSPLICE_INSTDIR+"/ks_clean_duplicates " + workdir + "/bcc/tmp_bcc_sequences_" + str(kval) +"_"+multiprocessing.current_process().name+ "_type1234.fa"
        subprocessLauncher(command_line_type0, workdir+"/tmp_all_bcc_type0_"+str(kval)+"_"+multiprocessing.current_process().name, 'a', verbose=verbose) # append ALL BCC IN THE SAME FILE
        subprocessLauncher(command_line_type1234, workdir+"/tmp_all_bcc_type1234_"+str(kval)+"_"+multiprocessing.current_process().name, 'a', verbose=verbose) # append ALL BCC IN THE SAME FILE

        if output_path:
            command_line = "cat "+workdir+"/bcc/tmp_bcc_sequences_" + str(kval) +"_"+multiprocessing.current_process().name+ ".path" 
            subprocessLauncher(command_line, workdir+"/tmp_all_paths_bcc_"+str(kval)+"_"+multiprocessing.current_process().name, 'a', verbose=verbose) # append ALL BCC IN THE SAME FILE 
 
    if verbose:          
      print "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n"  
    
    return flag

def eval_func_tuple(f_args):
    """Apply the first element of f_args (a callable) to the remaining elements.

    Used to dispatch packed task tuples through multiprocessing.Pool.
    """
    func = f_args[0]
    args = f_args[1:]
    return func(*args)
        
def check_read_coverage_and_sort_all_bubbles(KISSPLICE_INSTDIR, readfiles, workdir, outdir, kval, output_snps, infix_name,  countsMethods, minOverlap, substitutions, verbose = False):
    """Quantify bubbles against the reads and write the final sorted results.

    Runs ks_kissreadsSNPS on type-0 (SNP) bubbles and ks_kissreadsSplice on
    type-1234 bubbles, then writes read-coherent events, split by their
    "Type_<i>" tag, to outdir/results_<infix_name>_coherents_type_<i>.fa
    (i in 0..4), and all non-coherent events to
    outdir/results_<infix_name>_uncoherent.fa.

    Returns a list of 5 counters: the number of coherent events per type.
    """
    print "Compute read coherence and coverage"
    if output_snps:
        destinationSNPS = open(workdir+"/all_bcc_type0_"+str(kval), 'ab') ## THE UNIQUE FILE ALSO CONTAINS SNPS
        shutil.copyfileobj(open(workdir+"/bcc/graph_all_log_bcc", 'rb'), destinationSNPS)
        destinationSNPS.close()

    # Two KisSreads executions, one for type 0 one for type 1234
    
    commandLineType0 = KISSPLICE_INSTDIR+"/ks_kissreadsSNPS "+workdir+"/all_bcc_type0_"+str(kval)+" "+readfiles+" -k "+str(kval + 1 )+" -o "+ workdir+"/coherentType0.fa -u "+workdir+"/uncoherentType0.fa  -d " + str(substitutions) + " -c 1 -n"
    #  Due to the k extension, anchor should be of size k+1 for SNP
    subprocessLauncher(commandLineType0, verbose=verbose)
    # no n options anymore
    commandLineType1234 = KISSPLICE_INSTDIR+"/ks_kissreadsSplice "+workdir+"/all_bcc_type1234_"+str(kval)+" "+readfiles+" -k "+str(kval + 1 )+" -o "+workdir+"/coherentType1234.fa -u "+workdir+"/uncoherentType1234.fa  -d " + str(substitutions) + " -c 1 -j " + countsMethods +" -l " + str(minOverlap)
    subprocessLauncher(commandLineType1234, verbose=verbose)
   
    # Merge the two "uncoherent" outputs into a single file.
    commandLineCat = "cat " +  workdir+"/uncoherentType1234.fa " + workdir+"/uncoherentType0.fa "
    subprocessLauncher(commandLineCat, workdir + "/uncoherent.fa", "a", verbose=verbose )
    
    print "Sorting all bubbles..."

    nb = [0]*5  # counter of number of events of each type found
    cofilel = []
    for i in range(0,5):
        cofilel.append(open(outdir+"/results_"+infix_name+"_coherents_type_"+str(i)+".fa", 'w'))
    # Type-0 (SNP) events all go to the type-0 output file, sorted in
    # reverse lexicographic order of the raw result lines.
    snpsFile = open(workdir+"/coherentType0.fa", 'r')
    l = snpsFile.readlines()
    l.sort( reverse = True )
    snpsFile.close()
    for event in l:
        cofilel[0].write( event.split()[-1].replace(';','\n')+"\n")#Transform to Fasta type
        nb[0] += 1

    # handling coherent "other"
    cofile = open(workdir+"/coherentType1234.fa", 'r')
    l = cofile.readlines()
    l.sort(reverse=True)        
    cofile.close()    
    retype = re.compile('Type_\d+')   
    for event in l:
        try:
            # Dispatch each event to the output file matching its Type_<i>
            # tag; lines without a tag are silently skipped.
            type = retype.search(event).group()
            for i in range(1,5):
                if (type=="Type_"+str(i)):
                    cofilel[i].write(event.split()[-1].replace(';','\n')+"\n")#Transform to Fasta type
                    nb[i] += 1
        except:
            pass      
    for i in range(0,5):
        cofilel[i].close()
    # Non-coherent events go unsorted to a single file.
    uncofile = open(workdir+"/uncoherent.fa", 'r') 
    uncofileout = open(outdir+"/results_"+infix_name+"_uncoherent.fa", 'w')
    for event in uncofile.readlines(): 
        uncofileout.write(event.split()[-1].replace(';','\n')+"\n")
    uncofile.close()
    uncofileout.close()
    print "\n\n \t\t ******** We are done, final coherent results are in files "+outdir+"/results_"+infix_name+"_coherents_type_*.fa ********** " 
    print " \t\t ******** All non read coherent results are in files "+outdir+"/results_"+infix_name+"_uncoherent.fa ****** \n\n"
    return nb

    
def sort_all_bubbles(KISSPLICE_INSTDIR, readfiles, workdir, outdir, kval, output_snps, infix_name, verbose = False):
    """Sort bubbles by type without read-coherency checking (graph-only mode).

    Copies the type-0 (SNP) bubbles verbatim and dispatches each type-1234
    record to outdir/results_<infix_name>_type_<i>.fa according to its
    "Type_<i>" tag.

    Returns a list of 5 counters: number of events per type.
    """
    if output_snps:
        destination = open(workdir+"/all_bcc_type0_"+str(kval), 'ab') ## THE UNIQUE FILE ALSO CONTAINS SNPS
        shutil.copyfileobj(open(workdir+"/bcc/graph_all_log_bcc", 'rb'), destination)
        destination.close()
            
    print "Sampling bubbles by type..."
    retype = re.compile('Type_\d+')
    filel = []
    for i in range(0,5):
        filel.append(open(outdir+"/results_"+infix_name+"_type_"+str(i)+".fa", 'w'))
    
    nb = [0]*5
    # handling type 0 ( snps ) 
    cfile = open(workdir+"/all_bcc_type0_"+str(kval), 'r')
    ofile = filel[0]
    shutil.copyfileobj(cfile, ofile) 
    cfile.close()
    # Re-open to count the lines: 2 lines per SNP event.
    # NOTE(review): nb[0] is halved here AND halved again in the final loop
    # below, so the type-0 count ends up divided by 4 — verify this is intended.
    cfile = open(workdir+"/all_bcc_type0_"+str(kval), 'r')
    nb[0] = sum(1 for line in cfile)/2
    cfile.close()
    # handling the other type
    cfile = open(workdir+"/all_bcc_type1234_"+str(kval), 'r')   
    for line in cfile.readlines():
        try:
            type = retype.search(line).group()
            for i in range(0,5):
                if (type=="Type_"+str(i)):
                    ofile = filel[i] 
                    nb[i] += 1                
        except:
            pass
        # Lines without a "Type_" tag (e.g. sequence lines) are written to the
        # file selected by the most recent tagged line.
        ofile.write(line)     
    for i in range(0,5):
        nb[i] /= 2
        filel[i].close()                 
    print "\n\n \t\t ******** We are done, final results are in files "+outdir+"/results_"+infix_name+"_type_*.fa **********" 
    cfile.close()
    return nb
        
def save_unfinished_bccs(unfinished_bccs, KISSPLICE_INSTDIR, workdir, outdir, verbose = False):
    if not os.path.exists(outdir + "/unfinished_bcc"):  
        os.mkdir(outdir + "/unfinished_bcc")
    infofile = workdir+"/bcc/graph_info_bcc"
    contents_file_edges = workdir+"/bcc/graph_contents_edges_bcc"
    contents_file_nodes = workdir+"/bcc/graph_contents_nodes_bcc"
    basename_edges = workdir+"/bcc/graph_all_edges_bcc"
    basename_nodes = workdir+"/bcc/graph_all_nodes_bcc"
    print "\t\t Backup files for the unfinished BCCs are in " + outdir + "/unfinished_bcc\n"
    
    for i in unfinished_bccs:    
        command_line = KISSPLICE_INSTDIR+"/ks_print_bcc "+ infofile+" "+ contents_file_edges+" "+ contents_file_nodes+" "+ basename_edges+" "+ basename_nodes\
            +" "+str(i)+" "\
            + outdir+"/unfinished_bcc/graph_edges_bcc_"+str(i)+" "\
            + outdir+"/unfinished_bcc/graph_nodes_bcc_"+str(i)
        subprocessLauncher(command_line, verbose=verbose)
            
# ############################################################################
#                                   Main
# ############################################################################
def main():
  # @variable KISSPLICE_INSTDIR : Path to the main executable (this file)
  # @variable KS_SEC_EXEC_PATH  : Path to the secondary executables (eg ks_kissreads)
  #                               ~ KISSPLICE_INSTDIR/../lib/kissplice
  KISSPLICE_INSTDIR = os.path.dirname(os.path.abspath(sys.argv[0])).rstrip('/')
  KS_SEC_EXEC_PATH  = KISSPLICE_INSTDIR.rsplit('/',1)[0]+'/lib/kissplice'
  
  # ========================================================================
  #                        Manage command line arguments
  # ========================================================================
  parser = argparse.ArgumentParser(description='kisSplice - local assembly of SNPs, indels and AS events') 
  
  # ------------------------------------------------------------------------
  #                            Define allowed options
  # ------------------------------------------------------------------------
  parser.add_argument("-r", action="append", dest="readfiles",
                      help="input fasta/q read files or compressed (.gz) fasta/q files (mutiple, such as \"-r file1 -r file2...\") ") 
  parser.add_argument('-k', action="store", dest="kval", type=int, default=41,
                      help="k-mer size (default=41)")    
  parser.add_argument('-l', action="store", dest="llmax", type=int, default=0,
                      help="maximal length of the shorter path (default: 2k-1)")    
  parser.add_argument('-m', action = "store", dest = "LL_MIN", default = 0, help = "minimum length of the shorter path (default 2k-8)")
  parser.add_argument('-M', action = "store", dest = "UL_MAX", default = 10000, help = "maximum length of the longest path (default 10000), skipped exons longer than UL_MAX are not reported")
  parser.add_argument('-g', action="store", dest="graph_prefix", default="",
                      help="path and prefix to pre-built de Bruijn graph (suffixed by .edges/.nodes)\n \
                      if jointly used with -r, graph used to find bubbles and reads used for quantification")
  parser.add_argument('-o', action="store", dest="out_dir", default="results",
                      help="path to store the results (default = ./results)")   
  parser.add_argument('-d', action="store", dest="path_to_tmp", default=None,
                      help="specific directory (absolute path) where to build temporary files (default temporary directory otherwise)")
  parser.add_argument('-t', action="store", dest="nbprocs", type=int, default=1,
                      help="number of cores (must be <= number of physical cores)")
  parser.add_argument('-s', action="store_true", dest="nooutput_snps", help="Will not output SNPs (save time)")  
  parser.add_argument('-v', action="store_true", dest="verbose", help="Verbose mode")   
  parser.add_argument('-u', action="store", dest="keep_ubccs", type=int, default=1, help=" 0 or 1. 1 to keep the nodes/edges file for unfinished bccs, 0 to discard (default:1) ")
  parser.add_argument('-c',  action = "store", dest = "min_cov", default = 2, help="an integer, k-mers present strictly less than this number of times in the dataset will be discarded (default 2)")
  parser.add_argument('-C',  action = "store", dest = "min_relative_cov", default = 0.02, help="a percentage from [0,1), edges with relative coverage below this number are removed (default 0.02)")
  parser.add_argument('-z', action="store", dest="genome_size", type=int, default=1000000000,
                      help="estimated genome/transcriptome size (default = 1000000000)")    
  parser.add_argument('-e',  action = "store", dest = "min_edit_dist", default = 3, 
                      help="edit distance threshold, if the two sequences (paths) of a bubble have edit distance smaller than this threshold, the bubble is classified as an inexact repeat (default 3)")
  parser.add_argument('-y',  action = "store", dest = "max_cycles", default = 10000,
                       help="maximal number of bubbles enumeration in each bcc. If exceeded, no bubble is output for the bcc (default 10000)")
  parser.add_argument('--mismatches',  action = "store", dest = "mism", default = 0, type = int, 
                      help="Maximal number of substitutions authorized between a read and a fragment (for quantification only), default 0. If you increase the mismatch and use --counts think of increasing min_overlap too.")
  parser.add_argument('--counts',  action = "store", dest = "countsMethod", default = "0", help="0,1 or 2 . Changes how the counts will be reported. If 0 (default): total counts, if 1: counts on junctions, if 2: all counts. see User guide for more information ")
  parser.add_argument('--min_overlap',  action = "store", dest = "minOverlap", default = 3, help="Set how many nt must overlap a junction to be counted by --counts option. Default=3. see User guide for more information ")
  parser.add_argument('--timeout', action='store', dest="timeout", default=TIMEOUT,
                      help="max amount of time (in seconds) spent for enumerating bubbles in each bcc. If exceeded, no bubble is output for the bcc (default 900)")
  parser.add_argument('--version', action='version', version='%(prog)s 2.1.0') 
  parser.add_argument('--output-context', action="store_true", dest="output_context", default = False, help="Will output the maximum non-ambiguous context of a bubble")
  parser.add_argument('--output-path', action="store_true", dest="output_path", default = False, help="Will output the id of the nodes composing the two paths of the bubbles.")
    

  # ------------------------------------------------------------------------
  #               Parse and interpret command line arguments
  # ------------------------------------------------------------------------
  options = parser.parse_args()     


 # ------------------------------------------------------------------------
 #                 Print version and command line
 # ------------------------------------------------------------------------

  print "\nThis is KisSplice, version 2.1.0"
  print "The command line was:\n"
  print ' '.join(sys.argv)
  print "\n"
  # ---------------------------------------------------------- Input options
  readfiles = None
  only_graph = False
  if options.readfiles:
    if options.graph_prefix: # GRAPH + READS
      print "-r and -g options used together: ",
      print "the graph will be used to find events, while reads files are used for checking read-coherency and coverage"
    readfiles = ' '.join(map(str, options.readfiles))
  else:
    if not options.graph_prefix: # GRAPH     
      parser.print_usage()
      print "Try `kissplice --help` for more information"
      sys.exit(1);
    else:
      only_graph = True
      
  nobuild = False
  if options.graph_prefix:
    nobuild = True
   
  # --------------------------------------------------------- Output options
  outdir = options.out_dir
  if not os.path.exists(outdir):
    os.mkdir(outdir)
  
  output_snps = True
  if options.nooutput_snps:
    output_snps = False
  if options.verbose:
    print "Will work on reads contained in files ", readfiles
    print "Results stored in ", options.out_dir  
  
  # ------------------------------------------------------------- k-mer size
  kval = options.kval
  if kval %2 == 0:
    print "please use only odd value for k"# otherwise, DBG use k-1 and output context do not work
    sys.exit(1);
  # ------------------------------------- Maximal length of the shorter path
  if options.llmax != 0:
    LL_MAX = options.llmax
  else:
    LL_MAX = 2 * kval - 1
  # The following are not optional but work along with llmax
  UL_MAX = options.UL_MAX # Defines maximum upper and lower path bounds
  if options.LL_MIN != 0:
    LL_MIN= options.LL_MIN
  else:
    LL_MIN = 2 * kval - 8
    
  min_ll_max = 2 * kval - 1
  if LL_MAX < min_ll_max:
    print "Error, maximal length of the shorter path (=$LL_MAX) should be bigger than 2k-1=$min_ll_max"
    sys.exit(1)
 
  # ------------------------------------------------------- Minimum coverage
  min_cov = options.min_cov
  
  min_edit_dist = options.min_edit_dist
  max_cycles = options.max_cycles
  countsMethod = options.countsMethod


  #--------------------------------------------------------- Minimum overlap for --counts option

  minOverlap = options.minOverlap
  # ========================================================================
  #            Construct intermediate and output file names
  # ========================================================================
  workdir = mkdirTmp(options.path_to_tmp)  
  infix_name = "" # will be the central part of the output file names
  if options.graph_prefix:  
    graphfile = options.graph_prefix
  if options.readfiles:
    for file in options.readfiles: 
      justfilename =  file.split("/")[-1].split(".")[0] #remove what is before the "/" and what is after the "."
      infix_name += justfilename+"_"
    infix_name = infix_name[0:200] # Truncate it to contain at most 200 characteres       
    infix_name += "k" + str(kval)
    if not options.graph_prefix:    
      graphfile = options.out_dir+"/graph_"+infix_name # Output graph file
      print "Graph will be written in "+graphfile+".[edges/nodes]"
  else:
    infix_name = graphfile.split("/")[-1].split(".")[0] #remove what is before the "/" and what is after the "."


  # ========================================================================
  #                                   RUN
  # ========================================================================
  # ------------------------------------------------------------------------
  #                          Build De Bruijn Graph
  # ------------------------------------------------------------------------
  if not nobuild:
    t = time.time()
    build_graph(KS_SEC_EXEC_PATH, workdir, readfiles, kval, graphfile, min_cov, options.genome_size, options.verbose)
    if options.verbose:
      print "Elapsed time: ", (time.time() - t), " seconds"
  
  # ------------------------------------------------------------------------
  #                          Error removal
  # ------------------------------------------------------------------------    
  t = time.time()    
  if float(options.min_relative_cov) > 0.0001: 
     error_removal(KS_SEC_EXEC_PATH, workdir, kval, graphfile, options.min_relative_cov, options.verbose)  
     exec_error_removal = True
  else:
     exec_error_removal = False
  if options.verbose:
    print "Elapsed time: ", (time.time() - t), " seconds"    

  # ------------------------------------------------------------------------
  #                        Decompose and simplify DBG
  # ------------------------------------------------------------------------
  t = time.time()
  run_modules(KS_SEC_EXEC_PATH, workdir, graphfile, kval, options.min_relative_cov, options.verbose, options.output_context, exec_error_removal)
  if options.verbose:
    print "Elapsed time: ", (time.time() - t), " seconds"
      
      
  # ------------------------------------------------------------------------
  #                             Enumerate Bubbles
  # ------------------------------------------------------------------------
  t = time.time()
  enumerate_all_bubbles(KS_SEC_EXEC_PATH, workdir, outdir, kval, output_snps, min_edit_dist, max_cycles, \
                        UL_MAX, LL_MAX, LL_MIN, float(options.timeout), options.nbprocs, options.verbose, \
                        options.output_context, options.output_path)
  if options.verbose:
    print "Elapsed time: ", (time.time() - t), " seconds"
      
      
  # ------------------------------------------------------------------------
  #            Check read coverage (optionnal) and sort bubbles
  # ------------------------------------------------------------------------
  t = time.time()
  nb = []
  if only_graph:
    nb = sort_all_bubbles(KS_SEC_EXEC_PATH, readfiles, workdir, outdir, kval, output_snps, infix_name, options.verbose)
  else:
    nb = check_read_coverage_and_sort_all_bubbles(KS_SEC_EXEC_PATH, readfiles, workdir, outdir, kval, output_snps, infix_name, countsMethod,  options.mism, options.verbose)
  if options.verbose:
    print "Elapsed time: ", (time.time() - t), " seconds\n"
  
 
  # ------------------------------------------------------------------------
  #                           Manage unfinished BCCs
  # ------------------------------------------------------------------------
  if len(unfinished_bccs) != 0:     
      print "\t\t ******** There were " + str(len(unfinished_bccs)) + " BCCs with unfinished enumeration ********"
      if options.keep_ubccs == 0 :
          print "\t\t ******** Re-run with `-u` to retrieve the unfinished components ********\n"
      else:  
          save_unfinished_bccs(unfinished_bccs, KS_SEC_EXEC_PATH, workdir, outdir, options.verbose)
              
  # ------------------------------------------------------------------------
  #                 Output number of events of each type
  # ------------------------------------------------------------------------
  print "\t\t TYPES:"
  if output_snps:
    print "\t\t\t 0: SNPs, Inexact Repeats or sequencing substitution errors, "+str(nb[0])+" found"
  else:
    print "\t\t\t 0: With -s option, SNPs and sequencing substitution where not searched"
  print "\t\t\t 1: Alternative Splicing Events, "+str(nb[1])+" found"
  print "\t\t\t 2: Inexact Tandem Repeats, "+str(nb[2])+" found"
  print "\t\t\t 3: Short Indels (<3nt), "+str(nb[3])+" found"
  print "\t\t\t 4: All others, composed by a shorter path of length > 2k-1 not being a SNP, "+str(nb[4])+" found"  
   
  # ------------------------------------------------------------------------
  #                           Clean tmp directory
  # ------------------------------------------------------------------------
       
  if options.output_path: # move paths file to outdir
      shutil.move(workdir+"/all_paths_k"+str(kval), outdir + "/all_paths_k"+str(kval))

  cleanTmp(workdir)
# print workdir
   
# Script entry point: run the full pipeline when invoked directly.
if __name__ == '__main__':
    main()
    
