#!/usr/bin/env python3 # *************************************************************************** # * Authors: Carlos Oscar S. Sorzano (coss@cnb.csic.es) # * David Maluenda (dmaluenda@cnb.csic.es) # * # * # * This program is free software; you can redistribute it and/or modify # * it under the terms of the GNU General Public License as published by # * the Free Software Foundation; either version 2 of the License, or # * (at your option) any later version. # * # * This program is distributed in the hope that it will be useful, # * but WITHOUT ANY WARRANTY; without even the implied warranty of # * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # * GNU General Public License for more details. # * # * You should have received a copy of the GNU General Public License # * along with this program; if not, write to the Free Software # * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA # * 02111-1307 USA # * # * All comments concerning this program package may be sent to the # * e-mail address 'scipion@cnb.csic.es' # ***************************************************************************/ import distutils.spawn import glob import json import os import re import shutil import sys import subprocess from datetime import datetime # ----K-E-E-P----U-P-D-A-T-E-D---- # #################################### XMIPP_VERSION = '3.20.07' # XMIPP_VERNAME = 'Boreas' # RELEASE_DATE = '14/10/2020' # #################################### XMIPP = 'xmipp' XMIPP_CORE = 'xmippCore' XMIPP_VIZ = 'xmippViz' SCIPION_EM_XMIPP = 'scipion-em-xmipp' CUFFTADVISOR = 'cuFFTAdvisor' CTPL = 'CTPL' XMIPP_SCRIPT_VERSION = '' REPOSITORIES = {XMIPP: 'https://github.com/I2PC/xmipp.git', XMIPP_CORE: 'https://github.com/I2PC/xmippCore.git', XMIPP_VIZ: 'https://github.com/I2PC/xmippViz.git', SCIPION_EM_XMIPP: 'https://github.com/I2PC/scipion-em-xmipp.git', CUFFTADVISOR: 'https://github.com/DStrelak/cuFFTAdvisor.git', CTPL: 'https://github.com/vit-vit/CTPL.git'} # {dep-repo: (branch, check-conf)} DEPENDENCIES = {CUFFTADVISOR: ('master', 'CUDA'), CTPL: ('master', None)} CONFIG_FILE_NAME = "xmipp.conf" # if a skippable compilation fails (if 'key' found in the failed code), # a hint is printed in order to export 'value' or edit the config file SKIPPABLE_BINS = {"optical_alignment": "OPENCV=False", "volume_homogenizer": "OPENCV=False", "cuda": "CUDA=False"} XMIPP_ENV = {} def updateXmippEnv(pos='begin', realPath=True, **kwargs): """ Add/update a variable in XMIPP_ENV dictionary pos = {'begin', 'end', 'replace'} """ for key, value in kwargs.items(): isString = isinstance(value, str) if isString and realPath: value = os.path.realpath(value) if key in XMIPP_ENV: if pos == 'begin' and isString: XMIPP_ENV[key] = value + os.pathsep + XMIPP_ENV[key] elif pos == 'end' and isString: XMIPP_ENV[key] = XMIPP_ENV[key] + os.pathsep + value elif pos == 'replace': XMIPP_ENV[key] = str(value) else: XMIPP_ENV[key] = str(value) def checkGithubConnection(): from http.client import HTTPConnection from socket import gaierror conn = HTTPConnection("www.github.com", timeout=3) try: conn.request("HEAD", "/") return True except gaierror: return False finally: conn.close() def stampVersion(): LAST_COMPILATION = datetime.now().strftime("%d/%m/%Y") def getCommit(repo): """ In devel mode 'commit.info' should not exist. 
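# Illustrative sketch (values are hypothetical) of how updateXmippEnv() composes the
# entries of XMIPP_ENV, which writeEnviron() later dumps to xmippEnv.json:
#
#   updateXmippEnv(LD_LIBRARY_PATH='/opt/hdf5/lib')          # new key: stored (realpath applied)
#   updateXmippEnv(LD_LIBRARY_PATH='/usr/local/cuda/lib64')  # existing key, pos='begin': prepended with os.pathsep
#   updateXmippEnv(CUDA=True, pos='replace')                 # non-string values are stored as str(value)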
In production mode 'commit.info' is added by tar.py """ commitFn = os.path.join('src', repo, 'commit.info') notFoundInfo = "(no git repo detected)" if os.path.isfile(commitFn): with open(commitFn, 'r') as file: commitInfo = file.readline() elif ensureGit(False): found, br, hsh = getCurrentBranch(os.path.join('src', repo), getHash=True) commitInfo = "%s (%s)" % (br, hsh) if found else notFoundInfo else: commitInfo = notFoundInfo return commitInfo compilingInfo = {'XMIPP_VERSION': XMIPP_VERSION, 'RELEASE_DATE': RELEASE_DATE, 'XMIPP_BRANCH': getCommit(XMIPP), 'PLUGIN_BRANCH': getCommit(SCIPION_EM_XMIPP), 'CORE_BRANCH': getCommit(XMIPP_CORE), 'VIZ_BRANCH': getCommit(XMIPP_VIZ), 'LAST_COMPILATION': LAST_COMPILATION, 'XMIPP_VERNAME': XMIPP_VERNAME } versionBinFn = os.path.join('src', 'xmipp', 'applications', 'programs', 'version', 'version.cpp') createDir(os.path.dirname(versionBinFn)) with open(versionBinFn, 'w') as f: f.write("""// Auto-generated code to get compilation Info #include #include #include using namespace std; int main(int argc, char** argv){ if (argc>2) { std::cout << "Incorrect parameter" << std::endl; return 1; } int shrt = 0; if (argc>1) { if((strcmp(argv[1], "--short") == 0)) { shrt = 1; }else{ std::cout << "Incorrect parameter: " << argv[1] << std::endl; return 2; } } if (shrt==1) { std::cout << "%(XMIPP_VERSION)s" << std::endl; }else{ struct utsname utsname; // stores the data returned by uname() struct utsname *utsname_ptr = &utsname; // pointer to the struct holding the data returned by uname() int ret; ret = uname(utsname_ptr); std::cout << std::endl; std::cout << " \033[4mXmipp version\033[24m: \033[1m%(XMIPP_VERSION)s (%(XMIPP_VERNAME)s)\033[0m" << std::endl; std::cout << std::endl; std::cout << " Release date: %(RELEASE_DATE)s" << std::endl; std::cout << " Xmipp branch: %(XMIPP_BRANCH)s" << std::endl; std::cout << " Plugin branch: %(PLUGIN_BRANCH)s" << std::endl; std::cout << " Core branch: %(CORE_BRANCH)s" << std::endl; std::cout << " Viz branch: %(VIZ_BRANCH)s" << std::endl; std::cout << " Compilation date: %(LAST_COMPILATION)s" << std::endl; std::cout << " Compiler: g++ " << __VERSION__ << std::endl; std::cout << " Compiling system: " << utsname.machine << " " << utsname.sysname << " " << utsname.release << std::endl << " " << utsname.version << std::endl; std::cout << std::endl; } return 0; } """ % compilingInfo) def whereis(program, findReal=False, env=None): programPath=distutils.spawn.find_executable(program, path=env) if programPath: if findReal: programPath = os.path.realpath(programPath) return os.path.dirname(programPath) else: return None def createDir(dirname): if not os.path.exists(dirname): os.makedirs(dirname) def checkProgram(programName, show=True): systems = ["Ubuntu/Debian","ManjaroLinux"] try: osInfo = subprocess.Popen(["lsb_release", "--id"], stdout=subprocess.PIPE, env=os.environ).stdout.read().decode("utf-8") osName = osInfo.split('\t')[1].strip('\n') osId = -1 # no default OS for idx, system in enumerate(systems): if osName in system: osId = idx except: osId = -1 systemInstructions = {} # Ubuntu/Debian ; ManjaroLinux systemInstructions["git"] = ["sudo apt-get -y install git","sudo pacman -Syu --noconfirm git"] systemInstructions["gcc"] = ["sudo apt-get -y install gcc","sudo pacman -Syu --noconfirm gcc"] systemInstructions["g++"] = ["sudo apt-get -y install g++","sudo pacman -Syu --noconfirm g++"] systemInstructions["mpicc"] = ["sudo apt-get -y install libopenmpi-dev","sudo pacman -Syu --noconfirm openmpi"] systemInstructions["mpicxx"] = ["sudo 
apt-get -y install libopenmpi-dev","sudo pacman -Syu --noconfirm openmpi"] systemInstructions["scons"] = ['sudo apt-get -y install scons or make sure that Scipion Scons is in the path',"sudo pacman -Syu --noconfirm scons"] systemInstructions["javac"] = ['sudo apt-get -y install default-jdk default-jre',"sudo pacman -Syu --noconfirm jre"] systemInstructions["rsync"] = ["sudo apt-get -y install rsync" , "sudo pacman -Syu --noconfirm rsync"] systemInstructions["pip"] = ["sudo apt-get -y install python3-pip" , "sudo pacman -Syu --noconfirm pip"] systemInstructions["make"] = ["sudo apt-get -y install make" , "sudo pacman -Syu --noconfirm make"] ok=True cont = True if not whereis(programName): if cont: if show: print(red("Cannot find '%s'." % os.path.basename(programName))) idx=0 if programName in systemInstructions: if osId >= 0: print(red(" - %s OS detected, please try: %s" % (systems[osId], systemInstructions[programName][osId]))) else: print(red(" Do:")) for instructions in systemInstructions[programName]: print(red(" - In %s: %s"%(systems[idx],instructions))) idx+=1 print("\nRemember to re-run './xmipp config' after install new software in order to " "take into account the new system configuration.") ok = False else: ok = False return ok def green(text): return "\033[92m "+text+"\033[0m" def yellow(text): return "\033[93m " + text + "\033[0m" def red(text): return "\033[91m "+text+"\033[0m" def blue(text): return "\033[34m "+text+"\033[0m" def runJob(cmd, cwd='./', show_output=True, log=None, show_command=True, inParallel=False): if show_command: print(green(cmd)) p = subprocess.Popen(cmd, cwd=cwd, env=os.environ, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True) while not inParallel: output = p.stdout.readline().decode("utf-8") if output == '' and p.poll() is not None: break if output: l = output.rstrip() if show_output: print(l) if log is not None: log.append(l) if inParallel: return p else: return 0 == p.poll() def cleanSources(): for dep in DEPENDENCIES.keys(): runJob("rm -rf src/%s" % dep) runJob("rm -rf src/scipion-em-xmipp") runJob("rm -rf src/xmippCore") runJob("rm -rf src/xmippViz") runJob("rm -rf src/xmipp/bin") runJob("rm -rf src/xmipp/lib") runJob("rm -rf src/xmipp/.sconsign.dblite") if ensureGit(False): runJob("git stash") # to get exactly like in repo def cleanBinaries(): for ext in ['so', 'os', 'o']: runJob('find src/* -name "*.%s" -exec rm -rf {} \;' % ext) runJob('find . -iname "*.pyc" -delete') runJob("rm -rf %s build" % CONFIG_FILE_NAME) # I'm getting ValueError : unsupported pickle protocol: 5' when switching from one python version to another # This seems to be cached at dblite file. runJob('find . -iname "*.dblite" -delete') def checkout(branch): r, currentBranch = getCurrentBranch() if currentBranch == branch: return True if isRepositoryClean() and runJob("git checkout %s" % branch): return True print(red("Cannot checkout branch '%s'. Remaining on the branch '%s'." 
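# Illustrative sketch of runJob(): it runs a shell command (echoed in green unless
# show_command=False), optionally collects the output lines into a caller-provided list,
# and returns True on exit code 0, or the Popen object when inParallel=True.
#
#   log = []
#   ok = runJob('git status', show_output=False, show_command=False, log=log)  # ok: bool, log: output lines
#   p = runJob('make all', cwd='src/cuFFTAdvisor', inParallel=True)            # hypothetical call; poll p later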
% (branch, currentBranch))) return False def isRepositoryClean(showError=True): log = [] words = ['working directory clean', 'working tree clean'] runJob('git status', show_output=False, show_command=False, log=log) result = any(w in l for w in words for l in log) # True for clean repo, False otherwise if showError and not result: print(red('Repository contains uncommitted changes.')) print("Use 'compileAndInstall' mode to keep developing.") return result def pull(): isRemoteBranch = runJob('git rev-parse HEAD@{upstream}', show_command=False, show_output=False) if checkGithubConnection() and isRemoteBranch: return runJob("git pull", show_command=False) return True # meaning that this is a local branch or we are offline, so pull doesn't make sense def cloneOrCheckout(repo, branch): repo_dir = os.path.join('src', repo) if repo != XMIPP else '.' if branch not in getAllBranches(repo): branch = getBranch(repo, repo_dir)[1] if not os.path.exists(repo_dir): # If the repo doesn't exist, just clone the whole repo if branch is None: # let the git client to decide what is the default branch return runJob("git clone %s %s" % (REPOSITORIES[repo], repo_dir)) else: return runJob("git clone -b %s %s %s" % (branch, REPOSITORIES[repo], repo_dir)) else: workDir = os.getcwd() os.chdir(repo_dir) print(green('Checkouting ' + repo + ' ...')) res = checkout(branch) and pull() os.chdir(workDir) return res def getCurrentTravisBranch(): # see https://docs.travis-ci.com/user/environment-variables/ # On Travis, PR will have the TRAVIS_PULL_REQUEST_BRANCH variable non-empty # otherwise the TRAVIS_BRANCH will hold the name of the current branch if 'TRAVIS_PULL_REQUEST_BRANCH' in os.environ and 'TRAVIS_BRANCH' in os.environ: current_branch = os.environ['TRAVIS_PULL_REQUEST_BRANCH'] or os.environ['TRAVIS_BRANCH'] print(green("Detected branch: " + current_branch)) return True, current_branch return False, None def getCurrentBranch(cwd='./', getHash=False): """ If getHash=True: return (success, branch, hash) If getHash=False: return (success, branch) """ outBranchArgs = 2 if getHash else 1 log = [] commit = [] if not os.path.exists(cwd) or not runJob('git rev-parse --short HEAD', cwd=cwd, show_output=False, show_command=False, log=commit): return (False,) + (None,)*outBranchArgs runJob('git name-rev ' + commit[0], cwd=cwd, show_output=False, show_command=False, log=log) if log: # log contains commit_space_branchName return (True,) + tuple(log[0].split()[::-1][0:outBranchArgs]) print(red('Cannot get current branch')) return (False,) + (None,)*outBranchArgs def getAllBranches(repo): log = [] prefix = 'refs/heads/' result = runJob('git ls-remote -h %s' % REPOSITORIES[repo], show_output=False, log=log, show_command=False) if result: branches = [l.split(prefix)[1] for l in log] return True, branches print(red('Cannot list branches for ' + repo)) return False, None def getBranch(repo, repo_dir): # return current branch r, branch = getCurrentBranch(repo_dir) if r: return r, branch # return same branch as xmipp # we need to get current branch of the xmipp r1, branchHint = (getCurrentTravisBranch() if 'TRAVIS' in os.environ else getCurrentBranch()) r2, branches = getAllBranches(repo) if r1 and r2 and branchHint in branches: return True, branchHint # default branch return getDefaultBranch(repo) def getDefaultBranch(repo): log = [] key = 'HEAD branch:' # this might not work for git < 1.8.5, # see https://stackoverflow.com/a/32503667/5484355 # and https://stackoverflow.com/questions/2832269/git-remote-head-is-ambiguous # In such a case 
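# Illustrative sketch (branch names are hypothetical) of the branch-resolution chain
# used by cloneOrCheckout():
#
#   ok, branch, hsh = getCurrentBranch('src/xmippCore', getHash=True)  # e.g. (True, 'devel', '1a2b3c4')
#   ok, branches = getAllBranches(XMIPP_CORE)                          # e.g. (True, ['devel', 'master', ...])
#   # getBranch() tries, in order: the repo's current branch, the branch xmipp itself is on
#   # (or the TRAVIS_* variables on CI), and finally the remote's default branch.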
we return None (and during e.g. clone the client should # decide what is the default branch) result = runJob('git remote show %s' % REPOSITORIES[repo], show_output=False, log=log, show_command=False) if result: for l in log: if key in l: branch = l.split(key)[1] # HEAD branch: devel return (True, branch.strip()) print(red('Cannot auto-detect default branch for ' + repo + '. Maybe git version < 1.8.5?')) return (False, None) def getSources(branch): print("Getting sources -------------------------------------") ensureGit(True) createDir("src") repos = [XMIPP_CORE, XMIPP_VIZ, SCIPION_EM_XMIPP] if 'TRAVIS' not in os.environ: # on Travis, do not change current commit repos.append(XMIPP) for r in repos: if not cloneOrCheckout(r, branch): print(red("Cannot get the sources")) return False return True def is_config_true(key): return configDict and (key in configDict) and (configDict[key] == 'True') def getDependencies(): print("Getting Dependencies -------------------------------------") ensureGit(True) createDir("src") result = True for dep, args in DEPENDENCIES.items(): branch = args[0] configChecked = not args[1] or is_config_true(args[1]) if configChecked and result: result = cloneOrCheckout(dep, branch) if not result: print(red("Cannot get dependencies")) return result def getScipionHome(): """ Returns SCIPION_HOME, the directory for scipion3 or EMPTY str. """ return os.environ.get("SCIPION_HOME", whereis("scipion3")) or '' def readConfigFile(fnConfig): try: from ConfigParser import ConfigParser, ParsingError except ImportError: from configparser import ConfigParser, ParsingError # Python 3 retval = None cf = ConfigParser() cf.optionxform = str # keep case (stackoverflow.com/questions/1611799) try: if os.path.isdir(fnConfig): if os.path.exists(os.path.join(fnConfig,CONFIG_FILE_NAME)): fnConfig = os.path.join(fnConfig,CONFIG_FILE_NAME) else: fnConfig = os.path.join(fnConfig, "xmipp.template") if os.path.exists(fnConfig): cf.read(fnConfig) if not 'BUILD' in cf.sections(): print(red("Cannot find section BUILD in %s"%fnConfig)) return retval return dict(cf.items('BUILD')) except: sys.exit("%s\nPlease fix the configuration file %s." % (sys.exc_info()[1],fnConfig)) return retval def createEmptyConfig(): labels = ['CC','CXX','LINKERFORPROGRAMS','INCDIRFLAGS','LIBDIRFLAGS','CCFLAGS','CXXFLAGS', 'LINKFLAGS','PYTHONINCFLAGS','MPI_CC','MPI_CXX','MPI_RUN','MPI_LINKERFORPROGRAMS','MPI_CXXFLAGS', 'MPI_LINKFLAGS','NVCC','CXX_CUDA','NVCC_CXXFLAGS','NVCC_LINKFLAGS', 'MATLAB_DIR','CUDA','DEBUG','MATLAB','OPENCV','OPENCVSUPPORTSCUDA','OPENCV3', 'JAVA_HOME','JAVA_BINDIR','JAVAC','JAR','JNI_CPPPATH', 'USE_DL', 'VERIFIED', 'CONFIG_VERSION', 'PYTHON_LIB'] configDict = {} for label in labels: # We let to set up the xmipp configuration via environ. configDict[label] = os.environ.get(label, "") return configDict def findFileInDirList(fnH, dirlist): """ :returns the dir where found or an empty string if not found. dirs can contain *, then first found is returned. 
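# Illustrative sketch of the xmipp.conf layout that readConfigFile() expects
# (values are hypothetical; any label from createEmptyConfig() may appear):
#
#   [BUILD]
#   CC=gcc
#   CXX=g++
#   CXXFLAGS=-std=c++11 -O3
#   CUDA=True
#   NVCC=/usr/local/cuda/bin/nvcc
#
# readConfigFile() returns dict(cf.items('BUILD')) and keeps the key case.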
""" if isinstance(dirlist, str): dirlist = [dirlist] for dir in dirlist: validDirs = glob.glob(os.path.join(dir, fnH)) if len(validDirs) > 0: return os.path.dirname(validDirs[0]) return '' def configOpenCV(configDict): cppProg="#include \n" cppProg+="int main(){}\n" with open("xmipp_test_opencv.cpp", "w") as cppFile: cppFile.write(cppProg) if not runJob("%s -c -w %s xmipp_test_opencv.cpp -o xmipp_test_opencv.o %s" % (configDict["CXX"], configDict["CXXFLAGS"], configDict["INCDIRFLAGS"]), show_output=False): print(yellow("OpenCV not found")) configDict["OPENCV"]=False configDict["OPENCVSUPPORTSCUDA"]=False configDict["OPENCV3"]=False else: configDict["OPENCV"]=True # Check version with open("xmipp_test_opencv.cpp", "w") as cppFile: cppFile.write('#include \n') cppFile.write('#include \n') cppFile.write('int main()' '{std::ofstream fh;' ' fh.open("xmipp_test_opencv.txt");' ' fh << CV_MAJOR_VERSION << std::endl;' ' fh.close();' '}\n') if not runJob("%s -w %s xmipp_test_opencv.cpp -o xmipp_test_opencv %s " % (configDict["CXX"], configDict["CXXFLAGS"], configDict["INCDIRFLAGS"]), show_output=False): configDict["OPENCV3"]=False version = 2 # Just in case else: runJob("./xmipp_test_opencv") f=open("xmipp_test_opencv.txt") versionStr=f.readline() f.close() version=int(versionStr.split('.',1)[0]) configDict["OPENCV3"]=version>=3 # Check CUDA Support cppProg="#include \n" cppProg+= "#include \n" if configDict["OPENCV3"] else "#include \n" cppProg+="int main(){}\n" with open("xmipp_test_opencv.cpp", "w") as cppFile: cppFile.write(cppProg) configDict["OPENCVSUPPORTSCUDA"]=runJob("%s -c -w %s xmipp_test_opencv.cpp -o xmipp_test_opencv.o %s"%\ (configDict["CXX"],configDict["CXXFLAGS"],configDict["INCDIRFLAGS"]), show_output=False) print(green("OPENCV-%s detected %s CUDA support" % (version, 'with' if configDict["OPENCVSUPPORTSCUDA"] else 'without'))) runJob("rm -v xmipp_test_opencv*",show_output=False) def getDependenciesInclude(): return ['../'] def askPath(default='', ask=True): question = "type a path where to locate it" if ask: if default: print(yellow("Alternative found at '%s'." % default)) question = "press [return] to use it or " + question else: question = question+" or press [return] to continue" result = input(yellow("Please, "+question+": ")) if not result and default: print(green(" -> "+default)) print() return result if result else default else: if default: print(yellow("Using '%s'." % default)) else: print(red("No alternative found in the system.")) return default def checkLib(gxx, libFlag): """ Returns True if lib is found. 
""" result = runJob('echo "int main(){}" > xmipp_check_lib.cpp ; ' + gxx + ' ' + libFlag + ' xmipp_check_lib.cpp', show_output=False, show_command=False) os.remove('xmipp_check_lib.cpp') os.remove('a.out') if os.path.isfile('a.out') else None return result def configCompiler(configDict): if configDict["DEBUG"] == "": configDict["DEBUG"] = "False" if configDict["CC"] == "" and checkProgram("gcc"): configDict["CC"] = "gcc" print(green('gcc detected')) if configDict["CXX"] == "": if 'TRAVIS' in os.environ: # on TRAVIS, we can use cache to speed up the build configDict["CXX"] = "ccache g++" if checkProgram("g++") else "" else: configDict["CXX"] = "g++" if checkProgram("g++") else "" if configDict["LINKERFORPROGRAMS"] == "": if 'TRAVIS' in os.environ: # on TRAVIS, we can use cache to speed up the build configDict["LINKERFORPROGRAMS"] = "ccache g++" if checkProgram("g++") else "" else: configDict["LINKERFORPROGRAMS"] = "g++" if checkProgram("g++") else "" if configDict["CC"] == "gcc": if not "-std=c99" in configDict["CCFLAGS"]: configDict["CCFLAGS"] += " -std=c99" if 'g++' in configDict["CXX"]: configDict["CXXFLAGS"] += " -mtune=native -march=native" # optimize for current machine if "-std=c99" not in configDict["CXXFLAGS"]: configDict["CXXFLAGS"] += " -std=c++11" if 'TRAVIS' in os.environ: configDict["CXXFLAGS"] += " -Werror" # don't tolerate any warnings on build machine configDict["CXXFLAGS"] += " -O0" # don't optimize on Travis, as it slows down the build else: configDict["CXXFLAGS"] += " -O3" if is_config_true("DEBUG"): configDict["CXXFLAGS"] += " -g" # Nothing special to add to LINKFLAGS from sysconfig import get_paths info = get_paths() if configDict["LIBDIRFLAGS"] == "": localLib = "%s/lib" % info['data'] # /usr/local/lib or /path/to/virtEnv/lib configDict["LIBDIRFLAGS"] = "-L%s" % localLib updateXmippEnv(LD_LIBRARY_PATH=localLib) # extra libs hdf5InLocalLib = findFileInDirList("libhdf5*", localLib) isHdf5CppLinking = checkLib(configDict['CXX'], '-lhdf5_cpp') isHdf5Linking = checkLib(configDict['CXX'], '-lhdf5') if not (hdf5InLocalLib or (isHdf5CppLinking and isHdf5Linking)): print(yellow("\n'libhdf5' not found at '%s'." % localLib)) hdf5Lib = findFileInDirList("libhdf5*", ["/usr/lib", "/usr/lib/x86_64-linux-gnu"]) hdf5Lib = askPath(hdf5Lib, askUser) if hdf5Lib: configDict["LIBDIRFLAGS"] += " -L%s" % hdf5Lib updateXmippEnv(LD_LIBRARY_PATH=hdf5Lib) else: installDepConda('hdf5') if not checkLib(configDict['CXX'], '-lfftw3'): print(red("'libfftw3' not found in the system")) installDepConda('fftw') if not checkLib(configDict['CXX'], '-ltiff'): print(red("'libtiff' not found in the system")) installDepConda('libtiff') if configDict["INCDIRFLAGS"] == "": localInc = "%s/include" % info['data'] # /usr/local/include or /path/to/virtEnv/include configDict["INCDIRFLAGS"] += ' '.join(map(lambda x: '-I' + str(x), getDependenciesInclude())) configDict["INCDIRFLAGS"] += " -I%s" % localInc # extra includes if not findFileInDirList("hdf5.h", [localInc, "/usr/include"]): print(yellow("\nHeaders for 'libhdf5' not found at '%s'." 
% localInc)) hdf5Inc = findFileInDirList("hdf5.h", "/usr/include/hdf5/serial") # Add more candidates if needed hdf5Inc = askPath(hdf5Inc, askUser) if hdf5Inc: configDict["INCDIRFLAGS"] += " -I%s" % hdf5Inc if configDict["PYTHON_LIB"] == "": # malloc flavour is not needed from 3.8 malloc = "m" if sys.version_info.minor < 8 else "" configDict["PYTHON_LIB"] = "python%s.%s%s" % (sys.version_info.major, sys.version_info.minor, malloc) if configDict["PYTHONINCFLAGS"] == "": import numpy incDirs = [info['include'], numpy.get_include()] configDict["PYTHONINCFLAGS"] = ' '.join(["-I%s" % iDir for iDir in incDirs]) configDict["OPENCV"] = os.environ.get("OPENCV", "") if configDict["OPENCV"] == "" or configDict["OPENCVSUPPORTSCUDA"] or configDict["OPENCV3"]: configOpenCV(configDict) def getHdf5Name(libdirflags): libdirs=libdirflags.split("-L") for dir in libdirs: if os.path.exists(os.path.join(dir.strip(),"libhdf5.so")): return "hdf5" elif os.path.exists(os.path.join(dir.strip(),"libhdf5_serial.so")): return "hdf5_serial" return "hdf5" def checkCompiler(configDict): print("Checking compiler configuration ...") ensureCompilerVersion(configDict["CXX"]) cppProg=""" #include #include #include #include #include #include #include #include """ if configDict["OPENCV"]=="True": cppProg+="#include \n" if configDict["OPENCVSUPPORTSCUDA"]=="True": if configDict["OPENCV3"]=="True": cppProg+="#include \n" else: cppProg+="#include \n" cppProg+="\n int main(){}\n" with open("xmipp_test_main.cpp", "w") as cppFile: cppFile.write(cppProg) if not runJob("%s -c -w %s xmipp_test_main.cpp -o xmipp_test_main.o %s %s"%\ (configDict["CXX"],configDict["CXXFLAGS"],configDict["INCDIRFLAGS"],configDict["PYTHONINCFLAGS"])): print(red("Check the INCDIRFLAGS, CXX, CXXFLAGS and PYTHONINCFLAGS in xmipp.conf")) # FIXME: Check the dependencies list print(red("If some of the libraries headers fail, try installing fftw3_dev, tiff_dev, jpeg_dev, sqlite_dev, hdf5, pthread")) return False libhdf5=getHdf5Name(configDict["LIBDIRFLAGS"]) if not runJob("%s %s %s xmipp_test_main.o -o xmipp_test_main -lfftw3 -lfftw3_threads -l%s -lhdf5_cpp -ltiff -ljpeg -lsqlite3 -lpthread" % \ (configDict["LINKERFORPROGRAMS"], configDict["LINKFLAGS"], configDict["LIBDIRFLAGS"],libhdf5)): print(red("Check the LINKERFORPROGRAMS, LINKFLAGS and LIBDIRFLAGS")) return False runJob("rm xmipp_test_main*") return True def configMPI(configDict): mpiBinCandidates = [os.environ.get('MPI_BINDIR', 'None'), '/usr/lib/openmpi/bin', '/usr/lib64/openmpi/bin'] if configDict["MPI_RUN"] == "": if checkProgram("mpirun", False): configDict["MPI_RUN"] = "mpirun" print(green("'mpirun' detected.")) elif checkProgram("mpiexec", False): configDict["MPI_RUN"] = "mpiexec" print(green("'mpiexec' detected.")) else: print(yellow("\n'mpirun' and 'mpiexec' not found in the PATH")) mpiDir = findFileInDirList('mpirun', mpiBinCandidates) mpiDir = askPath(mpiDir, askUser) if mpiDir: configDict["MPI_RUN"] = os.path.join(mpiDir, "mpirun") checkProgram(configDict["MPI_RUN"]) updateXmippEnv(PATH=mpiDir) if configDict["MPI_CC"] == "": if checkProgram("mpicc", False): configDict["MPI_CC"] = "mpicc" print(green("'mpicc' detected.")) else: print(yellow("\n'mpicc' not found in the PATH")) mpiDir = findFileInDirList('mpicc', mpiBinCandidates) mpiDir = askPath(mpiDir, askUser) if mpiDir: configDict["MPI_CC"] = os.path.join(mpiDir, "mpicc") checkProgram(configDict["MPI_CC"]) if configDict["MPI_CXX"] == "": if checkProgram("mpicxx", False): configDict["MPI_CXX"] = "mpicxx" print(green("'mpicxx' detected.")) else: 
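# Illustrative sketch: configMPI() honours a few optional environment variables when the
# MPI wrappers are not in the PATH (values are hypothetical):
#
#   export MPI_BINDIR=/usr/lib64/openmpi/bin    # extra dir searched for mpirun/mpicc/mpicxx
#   export MPI_LIBDIR=/usr/lib64/openmpi/lib    # appended to MPI_CXXFLAGS as -L...
#   export MPI_INCLUDE=/usr/include/openmpi     # appended to MPI_CXXFLAGS as -I...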
print(yellow("\n'mpicxx' not found in the PATH")) mpiDir = findFileInDirList('mpicxx', mpiBinCandidates) mpiDir = askPath(mpiDir, askUser) if mpiDir: configDict["MPI_CXX"] = os.path.join(mpiDir, "mpicxx") checkProgram(configDict["MPI_CXX"]) mpiLib_env = os.environ.get('MPI_LIBDIR', '') if mpiLib_env: configDict['MPI_CXXFLAGS'] += ' -L'+mpiLib_env mpiInc_env = os.environ.get('MPI_INCLUDE', '') if mpiInc_env: configDict['MPI_CXXFLAGS'] += ' -I'+mpiInc_env if configDict["MPI_LINKERFORPROGRAMS"] == "": configDict["MPI_LINKERFORPROGRAMS"] = configDict["MPI_CXX"] def checkMPI(configDict): print("Checking MPI configuration ...") cppProg=""" #include int main(){} """ with open("xmipp_mpi_test_main.cpp", "w") as cppFile: cppFile.write(cppProg) if not runJob("%s -c -w %s %s %s xmipp_mpi_test_main.cpp -o xmipp_mpi_test_main.o" % (configDict["MPI_CXX"], configDict["INCDIRFLAGS"], configDict["CXXFLAGS"], configDict["MPI_CXXFLAGS"])): print(red("MPI compilation failed. Check the INCDIRFLAGS, MPI_CXX and CXXFLAGS in 'xmipp.conf'")) print(red("In addition, MPI_CXXFLAGS can also be used to add flags to MPI compilations." "'%s --showme:compile' might help" % configDict['MPI_CXX'])) return False libhdf5=getHdf5Name(configDict["LIBDIRFLAGS"]) if not runJob("%s %s %s %s xmipp_mpi_test_main.o -o xmipp_mpi_test_main " "-lfftw3 -lfftw3_threads -l%s -lhdf5_cpp -ltiff -ljpeg -lsqlite3 -lpthread" % (configDict["MPI_LINKERFORPROGRAMS"], configDict["LINKFLAGS"], configDict["MPI_LINKFLAGS"], configDict["LIBDIRFLAGS"], libhdf5)): print(red("Check the LINKERFORPROGRAMS, LINKFLAGS and LIBDIRFLAGS")) print(red("In addition, MPI_LINKFLAGS can also be used to add flags to MPI links. " "'%s --showme:compile' might help" % configDict['MPI_CXX'])) return False runJob("rm xmipp_mpi_test_main*") echoString = blue(" > This sentence should be printed 2 times if mpi runs fine") if not (runJob("%s -np 2 echo '%s.'" % (configDict['MPI_RUN'], echoString)) or runJob("%s -np 2 --allow-run-as-root echo '%s.'" % (configDict['MPI_RUN'], echoString))): print(red("mpirun or mpiexec have failed.")) return False return True def configJava(configDict): if configDict["JAVA_HOME"]=="": javaProgramPath = whereis('javac', findReal=True) if not javaProgramPath: print(yellow("\n'javac' not found in the PATH")) javaProgramPath = findFileInDirList('javac', ['/usr/lib/jvm/java-*/bin']) # put candidates here javaProgramPath = askPath(javaProgramPath, askUser) if not os.path.isdir(javaProgramPath): installDepConda('openjdk') javaProgramPath = whereis('javac', findReal=True) if javaProgramPath: updateXmippEnv(PATH=javaProgramPath) javaHomeDir = javaProgramPath.replace("/jre/bin", "") javaHomeDir = javaHomeDir.replace("/bin", "") configDict["JAVA_HOME"] = javaHomeDir if configDict["JAVA_BINDIR"]=="" and configDict["JAVA_HOME"]: configDict["JAVA_BINDIR"]="%(JAVA_HOME)s/bin" if configDict["JAVAC"]=="" and configDict["JAVA_HOME"]: configDict["JAVAC"]="%(JAVA_BINDIR)s/javac" if configDict["JAR"]=="" and configDict["JAVA_HOME"]: configDict["JAR"]="%(JAVA_BINDIR)s/jar" if configDict["JNI_CPPPATH"]=="" and configDict["JAVA_HOME"]: configDict["JNI_CPPPATH"]="%(JAVA_HOME)s/include:%(JAVA_HOME)s/include/linux" if (os.path.isfile((configDict["JAVAC"] % configDict) % configDict) and os.path.isfile((configDict["JAR"] % configDict) % configDict) and os.path.isdir("%(JAVA_HOME)s/include" % configDict)): print(green("Java detected at: %s" % configDict["JAVA_HOME"])) else: print(red("No development environ for 'java' found. 
" "Please, check JAVA_HOME, JAVAC, JAR and JNI_CPPPATH variables.")) def checkJava(configDict): if not checkProgram(configDict['JAVAC']): return False print("Checking Java configuration...") javaProg=""" public class Xmipp { public static void main(String[] args) {} } """ with open("Xmipp.java", "w") as javaFile: javaFile.write(javaProg) if not runJob("%s Xmipp.java" % configDict["JAVAC"]): print(red("Check the JAVAC")) return False runJob("rm Xmipp.java Xmipp.class") cppProg=""" #include int dummy(){} """ with open("xmipp_jni_test.cpp", "w") as cppFile: cppFile.write(cppProg) incs="" for x in configDict['JNI_CPPPATH'].split(':'): incs+=" -I"+x if not runJob("%s -c -w %s %s xmipp_jni_test.cpp -o xmipp_jni_test.o"%\ (configDict["CXX"],incs,configDict["INCDIRFLAGS"])): print(red("Check the JNI_CPPPATH, CXX and INCDIRFLAGS")) return False runJob("rm xmipp_jni_test*") return True def configCuda(configDict): configDict["CUDA"] = os.environ.get("CUDA", "") nvcc = 'nvcc' if configDict["CUDA"]=="": environCudaBin = os.environ.get('XMIPP_CUDA_BIN', os.environ.get('CUDA_BIN', '')) cudaBin = whereis(nvcc, findReal=True, env=environCudaBin + ':' + os.environ.get('PATH', '')) if cudaBin: configDict["CUDA"]="True" nvcc = os.path.join(cudaBin, nvcc) else: print(yellow("\n'nvcc' not found in the PATH " "(either in CUDA_BIN/XMIPP_CUDA_BIN)")) cudaBin = findFileInDirList('nvcc', ["/usr/local/cuda/bin", "/usr/local/cuda*/bin"]) # check order cudaBin = askPath(cudaBin, askUser) if os.path.isfile(os.path.join(cudaBin, 'nvcc')): configDict["CUDA"] = "True" cudaBin = os.path.realpath(cudaBin) # If using generic cuda, expliciting the version nvcc = os.path.join(cudaBin, nvcc) else: print(yellow("CUDA not found. Continuing only with CPU integration.")) configDict["CUDA"] = "False" updateXmippEnv(CUDA=configDict["CUDA"]=="True") if configDict["CUDA"]=="True": if configDict["NVCC"]=="": if checkProgram(nvcc): nvccVersion, nvccFullVersion = getCudaVersion(nvcc) print(green('CUDA-' + nvccFullVersion + ' detected.')) if nvccVersion != 8.0: print(yellow('CUDA-8.0 is recommended.')) configDict["NVCC"] = nvcc else: print(yellow("Warning: 'nvcc' not found. " "'NVCC_CXXFLAGS' and 'NVCC_LINKFLAGS' cannot be " "automatically set. Please, manual set them or " "set 'CUDA=False' in the config file.")) return if configDict["NVCC_CXXFLAGS"]=="": # in case user specified some wrapper of the compiler # get rid of it: 'ccache g++' -> 'g++' currentCxx = configDict["CXX"].split()[-1] cxxVersion, cxxStrVersion = getGccVersion(currentCxx) nvccVersion, nvccFullVersion = getCudaVersion(configDict["NVCC"]) if configDict["CXX_CUDA"] == '': # Alternative C++ compiler for cuda if current is incompatible if cxxVersion >= 6 and nvccVersion <= 8: # gcc>=6 is incompatible with cuda<=8 print(yellow("Using '%s' version later than 5, " "which is incompatible with cuda-%s." % (currentCxx, nvccVersion))) compVers = ['5', '5.5', '5.4', '5.3', '5.2', '5.1', '4.9', '4.8'] for ver in compVers: cxx4cuda = currentCxx+'-'+ver if checkProgram(cxx4cuda, False): configDict["CXX_CUDA"] = cxx4cuda break if configDict["CXX_CUDA"]: configDict["CXX_CUDA"] = askPath(configDict["CXX_CUDA"], askUser) if not checkProgram(configDict["CXX_CUDA"], False): print(red("No alternative found or not working. 
" "Skipping CUDA compilation.\n" "If an alternative exists, please " "'export CXX_CUDA=/path/to/g++5' and " "run again 'xmipp config'.")) configDict["CUDA"] = "False" updateXmippEnv(CUDA=False, pos='replace') return else: # TODO: Consider the CUDA-11 incompatibilities configDict["CXX_CUDA"] = currentCxx configDict["NVCC_CXXFLAGS"] = ("--x cu -D_FORCE_INLINES -Xcompiler -fPIC " "-ccbin %(CXX_CUDA)s -std=c++11 --expt-extended-lambda " # generate PTX only, and SASS at the runtime (by setting code=virtual_arch) "-gencode=arch=compute_30,code=compute_30 " "-gencode=arch=compute_35,code=compute_35 " "-gencode=arch=compute_50,code=compute_50 " "-gencode=arch=compute_60,code=compute_60 " "-gencode=arch=compute_61,code=compute_61") if configDict["NVCC_LINKFLAGS"]=="": # Looking for Cuda libraries: libDirs = ['lib', 'lib64', 'targets/x86_64-linux/lib', 'lib/x86_64-linux-gnu'] # add more condidates checkCudaLib = lambda x: os.path.isfile(x+"/libcudart.so") def searchCudaLib(root, cudaLib, ask=False): check = False for lib in libDirs: candidate = os.path.join(root, lib) if checkCudaLib(candidate): cudaLib = candidate check = True break if check: cudaLib = os.path.realpath(cudaLib) cudaLib = askPath(cudaLib, ask=ask) return cudaLib # Looking for user defined XMIPP_CUDA_LIB and CUDA_LIB cudaLib = os.environ.get('XMIPP_CUDA_LIB', os.environ.get('CUDA_LIB', '')) nvccDir = whereis(configDict["NVCC"]) if not checkCudaLib(cudaLib) and nvccDir: # Looking for Cuda libs under active nvcc. cudaLib = searchCudaLib(os.path.dirname(nvccDir), cudaLib, False) if not checkCudaLib(cudaLib): # Looking for Cuda libs in user root libs. cudaLib = searchCudaLib('/usr', cudaLib, askUser) if checkCudaLib(cudaLib): configDict["NVCC_LINKFLAGS"] = ("-L%s"%cudaLib + " -L%s/stubs"%cudaLib) # nvidia-ml is in stubs folder updateXmippEnv(LD_LIBRARY_PATH=cudaLib) updateXmippEnv(LD_LIBRARY_PATH=cudaLib+"/stubs") else: print(yellow("WARNING: system libraries for CUDA not found!\n" " If cuda code is not compiling, " "please, find 'libcudart.so' and manually add\n" " the containing folder (e.g. '/my/cuda/lib') at %s\n" " > NVCC_LINKFLAGS = -L/my/cuda/lib -L/my/cuda/lib/stubs\n" " If the problem persist, set 'CUDA=False' before " "compiling to skip cuda compilation." 
% (CONFIG_FILE_NAME))) def checkCuda(configDict): if configDict["CUDA"]=="True": if not checkProgram(configDict["NVCC"]): return False print("Checking CUDA configuration ...") cppProg=""" #include #include int main(){} """ with open("xmipp_cuda_test.cpp", "w") as cppFile: cppFile.write(cppProg) if not runJob("%s -c -w %s %s xmipp_cuda_test.cpp -o xmipp_cuda_test.o"%\ (configDict["NVCC"],configDict["NVCC_CXXFLAGS"],configDict["INCDIRFLAGS"])): print(red("Check the NVCC, NVCC_CXXFLAGS and INCDIRFLAGS")) return False if not runJob("%s %s xmipp_cuda_test.o -o xmipp_cuda_test -lcudart -lcublas -lcufft -lcurand -lcusparse -lnvToolsExt" % \ (configDict["NVCC"], configDict["NVCC_LINKFLAGS"])): print(red("Check the NVCC and NVCC_LINKFLAGS")) return False if not runJob("%s %s xmipp_cuda_test.o -o xmipp_cuda_test -lcudart -lcublas -lcufft -lcurand -lcusparse -lnvToolsExt" % \ (configDict["CXX"], configDict["NVCC_LINKFLAGS"])): print(red("Check the CXX and NVCC_LINKFLAGS")) return False runJob("rm xmipp_cuda_test*") return True def configMatlab(configDict): if configDict["MATLAB"]=="": if checkProgram("matlab",False): configDict["MATLAB"]="True" else: configDict["MATLAB"]="False" if configDict["MATLAB"]=="True": if configDict["MATLAB_DIR"]=="": if checkProgram("matlab"): matlabBinDir = whereis("matlab", findReal=True) updateXmippEnv(MATLAB_BIN_DIR=matlabBinDir) configDict["MATLAB_DIR"]=matlabBinDir.replace("/bin","") print(green("Matlab detected at " + matlabBinDir)) def checkMatlab(configDict): ans = True if configDict["MATLAB"]=="True": if not checkProgram("matlab"): return False print("Checking Matlab configuration ...") cppProg=""" #include int dummy(){} """ with open("xmipp_mex.cpp", "w") as cppFile: cppFile.write(cppProg) if not runJob("%s/bin/mex -silent xmipp_mex.cpp"%configDict["MATLAB_DIR"]): print(red("Check the MATLAB_DIR")) ans = False runJob("rm xmipp_mex*") return ans def writeConfig(configDict): with open(CONFIG_FILE_NAME, "w") as configFile: configFile.write("[BUILD]\n") for label in sorted(configDict.keys()): configFile.write("%s=%s\n"%(label,configDict[label])) def updateConfig(updatingDict): cmdTemplate = "sed -i -e 's/^%s=.*/%s=%s/' %s" for k, v in updatingDict.items(): print(blue("Setting %s=%s" % (k, v))) runJob(cmdTemplate % (k, k, v.replace('/', '\/'), CONFIG_FILE_NAME), show_command=False) def writeEnviron(): with open('xmippEnv.json', 'w') as f: json.dump(XMIPP_ENV, f, indent=4) def config_DL(configDict): k = 'USE_DL' if (k in configDict) and (configDict[k] != 'True'): configDict[k] = 'False' def configConfigVersion(configDict): key = 'CONFIG_VERSION' configDict[key] = getScriptVersion() def ensureConfigVersion(configDict): key = 'CONFIG_VERSION' if key not in configDict or configDict[key] != XMIPP_SCRIPT_VERSION: print(red("We did some changes which are not compatible with your current config file. " "Please, run './xmipp config' to generate a new config file." 
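# Illustrative sketch of the configuration life cycle:
#
#   ./xmipp config         # writes xmipp.conf with VERIFIED=False and stamps CONFIG_VERSION
#                          # with the script version (a git hash in devel mode)
#   ./xmipp check_config   # compiles the C++/MPI/Java/CUDA/Matlab probes; on success sets VERIFIED=True
#   # once VERIFIED=True the probes are skipped; set VERIFIED=False in xmipp.conf to re-run them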
"We recommend you to create a backup before regenerating it (use --help for additional info)")) exit(-1) def config(): print("Configuring -----------------------------------------") new_config_dict = createEmptyConfig() if new_config_dict['VERIFIED'] == '': new_config_dict['VERIFIED'] = 'False' configCompiler(new_config_dict) configMPI(new_config_dict) configJava(new_config_dict) configCuda(new_config_dict) configMatlab(new_config_dict) config_DL(new_config_dict) configConfigVersion(new_config_dict) writeConfig(new_config_dict) writeEnviron() print(blue("Configuration completed.....")) return new_config_dict def checkConfig(): print("Checking configuration ------------------------------") ensureConfig() if configDict['VERIFIED'] != 'True': newConf = {} # to update the config if something fails if not checkCompiler(configDict): print(red("Cannot compile")) print("Possible solutions") # FIXME: check libraries print("In Ubuntu: sudo apt-get -y install libsqlite3-dev libfftw3-dev libhdf5-dev libopencv-dev python3-dev "\ "python3-numpy python3-scipy python3-mpi4py") print("In Manjaro: sudo pacman -Syu install hdf5 python3-numpy python3-scipy --noconfirm") print("Please, see 'https://scipion-em.github.io/docs/docs/scipion-modes/" "install-from-sources.html#step-2-dependencies' for more information about libraries dependencies.") print("\nRemember to re-run './xmipp config' after installing libraries in order to " "take into account the new system configuration.") runJob("rm xmipp_test_main*") return False if not checkMPI(configDict): print(red("Cannot compile with MPI or use it")) runJob("rm xmipp_mpi_test_main*") return False if not checkJava(configDict): print(red("Cannot compile with Java")) runJob("rm Xmipp.java Xmipp.class xmipp_jni_test*") return False if not checkCuda(configDict): print(red("Cannot compile with NVCC, continuing without CUDA")) runJob("rm xmipp_cuda_test*") # if fails, the test files remains newConf["CUDA"]="False" if not checkMatlab(configDict): print(red("Cannot compile with Matlab, continuing without Matlab")) newConf["MATLAB"]="False" runJob("rm xmipp_mex*") newConf['VERIFIED']="True" updateConfig(newConf) else: print(blue("'%s' is already checked. Set VERIFIED=False to re-checked" % CONFIG_FILE_NAME)) return True def compileModule(Nproc,module): shutil.copyfile(CONFIG_FILE_NAME,"src/%s/install/%s" % (module, CONFIG_FILE_NAME)) if module == "xmipp": stampVersion() log = [] ok = runJob("scons -j%s" % Nproc, "src/%s" % module, log=log) if not ok: failingBin = None for l in log[-30:]: # inspecting last 30 lines # expected error: 'scons: *** [some/program/to/compile] Error 1' errorRegex = re.match("scons: \*\*\* \[(.*)\] (.*Error) ([0-9]*)[: ]*(.*)", l) if errorRegex: failingBin = errorRegex.group(1) errorType = errorRegex.group(2) errorNum = errorRegex.group(3) errorMsg = errorRegex.group(4) for k, v in SKIPPABLE_BINS.items(): if k in failingBin: print(red("\nSome error found compiling '%s' program." % failingBin.split('/')[-1])) print(red("You can skip this program by including '%s' " "in the config file." % (v))) if 'unsupported pickle protocol' in errorMsg: print(red("\nThis error might be because you changed the python " "version. If so, please run './xmipp cleanBin' to " "clean up the installation and, then './xmipp' to " "re-compile it.")) errorRegex = re.match(".*: fatal error: (.*): No such file or directory", l) if errorRegex: missingPath = errorRegex.group(1) print(red("\n'%s' file not found." 
% missingPath)) missingDir = missingPath.split(os.path.sep)[0] missingFile = os.path.join(*missingPath.split(os.path.sep)[1:]) if missingDir in DEPENDENCIES.keys(): depDir = os.path.join('src', missingDir) hint = ('cd %s ; git checkout -- %s ; cd -' % (depDir, missingFile) if os.path.isdir(depDir) else './xmipp get_dependencies') print(red("This file belongs to %s dependency. Please, " "try '%s' and re-compile." % (missingDir, hint))) print(red("\nSome error occurred during the compilation of '%s'%s.\n" % (module, (" ('%s')" % failingBin) if failingBin else ''))) sys.exit(1) return ok def compile_cuFFTAdvisor(): advisorDir = "src/cuFFTAdvisor/" currDir = os.getcwd() libDir = "src/xmipp/lib/" createDir(libDir) os.chdir(advisorDir) ok = runJob("make all") os.chdir(currDir) return ok and runJob("cp " + advisorDir + "build/libcuFFTAdvisor.so" + " " + libDir) def compile(Nproc): if isinstance(Nproc, str) and Nproc.startswith('N='): Nproc = int(Nproc.split('N=')[1]) ensureConfig() ensureConfigVersion(configDict) if not compileDependencies(Nproc): return False return compileXmipp(Nproc) def askYesNo(msg='', default=True): r = input(msg) return (r.lower() not in ['n', 'no', '0'] if default else r.lower() in ['y', 'yes', '1']) def installDepConda(dep): condaEnv = os.environ.get('CONDA_DEFAULT_ENV', 'base') if condaEnv != 'base': if not askUser or askYesNo(yellow("'%s' dependency not found. Do you want " "to install it using conda? [YES/no] " % dep)): print(yellow("Trying to install %s with conda" % dep)) if runJob("conda activate %s ; conda install %s -y -c defaults" % (condaEnv, dep)): print(green("'%s' installed in conda environ '%s'.\n" % (dep, condaEnv))) return True return False def compileDependencies(Nproc): print("Building Dependencies -------------------------------------") result = True if is_config_true('CUDA'): cudaBinDir, nvccBaseName = os.path.split(configDict['NVCC']) # cuFFTAdvisor compilation needs 'nvcc' accessible thru the PATH if not checkProgram(nvccBaseName, False) and checkProgram(configDict['NVCC'], False): # if nvcc basename is not found but absolute path yes, adding the dir to the path. os.environ['PATH'] = os.pathsep.join([cudaBinDir, os.environ['PATH']]) result = result and (checkProgram('make', True) or installDepConda('make')) result = result and compile_cuFFTAdvisor() if not result: print(red("Cannot build dependencies")) return result def compileXmipp(Nproc): print("Compiling -------------------------------------------") if not compileModule(Nproc,"xmippCore"): return False if not compileModule(Nproc,"xmipp"): return False if not compileModule(Nproc,"xmippViz"): return False return True def runTests(testNames): if len(testNames)==0 or 'help' in testNames or '--help' in testNames: print("Usage: xmipp test op\n" "\n" " op = --show: Show how to invoke all available tests\n" " --allPrograms: Run all program tests\n" " --allFuncs: Run all function tests\n" " 'testName': Run certain test (more than one is available)." 
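# Illustrative sketch of the compilation entry points handled here (job counts and
# directories are hypothetical):
#
#   ./xmipp compile 8                  # dependencies + xmippCore + xmipp + xmippViz, 8 scons jobs
#   ./xmipp compile 8 xmippCore        # dependencies + a single module
#   ./xmipp compileAndInstall 8 build  # compile everything and install into ./build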
"\n") return print("Testing ---------------------------------------------") xmippSrc = os.environ.get('XMIPP_SRC', None) if xmippSrc and os.path.isdir(xmippSrc): os.environ['PYTHONPATH'] = ':'.join([ os.path.join(os.environ['XMIPP_SRC'], XMIPP), os.environ.get('PYTHONPATH', '')]) testsPath = os.path.join(os.environ['XMIPP_SRC'], XMIPP, 'tests') else: print(red('XMIPP_SRC is not in the enviroment.') + '\nTo run the tests you need to run: ' + blue('source build/xmipp.bashrc')) sys.exit(1) dataSetPath = os.path.join(testsPath, 'data') # if not os.path.isdir(dataSetPath): # createDir(dataSetPath) os.environ["XMIPP_TEST_DATA"] = dataSetPath # downloading/updating the dataset url = "http://scipion.cnb.csic.es/downloads/scipion/data/tests" dataset = 'xmipp_programs' if os.path.isdir(dataSetPath): print(blue("Updating the test files")) task = "update" else: print(blue("Downloading the test files")) task = "download" args = "%s %s %s" % ("tests/data", url, dataset) runJob("bin/xmipp_sync_data %s %s" % (task, args), cwd='src/xmipp') configDict = readConfigFile(CONFIG_FILE_NAME) noCudaStr = '--noCuda' if not is_config_true('CUDA') else '' print(" Tests to do: %s" % ', '.join(testNames)) if not runJob("(cd src/xmipp/tests; %s test.py %s %s)" % (getPython(), ' '.join(testNames), noCudaStr)): sys.exit(-1) def getPython(): if checkProgram("scipion3", False): return "scipion3 python" else: return 'python3' def addDeepLearninModel(login, modelPath='', update=None): """ Takes the folder name modelName from models dir and makes a .tgz, uploads the .tgz to scipion web. """ def usageDL(): print(""" XMIPP addModel help: This mode is used to upload a model folder to the Scipion/Xmipp server. Usually the model folder contains big files used to fed deep learning procedures with pretrained data. All the models stored in the server will be downloads using the 'get_models' mode or during the compilation/installation time if 'USE_DL=True' in the config file. [or with 'scipion3 installb deepLearningToolkit'] Usage: -> ./xmipp addModel [--update] Behaviour: 0. modelName = basename(modelsPath) <- Please, check the folder's name! 1. Packing in 'xmipp_model_modelName.tgz' 2. Check if that model already exists (use --update to override an existing model) 3. Upload the model to the server. 4. Update the MANIFEST file. The model name will be the folder name in """) sys.exit(0) if login == '--help': usageDL() modelPath = modelPath.rstrip("/") if not os.path.isdir(modelPath): print(" is not a directory. Please, check the path. \n" "The name of the model will be the name of that folder.\n") usageDL() modelName = os.path.basename(modelPath) modelsDir = os.path.dirname(modelPath) tgzFn = "xmipp_model_%s.tgz" % modelName localFn = os.path.join(modelsDir, tgzFn) print("Creating the '%s' model." % tgzFn) runJob("tar czf %s %s" % (tgzFn, modelName), cwd=modelsDir) remotePath = "scipionfiles/downloads/scipion/software/em" print("Warning: Uploading, please BE CAREFUL! This can be dangerous.") print('You are going to be connected to "%s" to write in folder ' '"%s".' % (login, remotePath)) if input("Continue? YES/no\n").lower() == 'no': sys.exit() print("Trying to upload the model using '%s' as login" % login) args = "%s %s %s %s" % (login, os.path.abspath(localFn), remotePath, update) if runJob("src/xmipp/bin/xmipp_sync_data upload %s" % args): print("'%s' model successfully uploaded! 
Removing the local .tgz" % modelName) runJob("rm %s" % localFn) pDLdownload = None def downloadDeepLearningModels(cwd, dedicatedMode=False): if not is_config_true('USE_DL') and not dedicatedMode: return True url = "http://scipion.cnb.csic.es/downloads/scipion/software/em" dest = "models" modelsPath = os.path.join(cwd, dest) dataSet = "DLmodels" # downloading/updating the DLmodels if os.path.isdir(modelsPath): print("Updating the Deep Learning models (in backgound)") task = "update" else: print("Downloading Deep Learning models (in backgound)") task = "download" global pDLdownload # using Popen instead of runJob in order to download in parallel pDLdownload = runJob("bin/xmipp_sync_data %s %s %s %s" % (task, dest, url, dataSet), cwd=cwd, show_command=False, inParallel=not dedicatedMode) if dedicatedMode: ok = pDLdownload else: # in parallel poll() is None untill finished ok = pDLdownload.poll() is None or pDLdownload.poll() == 0 return ok def install(dirname): print("Installing ------------------------------------------") cpCmd = "rsync -LptgoD" if checkProgram("rsync", False) else "cp" ok = True createDir(dirname) createDir(dirname+"/lib") ok = ok and runJob(cpCmd+" src/*/lib/lib* "+dirname+"/lib/") createDir(dirname+"/bin") ok = ok and runJob(cpCmd+" src/*/bin/* "+dirname+"/bin/") destPathPyModule = os.path.expanduser(os.path.abspath(os.path.join(dirname, "pylib", "xmippPyModules"))) createDir(destPathPyModule) initFn = destPathPyModule + "/__init__.py" if not os.path.isfile(initFn): with open(initFn, 'w') as f: pass # just to create a init file to be able to import it as module runJob(cpCmd+" -r src/xmipp/libraries/py_xmipp/* "+destPathPyModule) createDir(dirname+"/bindings") createDir(dirname+"/bindings/matlab") ok = ok and runJob(cpCmd+" src/xmipp/bindings/matlab/*.m* "+dirname+"/bindings/matlab/") createDir(dirname+"/bindings/python") ok = ok and runJob(cpCmd+" src/xmipp/bindings/python/xmipp_base.py "+dirname+"/bindings/python/") ok = ok and runJob(cpCmd+" src/xmipp/bindings/python/xmipp.py " + dirname + "/bindings/python/") ok = ok and runJob(cpCmd+" src/xmipp/bindings/python/xmipp_conda_envs.py " + dirname + "/bindings/python/") ok = ok and runJob(cpCmd+" src/xmipp/lib/xmippLib.so "+dirname+"/bindings/python/") ok = ok and runJob(cpCmd+" src/xmipp/lib/_swig_frm.so "+dirname+"/bindings/python/") createDir(dirname+"/bindings/python/sh_alignment") ok = ok and runJob(cpCmd+" -r src/xmipp/external/sh_alignment/python/* "+dirname+"/bindings/python/sh_alignment/") ok = ok and runJob(cpCmd+" src/xmipp/external/sh_alignment/swig_frm.py "+dirname+"/bindings/python/sh_alignment/") createDir(dirname+"/resources") ok = ok and runJob(cpCmd+" -r src/*/resources/* "+dirname+"/resources/") # ok = ok and runJob(cpCmd + " -r src/xmippViz/bindings/chimera " + dirname + "/bindings/") createDir(dirname+"/bindings/java") ok = ok and runJob(cpCmd+" -Lr src/xmippViz/java/lib "+dirname+"/bindings/java/") ok = ok and runJob(cpCmd+" -Lr src/xmippViz/java/build "+dirname+"/bindings/java/") ok = ok and runJob(cpCmd+" -Lr src/xmippViz/external/imagej "+dirname+"/bindings/java/") ok = ok and runJob(cpCmd+" src/xmippViz/bindings/python/xmippViz.py "+dirname+"/bindings/python/") ok = ok and runJob(cpCmd+" xmippEnv.json "+dirname+"/xmippEnv.json") if not ok: print(red("\nSome error occurred during the installation.\n")) sys.exit(1) # Scipion connection scipionSoftware = os.environ.get('SCIPION_SOFTWARE', os.path.join(getScipionHome(), 'software')) scipionLibs = os.path.join(scipionSoftware, 'lib') scipionBindings = 
os.path.join(scipionSoftware, 'bindings') if os.path.isdir(scipionLibs) and os.path.isdir(scipionBindings): coreLib = os.path.join(dirname, "lib", "libXmippCore.so") xmippLib = os.path.join(dirname, "lib", "libXmipp.so") bindings = os.path.join(dirname, "bindings", "python", "*") runJob("ln -srf %s %s" % (coreLib, scipionLibs)) runJob("ln -srf %s %s" % (xmippLib, scipionLibs)) runJob("ln -srf %s %s" % (bindings, scipionBindings)) else: print(yellow("No scipion3 found. If you intended to use Xmipp in " "the Scipion framework, check the binding at " "SCIPION_HOME/software/bindings...")) runJob("touch %s/v%s" % (dirname, XMIPP_VERSION)) # version token fhBash = open(dirname+"/xmipp.bashrc","w") fhFish = open(dirname+"/xmipp.fish","w") fhBash.write("# This script is valid for bash and zsh\n\n") fhFish.write("# This script is valid for fish\n\n") XMIPP_HOME = os.path.realpath(dirname) fhBash.write("export XMIPP_HOME=%s\n"%XMIPP_HOME) fhFish.write("set -x XMIPP_HOME %s\n"%XMIPP_HOME) XMIPP_SRC = os.path.realpath("src") fhBash.write("export XMIPP_SRC=%s\n"%XMIPP_SRC) fhFish.write("set -x XMIPP_SRC %s\n"%XMIPP_SRC) # SCIPION_HOME = getScipionHome() # if SCIPION_HOME: # fhBash.write("export PATH=$SCIPION_HOME/software/bin:$PATH\n") # fhBash.write("export LD_LIBRARY_PATH=$SCIPION_HOME/software/lib:$LD_LIBRARY_PATH\n") # #fhFish.write("set -px PATH $SCIPION_HOME/software/bin\n") # fhFish.write("set -px LD_LIBRARY_PATH $SCIPION_HOME/software/lib\n") virtEnvDir = os.environ.get('VIRTUAL_ENV', '') # if virtualEnv is used virtEnvLib = os.path.join(virtEnvDir, 'lib') if virtEnvDir else '' condaDir = os.environ.get('CONDA_PREFIX', '') # if conda is used condaLib = os.path.join(condaDir, 'lib') if condaDir else '' fhBash.write("export PATH=%s/bin:$PATH\n"%XMIPP_HOME) fhBash.write("export LD_LIBRARY_PATH=%s/lib:%s/bindings/python:%s:%s:$LD_LIBRARY_PATH\n" %(XMIPP_HOME, XMIPP_HOME, virtEnvLib, condaLib)) fhBash.write("export PYTHONPATH=%s/bindings/python:%s/pylib:$PYTHONPATH\n"%(XMIPP_HOME,XMIPP_HOME)) fhFish.write("set -px PATH %s/bin\n"%XMIPP_HOME) fhFish.write("set -px LD_LIBRARY_PATH %s/lib %s/bindings/python %s %s\n" %(XMIPP_HOME, XMIPP_HOME, virtEnvLib, condaLib)) fhFish.write("set -px PYTHONPATH %s/bindings %s/pylib\n"%(XMIPP_HOME,XMIPP_HOME)) fhBash.write('\n') fhBash.write("alias x='xmipp'\n") fhBash.write("alias xsj='xmipp_showj'\n") fhBash.write("alias xio='xmipp_image_operate'\n") fhBash.write("alias xis='xmipp_image_statistics'\n") fhBash.write("alias xih='xmipp_image_header'\n") fhBash.write("alias xmu='xmipp_metadata_utilities'\n") fhFish.write('\n') fhFish.write("alias x 'xmipp'\n") fhFish.write("alias xsj 'xmipp_showj'\n") fhFish.write("alias xio 'xmipp_image_operate'\n") fhFish.write("alias xis 'xmipp_image_statistics'\n") fhFish.write("alias xih 'xmipp_image_header'\n") fhFish.write("alias xmu 'xmipp_metadata_utilities'\n") fhBash.close() fhFish.close() print("\n" " *********************************************\n" " * *\n" " * Xmipp have been successfully installed! 
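# Illustrative sketch (abridged, with hypothetical paths) of the build/xmipp.bashrc that
# install() writes; it must be sourced before running the installed binaries or the tests:
#
#   export XMIPP_HOME=/path/to/xmipp/build
#   export XMIPP_SRC=/path/to/xmipp/src
#   export PATH=$XMIPP_HOME/bin:$PATH
#   export LD_LIBRARY_PATH=$XMIPP_HOME/lib:$XMIPP_HOME/bindings/python:...:$LD_LIBRARY_PATH
#   export PYTHONPATH=$XMIPP_HOME/bindings/python:$XMIPP_HOME/pylib:$PYTHONPATH
#
#   source build/xmipp.bashrc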
*\n" " * *\n" " *********************************************\n\n") return True def writeDevelPaths(dirname): fhBash = open(dirname+"/xmipp.bashrc","w") XMIPP_HOME = os.path.realpath(dirname) fhBash.write("export XMIPP_HOME=%s\n"%XMIPP_HOME) XMIPP_SRC = os.path.realpath("src") fhBash.write("export XMIPP_SRC=%s\n"%XMIPP_SRC) # SCIPION_HOME = getScipionHome() # if SCIPION_HOME: # fhBash.write("export PATH=$SCIPION_HOME/bin:$PATH\n") # fhBash.write("export LD_LIBRARY_PATH=$SCIPION_HOME/software/lib:$LD_LIBRARY_PATH\n") fhBash.write("export PATH=%s/xmipp/bin:%s/xmippViz/bin:$PATH\n"%(XMIPP_HOME,XMIPP_HOME)) fhBash.write("export LD_LIBRARY_PATH=%s/xmippCore/lib:$LD_LIBRARY_PATH\n"%XMIPP_HOME) fhBash.write("export LD_LIBRARY_PATH=%s/xmippCore/bindings/python:$LD_LIBRARY_PATH\n"%XMIPP_HOME) fhBash.write("export LD_LIBRARY_PATH=%s/xmipp/lib:$LD_LIBRARY_PATH\n"%XMIPP_HOME) fhBash.write("export LD_LIBRARY_PATH=%s/xmipp/bindings/python:$LD_LIBRARY_PATH\n"%XMIPP_HOME) fhBash.write("export PYTHONPATH=%s/xmippCore/bindings/python:$PYTHONPATH\n"%XMIPP_HOME) fhBash.write("export PYTHONPATH=%s/xmipp/bindings/python:$PYTHONPATH\n"%XMIPP_HOME) fhBash.write("export PYTHONPATH=%s/xmippViz/bindings/python:$PYTHONPATH\n"%XMIPP_HOME) fhBash.close() def usage(msg=''): if msg != '': print(red(msg)) print("Usage: xmipp [options]\n" " version [dir=build]; Returns the version information. Add '--short' to print only the version number.\n" " all [op1=opt1 op2=opt2...]: (Default) Retrieve [br=branch], configure, check, compile [N=8], install [dir=build]\n" " get_dependencies: Retrieve dependencies from github\n" " get_devel_sources [branch]: Retrieve development sources from github for a given branch (devel branch by default)\n" " cleanBin: Clean all already compiled files (build, .so,.os,.o in src/* and " + CONFIG_FILE_NAME + ")\n" " cleanAll: Delete all (sources and build directories)\n" " config [noAsk]: Configure compilation variables. If 'noAsk' is passed, it will try to automatically found some libraries and compilers. \n" " for compiling using system libraries\n" " check_config: Check that the configuration is correct\n" " compile [N]: Compile all modules with N processors (8 by default)\n" " compile N dependencies: Compile dependencies\n" " compileAndInstall [N] [dir] Compile all modules with N processors (8 by default) and install in the dir directory ('build' by default)\n" " compile N xmippCore: Compile xmippCore\n" " compile N xmipp: Compile xmipp\n" " compile N xmippViz: Compile xmippViz\n" " install [dir]: Install at dir (./build by default)\n" " get_models [dir]: Download the Deep Learning Models at dir/models (./build/models by default).\n" " test [--show] testName: Run tests to check Xmipp programs (without args, it shows a detailed help).\n" " if --show is activated without testName all are shown, \n" " instead a grep of testName is done \n" "For developers:\n" " create_devel_paths: Create bashrc files for devel\n" " git ...: Git command to all 4 repositories\n" " gitConfig: Change the git config from https to git\n" " addModel login modelPath: Takes a deepLearning model from the 'modelPath', makes a tgz of it and \n" " uploads the .tgz according to the . 
\n" " Note the login (usr@server) must have write permisions to Nolan machine.\n" " tar [v=ver] [br=br]: Create a bundle of the xmipp (without arguments shows a detailed help)\n" " can be 'Sources', 'BinDebian' or 'BinCentos', when Sources put a branch (default: master).'\n" " usually X.YY.MM (add debug to package this local script and the local scripts/tar.py) \n" ) def getCudaVersion(nvcc): # FIXME: Refator/unify this with getGccVersion() log = [] runJob(nvcc + " --version", show_output=False, show_command=False, log=log) # expected lst line: 'Cuda compilation tools, release 8.0, V8.0.61' full_version = log[-1].strip().split(', ')[-1].lstrip('V') tokens = full_version.split('.') if len(tokens) < 2: tokens.append('0') # just in case when only one digit is returned nvccVersion = float(str(tokens[0] + '.' + tokens[1])) return nvccVersion, full_version def getGccVersion(compiler): log = [] runJob(compiler + " -dumpversion", show_output=False, show_command=False, log=log) full_version = log[0].strip() tokens = full_version.split('.') if len(tokens) < 2: tokens.append('0') # for version 5.0, only '5' is returned gccVersion = float(str(tokens[0] + '.' + tokens[1])) return gccVersion, full_version def ensureCompilerVersion(compiler): if 'g++' in compiler or 'gcc' in compiler: ensureGCC_GPPVersion(compiler) else: print(red('Version detection for \'' + compiler + '\' is not implemented.')) def ensureGCC_GPPVersion(compiler): if 'TRAVIS' in os.environ: return # skip detection on TRAVIS if not checkProgram(compiler, True): sys.exit(-7) gccVersion, fullVersion = getGccVersion(compiler) if gccVersion < 4.8: # join first two numbers, i.e. major and minor version print(red('Detected ' + compiler + " in version " + fullVersion + '. Version 4.8 or higher is required.')) sys.exit(-8) else: print(green(compiler + ' ' + fullVersion + ' detected')) def ensureConfig(): # assuming the config file is not loaded in the main(), i.e. it does not exists yet and has not been created # by another function if not configDict: print(red("There is no config file. Make sure to run config")) sys.exit(-6) def ensureGit(critical=False): if not checkProgram('git', critical): if critical or os.path.isdir('.git'): # .git dir found means devel mode, which needs git print(red("Git not found.")) sys.exit(-1) else: return False return True def getScriptVersion(): """ If git not present means it is in production mode and version can be retrieved from the commit.info file """ commitFn = os.path.join('src', 'xmipp', 'commit.info') notFound = "(no git repo detected)" if ensureGit(False): scriptName = os.path.basename(__file__) lastCommit = [] # get hash of the last commit changing this script if runJob('git log -n 1 --pretty=format:%H -- ' + scriptName, '.', False, lastCommit, False): return lastCommit[0].strip() elif os.path.isfile(commitFn): with open(commitFn, 'r') as file: commitInfo = file.readline() return commitInfo else: return notFound else: return notFound if __name__ == '__main__': # Running always under this own directory. 
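

# --------------------------------------------------------------------------
# Quick reference (illustrative only): typical invocations of this script,
# matching the modes parsed by the dispatcher below. See usage() above for
# the authoritative option list; paths and values here are just examples.
#
#   ./xmipp                           # same as './xmipp all'
#   ./xmipp all N=4 br=devel dir=build
#   ./xmipp config noAsk              # write xmipp.conf without prompting
#   ./xmipp compile 8 xmippCore       # compile a single module
#   ./xmipp install build             # install into ./build
#   ./xmipp version dir=build --short
# --------------------------------------------------------------------------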
if __name__ == '__main__':
    # Always run from this script's own directory.
    os.chdir(os.path.dirname(os.path.abspath(__file__)))

    askUser = True
    if 'noAsk' in sys.argv:
        askUser = False
        sys.argv.pop(sys.argv.index('noAsk'))

    n = len(sys.argv)
    if n == 2 and (sys.argv[1] == "help" or sys.argv[1] == "-help" or
                   sys.argv[1] == "--help" or sys.argv[1] == "-h"):
        usage()
        sys.exit(0)

    for idx, arg in enumerate(sys.argv):
        if ' ' in arg:  # re-quote arguments containing spaces so they are preserved
            sys.argv[idx] = '"%s"' % sys.argv[idx]

    if n >= 2:
        mode = sys.argv[1]
    else:
        mode = "all"

    # if not ensureGit():
    #     sys.exit(-1)
    XMIPP_SCRIPT_VERSION = getScriptVersion()
    configDict = readConfigFile(CONFIG_FILE_NAME)

    if mode == "cleanAll":
        print("WARNING: This will DELETE ALL content from src and build")
        print("         Notice that if you have unpushed changes,\n"
              "         they will be deleted.\n")
        print("Are you sure you want to do this? (YeS/No) -case sensitive-")
        yesno = input()
        if yesno == "YeS":
            print("Cleaning everything")
            cleanSources()
            cleanBinaries()
        else:
            print("Nothing cleaned")
            if yesno.lower() == "yes":
                print("Pay attention to the capital letters of 'YeS'")
    elif mode == "get_dependencies":
        getDependencies()
    elif mode == "cleanBin":
        cleanBinaries()
    elif mode == "version":
        buildDir = 'build'
        shortFlag = ''
        for arg in sys.argv[2:]:
            if arg == '--short':
                shortFlag = '--short'
            elif arg.startswith('dir='):
                buildDir = arg[4:]
        versionBin = "%s/bin/xmipp_version" % buildDir
        envSetting = "%s/xmipp.bashrc" % buildDir
        if not (os.path.isfile(versionBin) and os.path.isfile(envSetting)):
            print("Build not found...")
        else:
            runJob(". %s ; %s %s" % (envSetting, versionBin, shortFlag),
                   show_command=False)
    elif mode == "get_devel_sources":
        branch = None if n == 2 else sys.argv[2]
        getSources(branch)
    elif mode == "config":
        configDict = config()
    elif mode == "check_config":
        if not checkConfig():
            print(red("\nCheck failed! Something is wrong with the configuration.\n"))
            sys.exit(1)
    elif mode == "compile":
        Nproc = 8 if n < 3 else sys.argv[2]
        if n <= 3:
            ok = compile(Nproc)
            module = 'Xmipp'
        else:
            compileDependencies(Nproc)
            ok = compileModule(Nproc, sys.argv[3])
            module = sys.argv[3]
        if ok:
            print("\n"
                  "  *  %s has been successfully compiled  *\n"
                  "  > > > Don't forget to install! < < <\n\n" % module)
        else:
            print(red("\nSome error occurred during the compilation\n"))
            sys.exit(1)
    elif mode == "compileAndInstall":
        Nproc = 8
        dir = "build"
        if n > 2:
            for arg in sys.argv[2:]:
                if arg.isdigit() or arg.startswith('N='):
                    Nproc = arg
                else:
                    dir = arg
        ok = compile(Nproc)
        ok = ok and install(dir)
        if not ok:
            print(red("\nSome error occurred...\n"))
            sys.exit(1)
    elif mode == "install":
        if n == 3:
            dir = sys.argv[2]
        else:
            dir = "build"
        install(dir)
    elif mode == "get_models":
        modelsDir = 'build' if n == 2 else sys.argv[2]
        downloadDeepLearningModels(modelsDir, dedicatedMode=True)
    elif mode == "test" or mode == "tests":
        runTests(sys.argv[2:])
    elif mode == "all":
        Nproc = 8
        branch = ''
        buildDir = 'build'
        for arg in sys.argv[2:]:
            if arg.startswith("N="):
                Nproc = int(arg[2:])
            elif arg.startswith("br="):
                branch = arg[3:]
            elif arg.startswith("dir="):
                buildDir = arg[4:]
            else:
                usage("Unknown argument '%s'" % arg)
                sys.exit(1)
        # create the config file if it is not there
        if not os.path.isfile(CONFIG_FILE_NAME):
            config()
        else:
            print(blue("'%s' detected." % CONFIG_FILE_NAME))
        # HACK: re-read it from file to resolve paths
        configDict = readConfigFile(CONFIG_FILE_NAME)
        ok = (checkConfig() and
              downloadDeepLearningModels(buildDir) and
              getDependencies() and
              getSources(branch) and
              compile(Nproc) and
              install(buildDir))
        if ok:
            sys.exit(0)
        else:
            sys.exit(1)
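    # Note (informational): the 'all' branch above chains, in order,
    # checkConfig -> downloadDeepLearningModels -> getDependencies ->
    # getSources -> compile -> install, stopping at the first step that
    # returns False. Running the corresponding modes one by one
    # (config/check_config, get_dependencies, get_devel_sources, compile,
    # install) is roughly equivalent.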
    elif mode == "create_devel_paths":
        if n == 3:
            dir = sys.argv[2]
        else:
            dir = "."
        writeDevelPaths(dir)
    elif mode == "git":
        ensureGit(True)
        runJob("(cd src/xmippCore; git %s)" % " ".join(sys.argv[2:]))
        runJob("(cd src/xmipp; git %s)" % " ".join(sys.argv[2:]))
        runJob("(cd src/xmippViz; git %s)" % " ".join(sys.argv[2:]))
        runJob("(cd src/scipion-em-xmipp; git %s)" % " ".join(sys.argv[2:]))
    elif mode == "gitConfig":
        ensureGit(True)
        runJob("sed -i 's/https:\/\/github.com\//git@github.com:/g' src/xmippCore/.git/config")
        runJob("sed -i 's/https:\/\/github.com\//git@github.com:/g' .git/config")
        runJob("sed -i 's/https:\/\/github.com\//git@github.com:/g' src/xmippViz/.git/config")
        runJob("sed -i 's/https:\/\/github.com\//git@github.com:/g' src/scipion-em-xmipp/.git/config")
    elif mode == 'addModel':
        update = False
        if not (n == 4 or (n == 5 and sys.argv[4] == '--update') or
                (n == 3 and sys.argv[2] == '--help')):
            print("Incorrect number of parameters.\n")
            usage()
            sys.exit(1)
        addDeepLearninModel(*sys.argv[2:])
    elif mode == 'tar':
        if len(sys.argv) < 3:
            runJob("scripts/tar.py --help")
            sys.exit(0)
        ver = XMIPP_VERSION
        br = 'master'
        mode = sys.argv[2]
        debugFlag = ''
        for arg in sys.argv[3:]:
            if arg.startswith('br='):
                br = arg.split('br=')[1]
            if arg.startswith('v='):
                ver = arg.split('v=')[1]
            if arg.lower() == 'debug':
                debugFlag = 'debug'
        runJob("scripts/tar.py %s %s %s %s" % (mode, ver, br, debugFlag))
    else:
        usage(" -> option not found <- \n")
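
# --------------------------------------------------------------------------
# After a successful install, the environment files written by install() can
# be sourced to use Xmipp from a shell (the paths assume the default 'build'
# directory; adjust them if you installed elsewhere):
#
#   bash/zsh:  source build/xmipp.bashrc
#   fish:      source build/xmipp.fish
#
# They export XMIPP_HOME and XMIPP_SRC, extend PATH, LD_LIBRARY_PATH and
# PYTHONPATH, and define aliases such as 'xsj' for xmipp_showj.
# --------------------------------------------------------------------------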