#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import print_function import sys import os import glob import time import stat import urllib2 import subprocess import imp import random import re ENABLE_UPDATE_CHECKS = True UPDATE_SERVER_TIMEOUT = 2.0 CURRENT_SCRIPT_VERSION = 162 TUTORIAL_URL='https://scs.techfak.uni-bielefeld.de/wiki/gettingstarted' def show_update_info(oldver, newver): if oldver<103: print(' r103: First public version') if oldver<104: print(' r104: Auto-update fix') if oldver<105: print(' r105: Resolution fix') if oldver<106: print(' r106: Default RSB config creation') if oldver<107: print(' r107: Build time information') if oldver<108: print(' r108: "update" and "update-all" added') if oldver<109: print(' r109: Ant-based resolution added') if oldver<110: print(' r110: build, update, clean, run: project name can be omitted\n if current directory is inside a project directory') if oldver<111: print(' r111: Update info fix') if oldver<112: print(' r112: .soarc; extra setup for netboot') if oldver<113: print(' r113: "soa list" username fix') if oldver<114: print(' r114: Resolver handles zip files; Mac Java check') if oldver<115: print(' r115: Added HTTP access as an alternative to portb ssh') if oldver<116: print(' r116: Config creation, cmake, bash_profile fix') if oldver<117: print(' r117: Additional checks for Mac setup') if oldver<118: print(' r118: Netboot volumes: additional checks') if oldver<119: print(' r119: use bash on remote side no matter the preferences') if oldver<120: print(' r120: fixed previous version: now using script in volume') if oldver<121: print(' r121: better support for nested projects') if oldver<122: print(' r122: "soa create" automatically clones dependencies') if oldver<123: print(' r123: gracefully ignoring project self-dependencies') if oldver<124: print(' r124: terminate after canceling ant; moved qt') if oldver<125: print(' r125: added local self resolution (succeeds r123)') if oldver<126: print(' r126: using password-protected http access') if oldver<127: print(' r127: deprecated http in favor of porta; added "soa fix"') if oldver<128: print(' r128: added disclaimer') if oldver<129: print(' r129: added warnings for non-bash default shells') if oldver<130: print(' r130: updated rsb config to reflect new plugin system') if oldver<131: print(' r131: extra --skip-ant-resolve and --skip-local-resolve\n speedup options for "soa build"') if oldver<132: print(' r132: combined --fast option for "soa build"') if oldver<133: print(' r133: increased permsize for ant to 1024M') if oldver<134: print(' r134: changed project root resolution to support symlinks') if oldver<135: print(' r135: now updates hmibuild on commands update-all and fix') if oldver<136: print(' r136: bugfix: r134 had accidentally broken "soa init"') if oldver<137: print(' r137: old repo resolution restored, accounting for non-default terminals') if oldver<138: print(' r138: print archive names during local resolution') if oldver<139: print(' r139: added 2 sec timeout for script update check') if oldver<140: print(' r140: support for supplemental repositories') if oldver<141: print(' r141: new "soa list", support repositories on OS X machines;\n "soa create" looks in supplemental repos') if oldver<142: print(' r142: print warning if no files are imported from a dependency') if oldver<143: print(' r143: better separation of lists from different sources') if oldver<144: print(' r144: also print group name for shared repositories') if oldver<145: print(' r145: process optional deps using --with-optional;\n "soa list " also searches in descriptions') if oldver<146: print(' r146: Optionally configures PYTHONPATH to include ipaaca\n package and PATH to include ipaaca scripts') if oldver<147: print(' r147: unified .soarc path specifications') if oldver<148: print(' r148: "soa deps [--with-optional]": deps in build order') if oldver<149: print(' r149: "soa deps" notifies of incomplete searches') if oldver<150: print(' r150: "soa showconfig": show go... script default values') if oldver<151: print(' r151: show Wiki links during "soa help" and "soa init"') if oldver<152: print(' r152: support for remote migration (e.g. to github);\n started adaptation for new TechFak infrastructure') if oldver<153: print(' r153: soa init for new infrastructure (migration pending)') if oldver<154: print(' r154: migrated to server "files" and repos on /vol/scssrc') if oldver<155: print(' r155: fixed access error') if oldver<156: print(' r156: try to find outdated remotes with other directory bases') if oldver<157: print(' r157: mended "soa fix"') if oldver<158: print(' r158: mended remote change') if oldver<159: print(' r159: re-added SOA_SERVER_URL for use by external scripts') if oldver<160: print(' r160: "soa list" in alphabetical order again (on all systems)') if oldver<161: print(' r161: disable introspection for default rsb.conf (UTF8 issues)') if oldver<162: print(' r162: "soa init" can install bash completion rules to home dir') # the main repo url on the server MAIN_REPO_SERVER_ADDRESS = 'files.techfak.uni-bielefeld.de' #SSH_REMOTE_GIT_ROOT = '/vol/soa/repositories' # OLD SSH_REMOTE_GIT_ROOT = '/vol/scssrc/repositories' # base URL for new public versions of SOA script #REMOTE_SCRIPT_BASE = 'http://www.techfak.uni-bielefeld.de/~ryaghoub' REMOTE_SCRIPT_BASE = 'http://www.techfak.uni-bielefeld.de/ags/soa/software' REMOTE_SCRIPT_FILE = REMOTE_SCRIPT_BASE+'/soa' SOA_NETBOOT_VOLUME_DIR = '/vol/soa' BUILD_SYSTEM_REPO_NAME = 'hmibuild' BUILD_SYSTEM_SOURCE = 'https://github.com/ArticulatedSocialAgentsPlatform/hmibuild' SSH_SERVER_URL_PORTB = 'portb.techfak.uni-bielefeld.de' SSH_SERVER_URL_PORTA = 'porta.techfak.uni-bielefeld.de' VERY_OLD_SSH_REMOTE_GIT_ROOT = '/vol/www/techfak/ags/soa/git' # old volume OLD_SSH_REMOTE_GIT_ROOT = '/vol/soa/repositories' # old volume #MATCH_PORTA_PORTB_VOL_SOA_REPO = r'port[ab][.:].*/vol/soa/repositories' # for server migration MATCH_VOL_SOA_REPO = r'/vol/soa/repositories' # for volume migration MATCH_PORTA_PORTB_MISC = r'port[ab](.techfak.uni-bielefeld.de)?:' # for other repos #SSH_REMOTE_GIT_ROOT = SOA_NETBOOT_VOLUME_DIR+'/repositories' # http access is now DEPRECATED HTTP_SERVER_URL = 'www.techfak.uni-bielefeld.de' HTACCESS_DEFAULT_USERNAME = 'students' HTTP_REMOTE_GIT_ROOT = '/ags/soa/git' REMOTE_PROJECT_LIST_FILE='/project-list-cached.txt' SOA_DISCLAIMER=''' DISCLAIMER The software is provided "as is", without warranty of any kind, express or implied, including but not limited to the warranties of merchantability, fitness for a particular purpose and noninfringement. In no event shall the authors or copyright holders be liable for any claim, damages or other liability, whether in an action of contract, tort or otherwise, arising from, out of or in connection with the software or the use or other dealings in the software. ''' RESOLVE_SCRIPT_SOURCE='''#!/bin/bash # This file has been auto-generated by the soa script. BASEPATH="%s" shopt -q nullglob || resetnullglob=1 shopt -s nullglob shopt -q dotglob || resetdotglob=1 shopt -s dotglob function make_directories { mkdir -p deps/bin mkdir -p deps/lib mkdir -p deps/include mkdir -p deps/scripts mkdir -p deps/python } function resolve_locally { echo "Importing from $1 ..." # Begin legacy resolution files=($BASEPATH/$1/dist/bin/*); [ "$files" ] && cp -a $BASEPATH/$1/dist/bin/* deps/bin/ files=($BASEPATH/$1/dist/lib/*); [ "$files" ] && cp -a $BASEPATH/$1/dist/lib/* deps/lib/ files=($BASEPATH/$1/dist/include/*); [ "$files" ] && cp -a $BASEPATH/$1/dist/include/* deps/include/ files=($BASEPATH/$1/dist/scripts/*); [ "$files" ] && cp -a $BASEPATH/$1/dist/scripts/* deps/scripts/ files=($BASEPATH/$1/dist/python/*.zip); [ "$files" ] && for zipfile in $BASEPATH/$1/dist/python/*.zip; do echo " $zipfile" unzip -oqq $zipfile -d deps/python done # End legacy resolution foundsome=0 files=($BASEPATH/$1/dist/*.cpp.zip); [ "$files" ] && foundsome=1 && for zipfile in $BASEPATH/$1/dist/*.cpp.zip; do echo " $zipfile" unzip -oqq $zipfile -d deps/ done files=($BASEPATH/$1/dist/*.py.zip); [ "$files" ] && foundsome=1 && for zipfile in $BASEPATH/$1/dist/*.py.zip; do echo " $zipfile" unzip -oqq $zipfile -d deps/python done files=($BASEPATH/$1/dist/*.scripts.zip); [ "$files" ] && foundsome=1 && for zipfile in $BASEPATH/$1/dist/*.scripts.zip; do echo " $zipfile" unzip -oqq $zipfile -d deps/scripts done [ "$foundsome" = "0" ] && { [ "$2" = "selective" ] && echo " (No local files imported.)" || echo " WARNING: No files imported. Has 'soa build $1' been run?" } } if [ -z "$1" ] then # Ant resolution and local resolution for the packages listed in DEPS.txt eval "`grep '^REQUIRED=\|^OPTIONAL=' DEPS.txt`" REQ="$REQUIRED" OPT="$OPTIONAL" echo "Performing ant resolution ..." %s ant resolve [ $? = 130 ] && exit 130 echo "Performing local resolution ... " make_directories for P in $REQ $OPT; do %s resolve_locally $P normal true done else # Local resolution for the 1 package provided as argument to this script echo "Performing selective local resolution ... " make_directories resolve_locally $1 selective fi chmod -R +x deps/bin chmod -R +x deps/scripts echo "Done." [ "$resetdotglob" ] && shopt -u dotglob [ "$resetnullglob" ] && shopt -u nullglob ''' # required since find syntax differs across systems ... REPO_LIST_SCRIPT_SOURCE='''#!/usr/bin/env python import sys, os, stat, pwd, grp, glob for fn in glob.glob('*.git'): st=os.stat(fn) un=pwd.getpwuid(st.st_uid).pw_name if st.st_mode & stat.S_IWGRP: gr=grp.getgrgid(st.st_gid).gr_name un+=':'+gr descr = '(No access)' try: with open(fn+'/description', 'r') as f: descr = ' '.join(map(str.strip, f.readlines())) except Exception, e: pass print(fn+"||"+un+"||"+descr) ''' DEFAULT_RSB_CONF_SPREAD_PLUGIN=''' [plugins.cpp] load = rsbspread ''' DEFAULT_RSB_CONF=DEFAULT_RSB_CONF_SPREAD_PLUGIN+''' [introspection] enabled = 0 [transport.socket] enabled = 0 [transport.spread] host = localhost port = 4803 enabled = 1 ''' BASH_COMPLETION_MARKER="soa script completion generated by soa init" BASH_COMPLETION_SOURCE = "# " + BASH_COMPLETION_MARKER + '\n' + '''_soa() { if [ $COMP_CWORD -eq 1 ]; then SOA_COMMANDS="build build-all clean clean-all clone clone-all create deps fix help history init list pull pull-all run selfupdate showconfig update update-all" local cur=${COMP_WORDS[COMP_CWORD]} COMPREPLY=( $(compgen -W "${SOA_COMMANDS}" -- $cur) ) elif [ $COMP_CWORD -gt 1 ]; then CMD=${COMP_WORDS[1]} PROJS=$(cd $SOA_REPO_DIR; /bin/ls -1d -- */) case "$CMD" in "clean-all" | "clone-all" | "build-all" | "update-all" | "create" | "deps" ) PROJS="--with-optional $PROJS" ;; esac case "$CMD" in "build" | "build-all" ) PROJS="--skip-ant-resolve --fast $PROJS" ;; esac local cur=${COMP_WORDS[COMP_CWORD]} matches=$(compgen -W "${PROJS}" -- $cur) COMPREPLY=( ${matches//\//} ) fi } complete -F _soa soa ''' if sys.stdout.isatty(): def highlight(text): return ''+text+'' def highlight_red(text): return ''+text+'' def highlight_green(text): return ''+text+'' def highlight_blue(text): return ''+text+'' else: def highlight(text): return text def highlight_red(text): return text def highlight_green(text): return text def highlight_blue(text): return text def determine_system_type(): pipe = subprocess.Popen('uname -s', shell=True, bufsize=16384, stdout=subprocess.PIPE).stdout systype = pipe.read().strip() pipe.close() # chdir to volume dir to force NFS mapping ret = subprocess.call("( cd "+SOA_NETBOOT_VOLUME_DIR+' >/dev/null 2>&1 )', shell=True) hasvol = os.path.exists(SOA_NETBOOT_VOLUME_DIR) return systype, hasvol def get_columns(): pipe = subprocess.Popen('tput cols', shell=True, bufsize=16384, stdout=subprocess.PIPE).stdout cols = pipe.read().strip() pipe.close() return 80 if cols=='' else int(cols) def get_path_to_script(): pathname = os.path.realpath(__file__) return os.path.abspath(pathname) def get_path_to_script_dir(): pathname = os.path.realpath(os.path.dirname(__file__)) return os.path.abspath(pathname) def determine_paths(): pathstr = os.getenv('PATH') if pathstr is None: return [] else: return pathstr.split(':') def is_script_in_path(): return get_path_to_script_dir().strip() in determine_paths() def determine_writeable_paths(): paths = determine_paths() writeable_paths = [] non_writeable_paths = [] # get UID and GIDs pipe = subprocess.Popen('id -u', shell=True, bufsize=16384, stdout=subprocess.PIPE).stdout own_uid = int(pipe.read().strip()) pipe.close() pipe = subprocess.Popen('id -G', shell=True, bufsize=16384, stdout=subprocess.PIPE).stdout own_gids = map(int, pipe.read().split()) pipe.close() for path in paths: path=path.strip() if path != '' and os.path.isdir(path): if own_uid == 0: # root / sudo can write everything writeable_paths.append(path) else: # check whether user will be able to write to dir # (check permissions; without actually writing anything) st = os.stat(path) wusr = (stat.S_IMODE(st.st_mode) & stat.S_IWUSR) > 0 wusr_able = wusr and (st.st_uid == own_uid) wgrp = (stat.S_IMODE(st.st_mode) & stat.S_IWGRP) > 0 wgrp_able = wgrp and (st.st_gid in own_gids) and (st.st_uid != own_uid) woth = (stat.S_IMODE(st.st_mode) & stat.S_IWOTH) > 0 woth_able = woth and (st.st_uid != own_uid) and (st.st_gid not in own_gids) can_write = wusr_able or wgrp_able or woth_able if can_write: writeable_paths.append(path) else: non_writeable_paths.append(path) return writeable_paths, non_writeable_paths def get_terminal_relaunch_self_command(): systype, hasvol = determine_system_type() if systype == 'Darwin': return 'osascript -e \'tell application "Terminal" to do script "'+get_path_to_script()+' init"\'' else: return 'xterm -e "'+get_path_to_script()+'" &' def terminal_relaunch_self(): cmd = get_terminal_relaunch_self_command() #pipe = subprocess.Popen(cmd, shell=True, bufsize=16384, stdout=subprocess.PIPE).stdout child = subprocess.Popen(cmd, shell=True, bufsize=16384) sys.exit(0) def extract_options(args_orig, options): args = args_orig[:] results = {} i=0 while i < len(args): found = False for o,with_arg in options.items(): if args[i]=='--'+o: found = True if with_arg: if i+10 and shell[-1] != 'bash': print(highlight_red('\n Sorry - unable to automatically write the shell setup:')) print(highlight(' You seem not to be using a standard bash shell.')) print(' Please set the variable SOA_DISCLAIMER_ACCEPTED=1') print(' in your .soarc file, according to your proper shell syntax,') print(' and proceed from a new terminal for changes to take effect.') sys.exit(0) else: print(' The choice can be saved to your .soarc for the future') while True: ans = raw_input(' Append to .soarc (y/n): ') if ans in ['y', 'Y', 'yes']: f = open(confname, 'a') f.write('\nexport SOA_DISCLAIMER_ACCEPTED=1\n') f.close() print(highlight('Please proceed from a new terminal for changes to take effect.')) sys.exit(0) elif ans in ['n', 'N', 'no']: print(' OK - skipping.') break return True elif ans in ['NO']: print(highlight_red('Execution canceled.')) sys.exit(0) def get_access_config(self): self.server_url = MAIN_REPO_SERVER_ADDRESS self.use_http = False self.remote_git_root = SSH_REMOTE_GIT_ROOT def get_repo_dir(self): return os.getenv('SOA_REPO_DIR'), os.getenv('HOME', '$HOME')+'/repo' def get_remote_script(self): try: response = urllib2.urlopen(REMOTE_SCRIPT_FILE, timeout=UPDATE_SERVER_TIMEOUT) scripttext = response.read() scriptlines=scripttext.split('\n') for l in scriptlines: if l.find('CURRENT_SCRIPT_VERSION')==0: version = int(l.strip().split('=')[1]) break return scripttext, version except Exception, e: print(highlight_red('Error - Failed to open the official remote script - update check disabled!')) return '', CURRENT_SCRIPT_VERSION def make_git_url(self, proj=None, source=None): u='' if self.use_http: u = 'http://'+self.server_url+self.remote_git_root+('' if proj is None else ('/'+proj+'.git')) else: username = self.techfak_username if self.techfak_username is not None else os.getenv('USER') if source is None: # default source u = 'ssh://'+username+'@'+self.server_url+self.remote_git_root+('' if proj is None else ('/'+proj+'.git')) else: # requested special source url = source['url'] if '@' not in url: url = username+'@'+url u = 'ssh://'+url+('' if proj is None else ('/'+proj+'.git')) #print(u'DEBUG: make_git_url: '+unicode(u)) return u def update_check(self): self.remote_script_text, self.remote_script_version = self.get_remote_script() if self.remote_script_version > CURRENT_SCRIPT_VERSION: print(highlight_blue('Info - new version of the script is available; update with "soa selfupdate"')) elif self.remote_script_version < CURRENT_SCRIPT_VERSION: print(highlight_blue('Info - your version of the soa script is newer than the official one.')) print(' You can downgrade to the official remote version using "soa selfupdate"') def env_check(self, silent=False): self.repo_dir_ok = False if self.repo_dir is not None: if os.path.isdir(self.repo_dir): self.repo_dir_ok = True pass else: if silent: return print(highlight_red('Warning - your working environment is not fully initialized!')) print(' SOA_REPO_DIR defined as '+self.repo_dir+', but not accessible!\n---') else: if silent: return print(highlight_red('Warning - your working environment is not initialized!')) print(' Consider running "soa init" (it is a guided process),') print(' or export the necessary environment manually in your shell setup.\n---') def enforce_env(self): if self.repo_dir is None: print(highlight_red('Error - your working environment is not fully initialized!')) print(highlight_red(' Please use "soa init" to create it, or set up your shell login file')) sys.exit(1) def path_to_list(self, l): p=[] while l!='': l,e=os.path.split(l) p.append(e) if l=='/': break p.reverse() return p def list_to_path(self, l, prefix=''): path=prefix for el in l: path=os.path.join(path, el) return path def guess_project_name_based_on_cwd(self, crop=True): if not self.repo_dir_ok: return [] #### Old resolution, below, has been restored by request ## New path resolution (defaulting to shell-provided PWD) ## This means that symlinked project roots are now fully supported ## - if you use a shell that sets PWD. #relname = os.getenv('PWD') #if relname is None or len(relname)==0: # print(highlight_red('PWD not set, will use getcwdu() -> symlinked project root not supported.')) # relname = os.getcwdu() #proj_parts = self.path_to_list(relname) # Older path resolution from r133 and before: relname = os.path.realpath(os.getcwd()) absname = os.path.abspath(relname) proj_parts = self.path_to_list(absname) repo_parts = self.path_to_list(self.repo_dir) #print(str(repo_parts)) #print(str(proj_parts)) if len(repo_parts)0: #print(str(extra_parts)) e = extra_parts[:] e.append('DEPS.txt') filename = self.list_to_path(e, prefix=self.repo_dir) if os.path.exists(filename): #print('exists: '+filename) projname = self.list_to_path(extra_parts, prefix='') #print(highlight_green('Found a DEPS.txt in '+projname+', using this directory implicitly.')) return [projname] else: pass #print('-') #print('does not exist: '+filename) extra_parts = extra_parts[:-1] return [] #if absname.find(self.repo_dir)==0: # head, tail = os.path.split(absname) # last_head = head # while head!='/': # if head == self.repo_dir: # if tail.strip()!='': # return [tail.split('/')[0]] # else: # return [] # last_head = head # head, tail = os.path.split(head) # if last_head==head: # break # return [] else: print(highlight_red(' Not inside a repo root subdir, failed to guess project name.')) return [] def is_soa_netboot(self): return self.has_soa_volume def is_mac(self): return self.system_type == 'Darwin' def get_editor(self): ed = os.getenv('EDITOR') if self.is_mac(): if ed is not None: ret = subprocess.call("which "+ed+" >/dev/null 2>&1", shell=True) if ret==0: return ed return 'open' else: editors = ['gedit', 'gvim'] if ed is not None: editors.insert(0, ed) for i in editors: ret = subprocess.call("which "+i+" >/dev/null 2>&1", shell=True) if ret==0: return i return None def print_soa_title(self): print('soa script, revision '+str(CURRENT_SCRIPT_VERSION)+' (c) 2012-2017 Social Cognitive Systems Group\nThe soa script is used to create, fetch, update, compile, run soa packages.\n---') def cmd_help(self, args): if len(args)>2: cmd=args[2] handler = None try: cmd_ = cmd.replace('-','_') handler = getattr(self, 'longhelp_'+cmd_) except Exception, e: pass if handler is None: try: cmd_ = cmd.replace('-','_') handler = getattr(self, 'shorthelp_'+cmd_) handler(args) except Exception, e: print('Sorry - no specific help defined for command "'+cmd+'"') pass else: print('Help for command "'+cmd+'":') handler(args) else: self.print_soa_title() print('For basic usage, please refer to this Wiki page:') print(' '+TUTORIAL_URL) print('---') print('List of commands (use "soa help " for specific help):') cmds = sorted(self.get_command_list()) for c in cmds: hlp = c.replace('_', '-') try: handler = getattr(self, 'shorthelp_'+c) handler(args) except Exception, e: print(' '+hlp) pass def shorthelp_help(self, args): print(' help [] this help / show help for specific ') def get_command_list(self): cmds = [] attrs=dir(self) for a in attrs: if a.find('cmd_') == 0: cmds.append(a[4:]) return cmds def shorthelp_history(self, args): print(' history print script revision history') def longhelp_history(self, args): print('"soa history" prints the revision history of the soa script.') def cmd_history(self, args): print(highlight('Known revision history:')) show_update_info(0, CURRENT_SCRIPT_VERSION) sys.exit(0) def cmd_selfupdate(self, args): if self.remote_script_text is None or self.remote_script_text == '': print(highlight_red(' Cannot self-update: remote script not found.\n Please re-download from the wiki/website.')) else: print(highlight(' Information about remote version:')) mod = imp.new_module('soa%06x'%random.randint(0,1000000)) exec self.remote_script_text in mod.__dict__ if 'show_update_info' in dir(mod): mod.show_update_info(CURRENT_SCRIPT_VERSION, self.remote_script_version) path = get_path_to_script() while True: ans = raw_input(' Replace '+path+' (ver '+str(CURRENT_SCRIPT_VERSION)+') with remote version '+str(self.remote_script_version)+' ? (y/n): ') if ans in ['y', 'Y', 'yes']: break elif ans in ['n', 'N', 'no']: print(' OK - canceling.') return print(highlight(' Replacing script ...')) try: f = open(path, 'w') f.write(self.remote_script_text) f.close() print(highlight_green('Update succeeded.')) except Exception, e: print(highlight_red('Update failed: Failed to write to '+path)) def shorthelp_selfupdate(self, args): print(' selfupdate perform a self-update in-place') def longhelp_selfupdate(self, args): print(' The "soa selfupdate" command updates the soa script, replacing the current') print(' file with the official version (if such exists). Current remote location:') print(' '+REMOTE_SCRIPT_FILE) def shorthelp_list(self, args): print(' list [] list remotely available projects [containing ]') def longhelp_list(self, args): print(' The "soa list" command lists the contents of the remote SOA git root.\n You can specify a string that must match (case-insensitively) in project names.') def cmd_list(self, args): filt='' if len(args) > 2: filt=args[2] sources = [None] sources.extend(self.extra_sources.values()) for i,source in enumerate(sources): urlstr = (self.server_url+':'+self.remote_git_root+' (main source)') if source is None else (source['url']+' (source name '+highlight('"'+source['id']+'"')+')') proj, templ = self.get_remote_project_list(source) # determine nice formatting #colwidth1, colwidth2 = 16, 14 colwidth1, colwidth2 = 30, 0 for p in proj: if len(p[0])>=colwidth1: colwidth1 = len(p[0]) #if len(p[1])>=colwidth2: # colwidth2 = len(p[1]) for p in templ: if len(p[0])>=colwidth1: colwidth1 = len(p[0]) #if len(p[1])>=colwidth2: # colwidth2 = len(p[1]) cols = get_columns() - colwidth1 - colwidth2 - 4 firstline = True if len(proj)>0: for p in proj: if filt=='' or (p[0].lower().find(filt.lower()) > -1) or (p[2].lower().find(filt.lower()) > -1): if firstline: firstline = False if len(args) > 2: print('Projects at '+urlstr+' containing "'+filt+'" :') #filt = '*'+filt else: print('Projects at '+urlstr+' :') #print(('%-'+str(colwidth1)+'s %-'+str(colwidth2)+'s %s')%('(project)','(maintainer)','(description)')) print(('%-'+str(colwidth1)+'s %s')%('(project)','(description)')) #print((highlight(' %-'+str(colwidth1)+'s')+' %-'+str(colwidth2)+'s %s')%(p[0],p[1],p[2][0:cols])) print((highlight(' %-'+str(colwidth1)+'s')+' %s')%(p[0],p[2][0:cols])) else: print(highlight(' (None!)')) # firstline = True for p in templ: if filt=='' or (p[0].lower().find(filt.lower()) > -1) or (p[2].lower().find(filt.lower()) > -1): if firstline: firstline = False if len(args) > 2: print('Templates at '+urlstr+' containing "'+filt+'" :') else: print('Templates at '+urlstr+' :') #print((highlight(' %-'+str(colwidth1)+'s')+' %-'+str(colwidth2)+'s %s')%(p[0],p[1],p[2][0:cols])) print((highlight(' %-'+str(colwidth1)+'s')+' %s')%(p[0],p[2][0:cols])) if i/dev/null 2>&1' ret = subprocess.call(list_rsync_command, shell=True) #if ret==0: data = '' cached_items = [] # produce list from local clone for repodir in glob.glob(temp_dir+'/*/*.git'): reponame = repodir.rsplit('/', 1)[1] try: with open(repodir+'/description', 'r') as f: text = f.read().replace('\n', ' ').strip() except: text = '(no access)' cached_items.append( [reponame, text] ) for item in sorted(cached_items, key=lambda i:i[0].lower()): reponame, text = item line = reponame + '||(unknown)||' + text data += line + '\n' #else: # data = '(ERROR).git||(ERROR)||Error accessing source via rsync!' #print(temp_dir) #ret = subprocess.call('ls -l "'+temp_dir+'"', shell=True) ret = subprocess.call('rm -rf "'+temp_dir+'"', shell=True) else: # old block (pre TechFak ssh blockage) #remote_list_file=self.remote_git_root+REMOTE_PROJECT_LIST_FILE #list_cmd = 'ssh '+username+"@"+self.server_url+' "bash -c "cd '+self.remote_git_root+'; TM=\`/bin/tempfile --prefix=soa\`; find -maxdepth 1 -type d -name \''+'*.git\' -printf \'%f:%u:\' -exec sed -e\'s/\\n/ /g\' {}/description \; | sort | tee \$TM; mv \$TM '+remote_list_file+' 2>&1 || rm \$TM 2>&1; chmod 664 '+remote_list_file+' 2>&1; chown :soawww '+remote_list_file+'\""' #list_cmd = 'ssh '+username+"@"+self.server_url+' "bash -c \'/vol/soa/bin/soalist '+self.remote_git_root+' '+remote_list_file+'\'"' #list_cmd = 'ssh '+connect+' "cd '+path+'; find . -maxdepth 1 -type d -name \'*.git\' -printf \'%f:%u:\' -exec sed -e\'s/\\n/ /g\' {}/description \; | sort"' #print(list_cmd) #data = pipe.read().strip() list_cmd = 'ssh '+connect+' \'cd '+path+'; ( fn=`mktemp /tmp/repo_list_script.XXXXXX`; cat > $fn; python $fn; rm $fn ) | sort\'' pipe = subprocess.Popen(list_cmd, shell=True, bufsize=16384, stdin=subprocess.PIPE, stdout=subprocess.PIPE) data = pipe.communicate(input=REPO_LIST_SCRIPT_SOURCE)[0].strip() # Now visualize 'data', no matter whence it came. proj = [] templ = [] for l in data.split('\n'): f = l.split('||') if len(f)<3: break name = f[0][:-4] maint = f[1] descr = ':'.join(f[2:]) if descr.find('Unnamed repository;')==0: descr = '-' if name.find('TEMPLATE_')==0: name = name[9:] templ.append([name, maint, descr]) else: proj.append([name, maint, descr]) # return proj, templ def shorthelp_fix(self, args): print(' fix fix projects to a sane state (guided process)') def longhelp_fix(self, args): print(' The command "soa fix" will lead you through a sequential process for\n' +' checking and repairing project setup files inside your work directory.\n' +' It is a basic sanity check and will adapt your tree to script changes.') def cmd_fix(self, args): ''' Perform sanity checks on the project directories and fix errors. ''' print(highlight('soa fix - fix projects to a sane state:')) self.enforce_env() print(highlight('Step #1: checking for deprecated git remote URLs')) # get list of remote projects (to see what can be migrated) projs, templ = self.get_remote_project_list() remote_gits = [it[0] for it in projs+[['TEMPLATE_'+n[0],n[1],n[2]] for n in templ]] print('All projects in the remote repo: '+' '.join(sorted(remote_gits))) # iterate directories in work dir dirlist = os.listdir(self.repo_dir) for proj in dirlist: if os.path.isdir(self.repo_dir+'/'+proj): if os.path.isdir(self.repo_dir+'/'+proj+'/.git'): self.perform_repo_fix(proj, remote_gits, warn_if_nonexistent=True) print(highlight('Step #2: double-checking for outdated rsb config')) self.perform_config_file_setup('', show_info=False) print(highlight('Step #3: trying to update the build system ('+BUILD_SYSTEM_REPO_NAME+')')) self.perform_update(BUILD_SYSTEM_REPO_NAME, False) print(highlight('Step #4: checking basic system setup')) self.perform_system_specific_setup(check_only=True) print(highlight('soa fix - completed.')) def perform_repo_fix(self, proj, remote_gits, warn_if_nonexistent=False): '''See whether a directory is a clone of a deprecated remote, and offer to change to the new remote (if such exists).''' print('Checking '+proj) newurl = None specified_new_remote_location = None remote_change_cmd = "cd "+self.repo_dir+'/'+proj+"; git log 2>&1 | grep '^\s*REMOTE-CHANGE:' | head -n1" pipe = subprocess.Popen(remote_change_cmd, shell=True, bufsize=16384, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout remote_change_lines = pipe.read().strip().split('\n') pipe.close() if len(remote_change_lines)>0 and len(remote_change_lines[0])>0: specified_new_remote_location = remote_change_lines[0].strip().split(' ',1)[1] # get_remote_cmd = "cd "+self.repo_dir+'/'+proj+"; git remote -v 2>&1 | grep '^origin\t'" pipe = subprocess.Popen(get_remote_cmd, shell=True, bufsize=16384, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout remote_lines = pipe.read().strip().split('\n') pipe.close() tentative = False if len(remote_lines)>0 and len(remote_lines[0])>0: # outdated main repo outdated_remotes_main = filter(lambda m: m is not None, map(lambda m: None if m is None else m.group(0), [ re.search(MATCH_VOL_SOA_REPO, remote) for remote in remote_lines ])) # possibly outdated secondary repos (containing porta. portb. porta: portb:) outdated_remotes_port = filter(lambda m: m is not None, map(lambda m: None if m is None else m.group(0), [ re.search(MATCH_PORTA_PORTB_MISC, remote) for remote in remote_lines ])) #outdated_remotes = filter(lambda i: i.find(VERY_OLD_SSH_REMOTE_GIT_ROOT)>-1 or i.find(OLD_SSH_REMOTE_GIT_ROOT)>-1 or i.find(HTTP_REMOTE_GIT_ROOT)>-1, remote_lines ) #isilon words = remote_lines[0].split() oldurl = words[1] gitproj = oldurl.split('/')[-1].split('.')[0] if specified_new_remote_location is not None: # URL REMOTE-CHANGE: has been specified in project's git log, propose to use it if oldurl.find(specified_new_remote_location.split('//', 1)[-1])==-1: newurl = specified_new_remote_location elif len(outdated_remotes_main)>0 and gitproj in remote_gits: # old URL used, propose to use new master server newurl = self.make_git_url(gitproj) if specified_new_remote_location is None else specified_new_remote_location elif len(outdated_remotes_main)==0 and len(outdated_remotes_port)>0: tentative = True newurl = re.sub(MATCH_PORTA_PORTB_MISC, 'files.techfak.uni-bielefeld.de:', oldurl) #elif len(outdated_remotes_main)>0 and len(outdated_remotes_port)==0: # pass # all OK elif len(outdated_remotes_main)==0: # all OK pass else: if warn_if_nonexistent: print(highlight_red(' Warning: Project '+proj+' uses repo ')+highlight(gitproj)+highlight_red(', which does not exist anymore in the new root!')) if newurl is not None: if tentative: print(highlight(' Project '+proj+' possibly uses a remote on an old server: '+oldurl)) else: print(highlight(' Project '+proj+' uses a deprecated remote repository: '+oldurl)) while True: ans = raw_input(' Should it be replaced with '+highlight_blue(newurl)+' ? (y/n) : ') if ans in ['y', 'Y', 'yes']: cmd = "cd "+self.repo_dir+'/'+proj+"; git remote set-url origin "+newurl ret = subprocess.call(cmd, shell=True) if ret==0: print(highlight_green(' OK - project migrated to new remote.')) break else: print(highlight_red(' Error migrating to new remote.')) break elif ans in ['n', 'N', 'no']: print(' OK - skipped migration.') break def shorthelp_init(self, args): print(' init initialize a working environment (guided process)') def longhelp_init(self, args): print(' The command "soa init" will lead you through a sequential process for\n' +' setting up or repairing your local soa work environment. You can acknowledge\n' +' each step individually. Usually, initialization has to be done only once.') def cmd_init(self, args): print(highlight_green('soa init - initialize a working environment:')) self.perform_init() def perform_init(self): self.perform_own_runpath_setup() ans = raw_input(' [Hit Enter for the next step in the init process] ') self.perform_system_specific_setup() ans = raw_input(' [Hit Enter for the next step in the init process] ') self.ask_access_config() ans = raw_input(' [Hit Enter for the next step in the init process] ') path, parall = self.ask_path_setup() if path is None: return self.perform_path_setup(path) ans = raw_input(' [Hit Enter for the next step in the init process] ') self.perform_basic_repo_setup(path) ans = raw_input(' [Hit Enter for the next step in the init process] ') self.perform_config_file_setup(path) ans = raw_input(' [Hit Enter for the next step in the init process] ') self.perform_shell_setup(path, parall) print(highlight_green(' ... and we are done.')) print(highlight_green(' Start importing projects using "soa list" / "soa clone" (-> "soa help")')) if self.new_term_required and not self.reboot_required: print(highlight(' NOTE: Modified .bashrc / .soarc, please open a new terminal or re-login.')) if self.reboot_required: print(highlight('\n NOTE: RE-LOGIN / REBOOT is required due to rcinfo changes!')) def perform_own_runpath_setup(self): print(' ---\n'+highlight('=== SCRIPT SETUP ===')) w_paths, n_w_paths = determine_writeable_paths() if is_script_in_path(): if get_path_to_script_dir() in w_paths: print(highlight_green(' The soa script is in the PATH, and writeable.')) else: print(highlight(' OK, the soa script is in a write-restricted PATH.\n For future updates, use "sudo soa selfupdate".')) else: print(highlight_red(' The soa script is not in your binary search PATH.')) def perform_system_specific_setup(self, check_only=False): must_run_init = False print(' ---\n'+highlight('=== SYSTEM SETUP ===')) # cmake test cmd = 'which cmake >/dev/null 2>&1' ret = subprocess.call(cmd, shell=True) if ret==0: print(' Cmake is installed and accessible.') else: print(highlight_red(' Cmake not found: make sure that it is installed and accessible!')) # protobuf test cmd = 'which protoc >/dev/null 2>&1' ret = subprocess.call(cmd, shell=True) if ret==0: cmd = 'protoc --version 2>&1' pipe = subprocess.Popen(cmd, shell=True, bufsize=16384, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout st = pipe.read().strip() pipe.close() if st.find(' 2.6')>-1: print(' Protoc 2.6 is installed.') else: print(highlight_red(' Protoc did not report version 2.6, please check protobuf installation.')) else: print(highlight_red(' Protoc not found: make sure that protobuf 2.6.x is installed!')) # soa netboot specifics if self.is_soa_netboot(): print(highlight(' You are on SOA netboot')) java_package = 'java8' cmd = 'grep -q "'+java_package+'" ~/.rcinfo' ret = subprocess.call(cmd, shell=True) if ret==0: print(highlight_green(' Java 1.8 is configured in rcinfo.')) else: if check_only: print(highlight_red(' You have to enable Java 1.8')) must_run_init = True else: print(highlight(' You have to enable Java 1.8 in rcinfo.\n')) while True: ans = raw_input(' Do you want to automatically append '+java_package+' to ~/.rcinfo ? (y/n): ') if ans in ['y', 'Y', 'yes']: rcname=os.getenv('HOME')+'/.rcinfo' try: f = open(rcname, 'a') f.write('\n'+java_package+'\n') f.close() print(highlight_green(' OK - re-login / reboot required after final steps.')) self.reboot_required = True except: print(highlight_red(' ... failed to write to '+rcname+' !')) break elif ans in ['n', 'N', 'no']: break # stand-alone Macs elif self.is_mac(): mac_ok = True print(highlight(' You are on Mac OS X')) print(' -- Make sure to take note of Mac-specific setup info in the SOA Wiki --') print(highlight(' '+TUTORIAL_URL+'#mac-os-x-systems')) cmd = '''java -version''' pipe = subprocess.Popen(cmd, shell=True, bufsize=16384, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout st = pipe.read().strip() pipe.close() try: sts = st.strip('\n').split('\n') ver = sts[0].split('"')[1].split('.') major, minor = int(ver[0]), int(ver[1]) if major>1 or (major==1 and minor>=8): print(" Currently selected java version %s.%s is recent enough."%(ver[0],ver[1])) else: if check_only: print(highlight_red(" Currently selected java version %s.%s is too old for certain projects."%(ver[0],ver[1]))) mac_ok = False must_run_init = True else: print(" Currently selected java version %s.%s is too old for certain projects."%(ver[0],ver[1])) print(" Trying to select newer java version >=1.8") java_set_cmd = '/usr/libexec/java_home -v 1.8' proc = subprocess.Popen(java_set_cmd, shell=True, bufsize=16384, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) res = proc.communicate() if proc.returncode == 0: self.append_config += '\n'+'export JAVA_HOME=`'+java_set_cmd+'` # select more recent console java' print(highlight_green(' Found a recent java version - it will be appended to .soarc later')) else: print(highlight_red(' WARNING - Unable to locate recent java >=1.8 - Install one!')) mac_ok = False except: print(highlight_red(' WARNING - Unable to determine java version - Install a jdk!')) mac_ok = False if check_only: must_run_init = True if not os.path.exists('/opt/local/lib/libQtCore.dylib') and not os.path.exists('/usr/local/lib/QtCore.framework/QtCore'): print(highlight_red(' Qt4 from Homebrew or MacPorts does not seem to be installed.')) mac_ok = False if not os.path.exists('/Library/Frameworks/Ogre.framework'): print(highlight_red(' Custom-built Ogre framework does not seem to be installed.')) mac_ok = False which_python_cmd = 'which python' proc = subprocess.Popen(which_python_cmd, shell=True, bufsize=16384, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) res = proc.communicate() if proc.returncode != 0: print(highlight_red(' Strange: no python found at all!')) mac_ok = False else: if res[0].find('/usr/bin/') == 0: print(highlight_red(' You are using stock python. Refer to the Wiki to change it to the Homebrew one.')) mac_ok = False if mac_ok: print(highlight_green(' You appear to have key points of the Mac setup in place.')) else: print(highlight_red(' You appear to lack basic OSX setup, check the Wiki before building projects')) else: print(highlight(' System is neither SOA netboot nor Mac: no more generic tests run.')) print(' -- Please ensure that Linux packages etc. are properly prepared (see Wiki): --') print(highlight(' '+TUTORIAL_URL+'#linux-systems')) if must_run_init: print(highlight_red('Please install software as indicated and run "soa init" to update the config.')) def attempt_passwordless_login(self, techfak_username): #print('------------ DISABLED REMOTE LOGIN CHECK ---------------') #return True print(' Attempting password-less ssh login of '+techfak_username+'@'+self.server_url+' ...') cmd = 'ssh -oBatchMode=yes '+techfak_username+'@'+self.server_url+' true >/dev/null 2>&1' ret = subprocess.call(cmd, shell=True) if ret==0: print(highlight_green(' ... password-less login works.')) else: print(highlight_red(' ... password-less login failed.')+' Be prepared to enter the TechFak password often,\n or learn how to obtain and use a local Kerberos ticket (see Wiki).') def ask_access_config(self): ###### os.mkdir(path) print(' ---\n'+highlight('=== REMOTE FILE ACCESS CONFIGURATION ===')) username = None print(' Access will be through files.techfak.uni-bielefeld.de') self.use_http = False self.server_url = MAIN_REPO_SERVER_ADDRESS self.remote_git_root = SSH_REMOTE_GIT_ROOT default_username = self.techfak_username if default_username is None: default_username = os.getenv('USER') username = raw_input(' What is your TechFak user name? ['+default_username+']: ') if username.strip()=='': username = default_username self.attempt_passwordless_login(username) self.techfak_username = username def ask_path_setup(self): print(' ---\n'+highlight('=== PATH SETUP ===')) parallel_inst = False if self.repo_dir_ok: print(highlight(' You already have a working directory at '+self.repo_dir)) while True: ans = raw_input(' Do you wish to create another parallel working directory? (y/n): ') if ans in ['y', 'Y', 'yes']: parallel_inst = True break elif ans in ['n', 'N', 'no']: print(' OK - skipping.') return self.repo_dir, False if self.repo_dir != self.repo_dir_candidate: default_name = self.repo_dir_candidate else: default_name = None dirname = None while True: dirname = raw_input(' Path name of new working dir'+('' if default_name is None else (' [default='+default_name+'] '))+': ').strip() if default_name is not None and dirname=="": dirname = default_name if dirname != "": break if dirname is not None: dirname = os.path.abspath(dirname) while True: ans = raw_input(' Do you wish to proceed with '+dirname+' ? (y/n): ') if ans in ['y', 'Y', 'yes']: return dirname, parallel_inst #self.perform_init(dirname, parallel_inst) #break elif ans in ['n', 'N', 'no']: print(highlight(' OK - canceling.')) return None, None def perform_path_setup(self, path): self.repo_dir = path if os.path.exists(path): if os.path.isdir(path): # skip mkdir etc print(highlight(' Specified directory already exists - skipping creation.')) pass else: # error print(highlight_red(' Error - path already exists, but not a directory')) sys.exit(1) else: # mkdir print(highlight_green(' Creating new working directory '+path)) try: os.mkdir(path) except Exception, e: print(highlight_red(' Error creating the directory.')) sys.exit(1) def perform_basic_repo_setup(self, path): print(' ---\n'+highlight('=== BASIC PROJECT SETUP ===')) if os.path.exists(self.repo_dir+'/'+BUILD_SYSTEM_REPO_NAME): print(highlight(' Build system already in place, skipping.')) else: username = self.techfak_username if username is None: username = os.getenv('USER') ans = raw_input(' Will now try to download the build system. [Hit Enter]') cmd = '( cd '+self.repo_dir+' && git clone '+BUILD_SYSTEM_SOURCE+' )' ret = subprocess.call(cmd, shell=True) if ret==0: print(highlight_green(' ... succeeded in cloning the build system.')) else: print(highlight_red(' ... fatal error - failed to clone the build system!')) sys.exit(1) def perform_config_file_setup(self, path, show_info=True): if show_info: print(' ---\n'+highlight('=== CONFIG FILES ===')) conf_dir = os.getenv('HOME')+'/.config' rsb_conf_name = conf_dir+'/rsb.conf' offer_config_write = True if os.path.exists(rsb_conf_name): print(highlight(' Default RSB config already in place at '+rsb_conf_name)) grep_cmd = 'grep -q rsbspread '+rsb_conf_name ret = subprocess.call(grep_cmd, shell=True) if ret==0: print(highlight(' It contains valid RSB plugin configuration.')) offer_config_write = False else: print(highlight(' ... but it has no RSB plugin configuration.')) else: print(highlight(' You do not have a default RSB config at '+rsb_conf_name)) if offer_config_write: while True: ans = raw_input(' Do you wish to create a default config there? (y/n): ') if ans in ['y', 'Y', 'yes']: try: if not os.path.exists(conf_dir): print(' Creating config directory '+conf_dir) os.mkdir(conf_dir) f = open(rsb_conf_name, 'w') f.write(DEFAULT_RSB_CONF) f.close() except: print(highlight_red(' ... failed to write to '+rsb_conf_name+' !')) break print(highlight_green(' ... created a global default rsb.conf')) break elif ans in ['n', 'N', 'no']: print(highlight(' OK - skipping. Make sure to configure RSB for ipaaca')) break def configure_pythonpath_and_path(self, path): config_str = '\nexport PYTHONPATH=$PYTHONPATH:$SOA_REPO_DIR/ipaaca/deps/python' config_str += '\nexport PATH=$PATH:$SOA_REPO_DIR/ipaaca/deps/scripts' return config_str def generate_config_string(self, path, additional_config_str): # soa-rcinfo command: provoke nfs mount and check for availability before running soarcinfo_bin = '/vol/soa/bin/soa-rcinfo' soarcinfo_cmd = '(cd /vol/soa; ); [ -e '+soarcinfo_bin+' ] && eval "`'+soarcinfo_bin+'`" || echo ".soarc: ERROR: failed to run '+soarcinfo_bin+'"' # build system-specific config string config_str = '' config_str += '[ -n "$SOA_DISCLAIMER_ACCEPTED" ] && return' config_str += '\nexport SOA_REPO_DIR='+path config_str += '\nexport SOA_DISCLAIMER_ACCEPTED='+('1' if self.accepted_soa_disclaimer else '0') #config_str += '\nexport SOA_USE_HTTP='+('1' if self.use_http else '0') config_str += ('' if self.server_url is None else ('\nexport SOA_SERVER_URL='+self.server_url)) config_str += ('' if self.techfak_username is None else ('\nexport TECHFAK_USERNAME='+self.techfak_username)) config_str += '\nexport MARY_BASE=$SOA_REPO_DIR/marytts' config_str += ('\nexport SOA_RCINFO_ILIST="spread $SOA_RCINFO_ILIST"\n'+soarcinfo_cmd if self.is_soa_netboot() else '') config_str += '\nexport ANT_OPTS="-Xmx256M"' config_str += additional_config_str config_str += self.append_config config_str += '\n' return config_str def perform_shell_setup(self, path, parall): ###### os.mkdir(path) print(' ---\n'+highlight('=== SHELL SETUP (final step) ===')) while True: ans = raw_input(' Do you want to include ipaaca in $PYTHONPATH and ipaaca-tools in $PATH? (Y/n): ') if ans in ['y', 'Y', 'yes']: additional_config_str = self.configure_pythonpath_and_path(path) break elif ans in ['n', 'N', 'no']: print(' Ok, skipping.') additional_config_str = '' break print(highlight('\n The following is the shell setup for this system:')) config_str = self.generate_config_string(path, additional_config_str) print(config_str) if parall: print(highlight(' NOTE: previous project directory was '+self.repo_dir)) while True: ans = raw_input(' Do you want to write it to ~/.soarc now? (Y/n): ') if ans in ['y', 'Y', 'yes']: confname = self.get_config_file_name() try: self.new_term_required = True f = open(confname, 'w') f.write(config_str) f.close() except Exception, e: print(highlight_red('Error: failed to write config file '+confname)) pass break elif ans in ['n', 'N', 'no']: print(' Ok, skipping. You should configure your shell setup.') break # source_string = '[ -e ~/.soarc ] && source ~/.soarc' shell = os.getenv('SHELL').split('/') if len(shell)>0 and shell[-1] != 'bash': print(highlight_red('\n Sorry - unable to automatically write the shell setup:')) print(highlight_red(' You seem not to be using a standard bash shell.')) print(highlight_red(' Please adapt the .soarc file in your home directory manually,')) print(highlight_red(' and make sure that it is sourced/included in your shell setup file!')) else: print('\n The soarc has to be sourced in your .bashrc:') print(source_string) grep_cmd = 'grep -q "'+source_string+'" ~/.bashrc' ret = subprocess.call(grep_cmd, shell=True) if ret==0: print(highlight(' OK: .soarc is already sourced in .bashrc')) else: print(highlight(' .soarc is not yet included in .bashrc')) while True: ans = raw_input(' Do you want to automatically include it in .bashrc ? (Y/n): ') if ans in ['y', 'Y', 'yes']: self.new_term_required = True rcname=os.getenv('HOME')+'/.bashrc' try: f = open(rcname, 'a') f.write(source_string+'\n') f.close() except Exception, e: print(highlight_red('Error: failed to append to .bashrc')) pass break elif ans in ['n', 'N', 'no']: print(' Ok, skipping. Do not forget to include it yourself.') break print('\n The soarc has to be sourced in your .bash_profile:') print(source_string) grep_cmd = 'grep -q "'+source_string+'" ~/.bash_profile' ret = subprocess.call(grep_cmd, shell=True) if ret==0: print(highlight(' OK: .soarc is already sourced in .bash_profile')) else: print(highlight(' .soarc is not yet included in .bash_profile')) while True: ans = raw_input(' Do you want to automatically include it in .bash_profile ? (Y/n): ') if ans in ['y', 'Y', 'yes']: self.new_term_required = True rcname=os.getenv('HOME')+'/.bash_profile' try: f = open(rcname, 'a') f.write(source_string+'\n') f.close() except Exception, e: print(highlight_red('Error: failed to append to .bash_profile')) pass break elif ans in ['n', 'N', 'no']: print(' Ok, skipping. Do not forget to include it yourself.') break # completionfile = os.getenv('HOME') + '/.bash_completion' grep_cmd = 'grep -q "' + BASH_COMPLETION_MARKER + '" ' + completionfile ret = subprocess.call(grep_cmd, shell=True) if ret==0: print(highlight('\n bash completion code already present in ' + completionfile)) else: print('\n You can install bash expansion for soa into ' + completionfile) while True: ans = raw_input(' Do you want to append soa autocompletion code there? (Y/n): ') if ans in ['y', 'Y', 'yes']: self.new_term_required = True try: f = open(completionfile, 'a') f.write(BASH_COMPLETION_SOURCE + '\n') f.close() except Exception, e: print(highlight_red('Error: failed to append to ' + completionfile)) pass break elif ans in ['n', 'N', 'no']: print(' Ok, skipped.') break def get_current_version_string(self, proj, show_proj_name=True): s = proj+': ' if not os.path.exists(self.repo_dir+'/'+proj): return ((proj + ': ') if show_proj_name else '') + '' else: cmd = '( cd '+self.repo_dir+'/'+proj+'; git log --pretty=format:"%h%x09%an%x09%ad%x09%s" -n1)' pipe = subprocess.Popen(cmd, shell=True, bufsize=16384, stdout=subprocess.PIPE).stdout st = pipe.read().strip() pipe.close() return ((proj + ': ') if show_proj_name else '') + st def shorthelp_create(self, args): print(' create [--with-optional] create project from template') def longhelp_create(self, args): print(' The command "soa create" will create a new project from an available\n' +' template (see "soa list"). Instructions will be included in the newly-created\n' +' directory in the file INSTRUCTIONS.txt. Pull optional deps with --with-optional') def cmd_create(self, args): args, options = extract_options(args, {'with-optional':False} ) with_optional = 'with-optional' in options if len(args)<4: print(highlight_red('Error - specify a template name and the new project name')) return templ = args[2] newname = args[3] if len(newname)==0 or ('/' in newname) or (newname[0] not in '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_'): print(highlight_red('Error: invalid new project name: '+newname)) return successes, skips, fails = self.perform_clone(['TEMPLATE_'+templ], new_names={'TEMPLATE_'+templ:newname}) if len(successes)==0: return print(highlight('Cloning project dependencies, where not yet present ...')) required, optional = self.parse_project_deps(newname) clone_projs = set(required) if with_optional: clone_projs = clone_projs.union(set(optional)) if not self.perform_clone_all(clone_projs, with_optional=with_optional): return if not with_optional: if len(optional) > 0: print(highlight_red('The following optional dependencies were not considered: '+' '.join(optional))) print(highlight_red('Please clone and build them manually if you want to use them.')) print(highlight('Storing version information in template_version.txt ...')) ver_str = "\n" ver_str += "Version information of template '"+templ+"' at time of creation...\n" ver_str += self.get_current_version_string(newname, show_proj_name = False)+'\n' ver_str += "\nVersions of dependencies at time of creation ...\n" projs, depmap, not_found = self.parse_project_deps_deep_build_order(newname, with_optional=with_optional) for p in projs: ver_str += self.get_current_version_string(p)+'\n' ver_str += "\n" print(ver_str) base_dir = self.repo_dir+'/'+newname logname = base_dir + '/template_version.txt' f = open(logname, 'w') f.write(ver_str) f.close() print(highlight('Stripping template git information ...')) git_dir = base_dir + '/.git/' cmd = 'rm -rf '+git_dir ret = subprocess.call(cmd, shell=True) print(highlight('Creating new local repository (not yet adding and committing) ...')) cmd = '( cd '+base_dir+'; git init )' #; git add . )' ret = subprocess.call(cmd, shell=True) print(highlight_green('... done. New project created: '+newname)) def shorthelp_clone(self, args): print(' clone [ ...] clone remote git project[s] to working dir') def longhelp_clone(self, args): print(' The "soa clone " command will clone a project from the remote\n' +' server. See "soa list" for a list of available projects. If a project of\n' +' the same name already exists, "soa clone" will skip it. To clone a project\n' +' and all dependencies, use "soa clone-all "') def cmd_clone(self, args): self.enforce_env() if len(args)<3: print(highlight_red('Error - specify at least one remote project to clone')) return self.perform_clone(args[2:]) def perform_clone(self, projs, new_names=None): if new_names is None: new_names = {} successes, skips, fails = [], [], [] username = self.techfak_username if username is None: username = os.getenv('USER') for proj in projs: new_name = new_names[proj] if proj in new_names else "" new_name_append = ' '+new_name source = None try: proj, source = proj.split('@', 1) except: pass if (new_name!='' and os.path.exists(self.repo_dir+'/'+new_name)) or (new_name=='' and os.path.exists(self.repo_dir+'/'+proj)): print(highlight('Skipping '+proj+' - directory already exists')) skips.append(proj) else: print(highlight('Cloning '+proj+' ... ')) if source is not None: if source in self.extra_sources: print(highlight(' ... using specified source "'+source+'"')) else: print(highlight_red(' ... ignoring unknown source "'+source+'"')) source = None sys.stdout.flush() if source is None: sources = [None] sources.extend(self.extra_sources.keys()) else: sources = [source] ok = False for i,s in enumerate(sources): if i>0: print(highlight(' ... trying the next available source.')) if s is not None: print(highlight(' ... trying source "'+s+'"')) sourceobj = None if s is None else self.extra_sources[s] cmd = '( cd '+self.repo_dir+' && git clone '+self.make_git_url(proj, sourceobj) + new_name_append + ' )' ret = subprocess.call(cmd, shell=True) if ret==0: print(highlight_green(' ... clone succeeded.')) successes.append(proj) ok = True break else: continue if not ok: print(highlight_red(' ... clone failed!')) fails.append(proj) for proj in successes: self.perform_repo_fix(proj, [proj]) return successes, skips, fails def parse_project_dep_files(self, proj): required = [] optional = [] found = True depfile = self.repo_dir+'/'+proj+'/DEPS.txt' resfile = self.repo_dir+'/'+proj+'/resolve.sh' if os.path.exists(depfile): #print(proj+": New-style DEP") try: f = open(depfile, 'r') data = f.read().split('\n') f.close() for l in data: ls = l.strip() if ls.find('REQUIRED') == 0: tok = ls.split('"') required = tok[1].split() if proj in required: print(highlight_red('Gracefully ignoring self-dependency in '+proj)) required.remove(proj) if ls.find('OPTIONAL') == 0: tok = ls.split('"') optional = tok[1].split() if proj in optional: print(highlight_red('Gracefully ignoring self-dependency in '+proj)) optional.remove(proj) except Exception, e: return [] elif os.path.exists(resfile): #print(proj+": Old-style res") try: f = open(resfile, 'r') data = f.read().split('\n') f.close() for l in data: ls = l.strip() if ls.find('PACKAGES') == 0: tok = ls.split('"') required = tok[1].split() if proj in required: print(highlight_red('Gracefully ignoring self-dependency in '+proj)) required.remove(proj) except Exception, e: pass #return [], [] else: found = False pass #return [], [] #print(' Result: '+str(required)+' '+str(optional)) return required, optional, found def parse_project_deps(self, proj): required, optional, found = self.parse_project_dep_files(proj) return required, optional def parse_project_deps_deep_build_order(self, proj, projs=None, depmap=None, not_found=None, with_optional=False): first_elem = False if projs is None: first_elem = True projs = set() depmap = dict() not_found = set() #depfile = self.repo_dir+'/'+proj+'/resolve.sh' #deps = [] #if os.path.exists(depfile): # try: # f = open(depfile, 'r') # data = f.read().split('\n') # f.close() # for l in data: # ls = l.strip() # if ls.find('PACKAGES') == 0: # tok = ls.split('"') # deps=tok[1].split() # except Exception, e: # return projs, depmap deps, opt, found = self.parse_project_dep_files(proj) if not found: not_found.add(proj) selected_deps = set(deps) if with_optional: selected_deps = selected_deps.union(set(opt)) depmap[proj] = set(selected_deps) projs.add(proj) for p in selected_deps: if p not in projs: p, d, n = self.parse_project_deps_deep_build_order(p, projs, depmap, not_found, with_optional=with_optional) #if first_elem: # tail.append(proj) return projs, depmap, not_found def generate_build_order(self, projs, depmap): result = [] while True: kill_list = [] old_len = len(projs) for p in projs: if len(depmap[p])==0: result.append(p) for r in projs: if p != r: if p in depmap[r]: depmap[r].remove(p) kill_list.append(p) #continue for p in kill_list: projs.remove(p) if len(projs)==0: break if len(projs)==old_len: print(highlight_red('Failed to resolve dependencies! - cycles?')) return None return result def perform_clone_all(self, projs, with_optional=False): '''recursively clone projects and their deps''' clone_set = set(projs) cloned_set = set() while True: succ, skip, fail = self.perform_clone(list(clone_set)) if len(fail) > 0: print(highlight_red('There were errors, canceling clone-all.')) return False cloned_set = cloned_set.union(clone_set) clone_set = set() for proj in succ: req, opt = self.parse_project_deps(proj) clone_set = clone_set.union(set(req)) if with_optional: clone_set = clone_set.union(set(opt)) for proj in skip: req, opt = self.parse_project_deps(proj) clone_set = clone_set.union(set(req)) if with_optional: clone_set = clone_set.union(set(opt)) clone_set = clone_set.difference(cloned_set) if len(clone_set) == 0: print(highlight_green('Done.')) return True # TODO otherwise just be silent and fall through ?? def shorthelp_clone_all(self, args): print(' clone-all [--with-optional] [

...] like clone, but also clone deps') def longhelp_clone_all(self, args): print(' The "soa clone-all " command will clone a project with all\n' +' dependencies from the remote server, skipping already existing directories.\n' +' Specifying --with-optional will also include (all) optional dependencies.') def cmd_clone_all(self, args): self.enforce_env() args, options = extract_options(args, {'with-optional':False} ) with_optional = 'with-optional' in options if len(args)<3: print(highlight_red('Error - specify at least one remote project to clone')) return self.perform_clone_all(args[2:], with_optional=with_optional) def shorthelp_pull(self, args): print(' pull [ ...] same as "update"') def longhelp_pull(self, args): self.longhelp_update(args) def cmd_pull(self, args): self.cmd_update(args) def shorthelp_update(self, args): print(' update [ ...] update specified project[s] from git') def longhelp_update(self, args): print(' The "soa update " command will update a single project\n' +' using "git pull". Use git manually for more fine-grained control!') def cmd_update(self, args): self.enforce_env() projs = [] if len(args)<3: projs = self.guess_project_name_based_on_cwd() if len(projs)==0: print(highlight_red('Error - specify at least one project to update')) return else: projs = args[2:] for p in projs: succ = self.perform_update(p) if not succ: sys.exit(1) def perform_update(self, proj, ask=False): t0 = time.time() if not os.path.exists(self.repo_dir+'/'+proj): print(highlight_red(' Project directory not found: '+proj)) return -1 print(highlight('Updating '+proj+' ... ')) cmd = '( cd '+self.repo_dir+'/'+proj+' && git diff --exit-code >/dev/null )' ret = subprocess.call(cmd, shell=True) if ret==0: # no local changes pass else: print(highlight(' Local changes in '+proj+'. It might be best to resolve them first.')) while True: ans = raw_input(' Git can merge non-conflicting files. Continue with "git pull"? (y/n): ') if ans in ['y', 'Y', 'yes']: break elif ans in ['n', 'N', 'no']: print(highlight(' OK - skipping update of '+proj)) return 0 # update cmd = '( cd '+self.repo_dir+'/'+proj+' && git pull )' ret = subprocess.call(cmd, shell=True) if ret==0: print(highlight_green(proj+': update succeeded.')) self.perform_repo_fix(proj, [proj]) return 1 else: print(highlight_red(proj+': update failed!')) return -1 def perform_update_all(self, proj, ask=True, with_optional=False): t0 = time.time() projs, depmap, not_found = self.parse_project_deps_deep_build_order(proj, with_optional=with_optional) ordered = self.generate_build_order(projs, depmap) if ordered is None: sys.exit(1) print(highlight('Updating the following project list:\n'+' '.join(ordered))) for p in ordered: ret = self.perform_update(p, ask) if ret==-1: print(highlight_green(p+': recursive update aborted.')) sys.exit(1) print(highlight_green(p+': recursive update completed.')) def shorthelp_pull_all(self, args): print(' pull-all [

...] same as "update-all"') def longhelp_pull_all(self, args): self.longhelp_update_all(args) def shorthelp_update_all(self, args): print(' update-all [--with-optional] [

...] like update; also update deps') def longhelp_update_all(self, args): print(' The "soa update-all " command will update a project, and all\n' +' projects it depends on, in reverse order. See "soa update".\n' +' Specifying --with-optional will also update (all) optional dependencies.') def cmd_pull_all(self, args): self.cmd_update_all(args) def cmd_update_all(self, args): self.enforce_env() args, options = extract_options(args, {'with-optional':False} ) with_optional = 'with-optional' in options projs = [] if len(args)<3: projs = self.guess_project_name_based_on_cwd() if len(projs)==0: print(highlight_red('Error - specify at least one project to update')) return else: projs = args[2:] self.perform_update(BUILD_SYSTEM_REPO_NAME, ask=True) # why was this in the loop below? for p in projs: self.perform_update_all(p, ask=True, with_optional=with_optional) def shorthelp_build(self, args): print(' build [ ...] build specified project[s] using ant') def longhelp_build(self, args): print(' The "soa build " command will build a single project\n' +' (using local dependency resolution and "ant dist"). To build all\n' +' dependencies recursively, use "soa build-all "\n' +' Options (slightly faster if you know when they are OK to use):\n' +' --skip-ant-resolve Skip "ant resolve" step (often not used)\n' +' --skip-local-resolve Skip local resolution (slight speed gain)\n' +' --fast Shortcut to skip both resolutions') def cmd_build(self, args): self.enforce_env() args, options = extract_options(args, {'skip-ant-resolve':False, 'skip-local-resolve':False, 'fast':False}) if 'fast' in options: options['skip-ant-resolve'] = True options['skip-local-resolve'] = True projs = [] if len(args)<3: projs = self.guess_project_name_based_on_cwd() if len(projs)==0: print(highlight_red('Error - specify at least one project to build')) return else: projs = args[2:] for p in projs: succ = self.perform_build(p, options) if not succ: sys.exit(1) def build_resolve_script(self, proj, opts): ivy_disable='#' if 'skip-ant-resolve' in opts else '' local_disable='#' if 'skip-local-resolve' in opts else '' basepath=self.list_to_path(['..']*len(self.path_to_list(proj))) print(basepath) return RESOLVE_SCRIPT_SOURCE%(basepath, ivy_disable, local_disable) def perform_build(self, proj, build_options=None): opts = {} if build_options is None else build_options t0 = time.time() print(highlight('Building '+proj+' ... ')) # Write resolve script resolve_file_name = self.repo_dir+'/'+proj+'/.resolve.sh' f = open(resolve_file_name, 'w') script_text = self.build_resolve_script(proj, opts) f.write(script_text) f.close() # Resolve cmd = '( cd '+self.repo_dir+'/'+proj+' && chmod +x ./.resolve.sh && ./.resolve.sh; )' ret = subprocess.call(cmd, shell=True) # Build cmd = '( cd '+self.repo_dir+'/'+proj+' && ant dist )' ret = subprocess.call(cmd, shell=True) ss = time.time()-t0 hh = ss // 3600 ss -= hh*3600 mm = ss // 60 ss -= mm*60 print('Time elapsed for build: %02d:%02d:%02d'%(int(hh),int(mm),int(ss))) if ret==0: print(highlight_green(proj+': build succeeded.')) # Resolve again (locally) cmd = '( cd '+self.repo_dir+'/'+proj+' && ./.resolve.sh ' + proj +'; rm ./.resolve.sh )' ret = subprocess.call(cmd, shell=True) return True else: print(highlight_red(proj+': build failed!')) # Remove resolve script cmd = '( cd '+self.repo_dir+'/'+proj+' && rm ./.resolve.sh )' return False def shorthelp_deps(self, args): print(' deps [--with-optional] [ [

...]] show project dependencies') def longhelp_deps(self, args): print(' The "soa deps [ ...]" command will show the recursive\n' +' dependency tree of projects in build order for "soa build-all".\n' +' Specifying --with-optional will also show optional dependencies.') def cmd_deps(self, args): self.enforce_env() args, options = extract_options(args, {'with-optional':False} ) with_optional = 'with-optional' in options projs = [] if len(args)<3: projs = self.guess_project_name_based_on_cwd() if len(projs)==0: print(highlight_red('Error - specify at least one project show deps for')) return else: projs = args[2:] for p in projs: pr, dep, not_found = self.parse_project_deps_deep_build_order(p, with_optional=with_optional) ordered = self.generate_build_order(pr, dep) print(' '.join(ordered)) for f in not_found: print('# NOTE: "'+f+'" is not in your repo, or has no DEPS.txt: not followed') def perform_build_all(self, proj, with_optional=False): t0 = time.time() projs, depmap, not_found = self.parse_project_deps_deep_build_order(proj, with_optional=with_optional) ordered = self.generate_build_order(projs, depmap) if ordered is None: sys.exit(1) print(highlight('Building the following project list:\n'+' '.join(ordered))) for p in ordered: succ = self.perform_build(p) if not succ: ss = time.time()-t0 hh = ss // 3600 ss -= hh*3600 mm = ss // 60 ss -= mm*60 print('Total time elapsed: %02d:%02d:%02d'%(int(hh),int(mm),int(ss))) print(highlight_red('Errors during recursive build of '+p)) sys.exit(1) ss = time.time()-t0 hh = ss // 3600 ss -= hh*3600 mm = ss // 60 ss -= mm*60 print('Total time elapsed: %02d:%02d:%02d'%(int(hh),int(mm),int(ss))) print(highlight_green(p+': all done, recursive build succeeded.')) def shorthelp_build_all(self, args): print(' build-all [--with-optional] [

...] like build, but also build deps') def longhelp_build_all(self, args): print(' The "soa build-all " command will build a project, and all\n' +' projects it depends on, in reverse order. See "soa build".\n' +' Specifying --with-optional will also build (all) optional dependencies.') def cmd_build_all(self, args): self.enforce_env() args, options = extract_options(args, {'with-optional':False} ) with_optional = 'with-optional' in options projs = [] if len(args)<3: projs = self.guess_project_name_based_on_cwd() if len(projs)==0: print(highlight_red('Error - specify at least one project to build')) return else: projs = args[2:] for p in projs: self.perform_build_all(p, with_optional=with_optional) def shorthelp_clean(self, args): print(' clean [ ...] rid specified project[s] of all build files') def longhelp_clean(self, args): print(' The "soa clean " command will clean a single project,\n' +' removing all files generated during build, as well as cached dependencies;' +' subsequent "build" and "build-all" commands will perform a full rebuild.') def cmd_clean(self, args): self.enforce_env() projs = [] if len(args)<3: projs = self.guess_project_name_based_on_cwd() if len(projs)==0: print(highlight_red('Error - specify at least one project to clean')) return else: projs = args[2:] for p in projs: self.perform_clean(p) def perform_clean(self, proj): print(highlight('Cleaning '+proj+' ... ')) cmd = '( cd '+self.repo_dir+'/'+proj+' && ant clean )' ret = subprocess.call(cmd, shell=True) def perform_clean_all(self, proj, with_optional=False): projs, depmap, not_found = self.parse_project_deps_deep_build_order(proj) ordered = self.generate_build_order(projs, depmap) if ordered is None: sys.exit(1) print(highlight('Cleaning the following project list:\n'+' '.join(ordered))) for p in ordered: succ = self.perform_clean(p) def shorthelp_clean_all(self, args): print(' clean-all [--with-optional] [

...] like clean, but also clean deps') def longhelp_clean_all(self, args): print(' The "soa clean-all " command will clean a project, as well\n' +' as all the projects it depends on, removing all files generated during build.\n' +' Also see "soa clean". --with-optional will also clean (all) optional deps.') def cmd_clean_all(self, args): self.enforce_env() args, options = extract_options(args, {'with-optional':False} ) with_optional = 'with-optional' in options projs = [] if len(args)<3: projs = self.guess_project_name_based_on_cwd() if len(projs)==0: print(highlight_red('Error - specify at least one project to clean')) return else: projs = args[2:] for p in projs: self.perform_clean_all(p, with_optional=with_optional) def shorthelp_run(self, args): print(' run run specified project using "ant run"') def longhelp_run(self, args): print(' The "soa run " command will invoke "ant run" in the project\'s\n' +' directory. Note that you may have to specify a "run" target in the main\n' +' build.xml file of the project for this to work.') def cmd_run(self, args): self.enforce_env() projs = [] if len(args)<3: projs = self.guess_project_name_based_on_cwd() if len(projs)==0: print(highlight_red('Error - specify a project to run')) return else: projs = args[2:] proj = projs[0] print(highlight('Running '+proj+' ... ')) cmd = '( cd '+self.repo_dir+'/'+proj+' && ant run )' ret = subprocess.call(cmd, shell=True) if ret==0: print(highlight_green('Exit with success.')) else: print(highlight_red('Exit with error code '+str(ret)+'.')) def shorthelp_showconfig(self, args): print(' showconfig show go script config in a resource dir') def longhelp_showconfig(self, args): print(' The "soa showconfig" shows variables used in ./go... scripts.\n' +' It will work only if such files are present in the current directory.\n' +' (Probably run from resource/ dir). You can override vars in scs.conf') def cmd_showconfig(self, args): grep_vars_cmd = '''grep -h -- '\[ -z' go.* 2>/dev/null|sort|uniq|cut -d'&' -f3-''' proc = subprocess.Popen(grep_vars_cmd, shell=True, bufsize=16384, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) res = proc.communicate() print('## Variable defaults used in go... scripts residing in this directory.') print('## (You should copy them to scs.conf to modify their values)') print(res[0]) def soa_main(): cmd = None arg=sys.argv[:] soa = SOA(arg[1] if len(arg)>1 else None) soa.enforce_accept_disclaimer() #w_paths, n_w_paths = determine_writeable_paths() #print(str(w_paths)) #sys.exit(0) if ENABLE_UPDATE_CHECKS: soa.update_check() cmd = 'help' if len(arg) > 1: cmd = arg[1] handler = None try: cmd_ = cmd.replace('-', '_') handler = getattr(soa, 'cmd_'+cmd_) except Exception, e: pass if handler is None: soa.print_soa_title() print(highlight_red('Error - Unknown command "'+cmd+'"; run "soa help" for help')) else: handler(arg) #else: # soa.print_soa_title() # print(highlight_red('Error - Specify a command to run; or "soa help" for help')) if __name__ == '__main__': if sys.stdout.isatty() or len(sys.argv)>1: try: soa_main() except KeyboardInterrupt: print("\nAborted by user.") else: GUI_TYPE = None try: from PyQt4 import QtGui class InitialConfigDialog(QtGui.QWidget): def __init__(self): super(InitialConfigDialog, self).__init__() self.initUI() def initUI(self): self.label = QtGui.QLabel('The soa script should be started in an interactive shell.', self) self.label.move(20, 20) self.termbutton = QtGui.QPushButton('Launch "soa init" in Terminal', self) self.termbutton.move(30, 50) self.termbutton.clicked.connect(self.launchInTerminal) self.quitbutton = QtGui.QPushButton('Quit', self) self.quitbutton.move(280, 50) self.quitbutton.clicked.connect(self.quit) self.setGeometry(700, 400, 400, 100) self.setWindowTitle('soa script - GUI launcher') self.show() self.raise_() def launchInTerminal(self): terminal_relaunch_self() def quit(self): sys.exit(0) #def showDialog(self): # text, ok = QtGui.QInputDialog.getText(self, 'Input Dialog', # 'Enter your name:') # if ok: # self.le.setText(str(text)) GUI_TYPE = 'qt' except Exception, e: pass if GUI_TYPE == 'qt': app = QtGui.QApplication(sys.argv) d = InitialConfigDialog() sys.exit(app.exec_())