From 29c4c0e1085c8feaa207c0f191177a964fe3f8cb Mon Sep 17 00:00:00 2001 From: Galit Date: Sun, 29 Apr 2018 12:08:24 +0300 Subject: [PATCH 01/22] working on the new finctionality to check the lago setup --- lago/verify_configuration.py | 247 +++++++++++++++++++++++++++++++++++ 1 file changed, 247 insertions(+) create mode 100644 lago/verify_configuration.py diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py new file mode 100644 index 00000000..0f31ba7a --- /dev/null +++ b/lago/verify_configuration.py @@ -0,0 +1,247 @@ +# +# Copyright 2014 Red Hat, Inc. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +# +# Refer to the README and COPYING files for full details of the license +# + +# permissions +# group +# and configure +# ask the user to run with sudo + +#groups qemu,libvirt - USERNAME +#groups USERNAME - qemu + +#/var/lib/lago +# owner USERNAME:USERNAME +# systemctl restart libvirtd + +import os +import commands +import argparse +import sys +import getpass + +class VerifyLagoStatus(object): + """ + Verify configuration: + """ + verificationStatus = False + def __init__(self,username,envs_dir,groups,nested,virtualization,lago_env_dir,verify_status): + print('__init__ is the constructor for a class VerifyLagoStatus') + self.username = username + self.envs_dir = envs_dir + self.groups = groups + self.nested = nested + self.virtualization = virtualization + self.lago_env_dir = lago_env_dir + + VerifyLagoStatus.verificationStatus = verify_status + + def __del__(self): + print('__del__ is the destructor for a class VerifyLagoStatus') + + def __enter__(self): + print('__enter__ is for context manager VerifyLagoStatus') + return self + + def __exit__(self, exc_type, exc_value, traceback): + print('__exit__ is for context manager VerifyLagoStatus') + + def displayLagoStatus(self): + print "Nested:" + self.nested + print "Virtualization: " + self.virtualization + print "Groups: " + self.groups + print "Lago Environment Directory " + self.envs_dir + " " + self.lago_env_dir + print "Status:" + str(VerifyLagoStatus.verificationStatus) + + def return_status(status): + if status == 'Y': + return "OK" + else: + return "Not OK" + +def validate_status(list_status): + status = True + if 'N' in list_status : + status = False + return status + +def check_virtualization(): + if os.system("dmesg | grep -q 'kvm: disabled by BIOS'"): + virtualization = 'N' + else: + 
virtualization = 'Y' + return virtualization + +def get_cpu_vendor(): + Input = commands.getoutput("lscpu | awk '/Vendor ID/{print $3}'") + if Input == 'GenuineIntel': + #print "intel" + vendor = "intel" + elif vendor == 'AuthenticAMD': + #print "amd" + vendor = "amd" + else: + #print "unrecognized CPU vendor: $vendor, only Intel/AMD are supported" + vendor = "problem" + return vendor + +def check_nested(vendor): + mod="kvm_"+vendor + cmd = "cat /sys/module/"+mod+"/parameters/nested" + is_enabled= commands.getoutput(cmd) + if is_enabled == 'Y': + return 'Y' + else: + return 'N' + +def check_groups(username): + ## all groups username in + groups_username = commands.getoutput("groups " + username) + status_username = all(x in groups_username for x in ['qemu','libvirt','lago',username]) + groups_qemu = commands.getoutput("groups qemu") + status_qemu = all(x in groups_qemu for x in [username]) + if ( status_username & status_qemu ): + return 'Y' + else: + return 'N' + +def check_permissions(envs_dirs,username): + + uid = commands.getoutput("id -u " + username) + gid = commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") + status = True + print "check_permissions Var: " + envs_dirs + for dirpath, dirnames, filenames in os.walk(envs_dirs): + print "Dirpath: " + dirpath + for dirname in dirnames: + if ( os.stat(os.path.join(dirpath, dirname)).st_uid != uid ) & (os.stat(os.path.join(dirpath, dirname)).st_gid != gid): + status = False + + for filename in filenames: + if ( os.stat(os.path.join(dirpath, filename)).st_uid != uid ) & (os.stat(os.path.join(dirpath, filename)).st_gid != gid): + status = False + + if ( status ): + return 'Y' + else: + return 'N' + +def change_permissions(envs_dirs,username): + ## sudo chown -R USERNAME:USERNAME /var/lib/lago + + uid = commands.getoutput("id -u " + username) + gid = commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") + + for dirpath, dirnames, filenames in os.walk(envs_dirs): + for 
dirname in dirnames: + os.chown(os.path.join(dirpath, dirname), uid, gid) + for filename in filenames: + os.chown(os.path.join(dirpath, filename), uid, gid) + +def reload_kvm(): + """ + reload kvm + """ + +def reload_nested(): + """ + reload kvm + """ + +def enable_service(): + """ + enable service + """ + +def enable_services(): + """ + enable services + """ +def main(argv): + + username = '' + envs_dir = '' + running_user=Input = getpass.getuser() + parser = argparse.ArgumentParser(description='Verify that the machine that Lago runs on is well configured') + #parser.add_argument('-u','--username', help='Description for foo argument', required=True) + parser.add_argument('-u','--username', help='Which user needs to be configured',default=running_user) + parser.add_argument('-e','--envs-dir', help='Which directory the qemu has access permissions', default='/var/lib/lago',dest='envs_dir') + parser.add_argument('-v','--verify', help='Return report that describes which configurations are OK, and which are not.', action='store_true') + + args = vars(parser.parse_args()) + + + if (args['verify'] == False) & (os.getuid() != 0): + print "Please use 'sudo', you need adminstrator permissions for configuration" + exit(1) + if args['username']: + # code here + username = args['username'] + print args['username'] + + if args['envs_dir']: + # code here + envs_dir = args['envs_dir'] + print args['envs_dir'] + + if args['verify']: + # code here + verify = args['verify'] + #print args['verify'] + print "Configuration Status:" + vendor = get_cpu_vendor() + nested = check_nested(vendor) + virtualization = check_virtualization() + groups = check_groups(args['username']) + lago_env_dir = check_permissions(args['envs_dir'] ,args['username']) + # if not ok update .... + # Groups, Lago env, + # virtualization .. msg ... 
+ # + #virt-host-validate + verify_status = validate_status([groups,nested,virtualization,lago_env_dir]) + verify = VerifyLagoStatus(username,envs_dir,groups,nested,virtualization,lago_env_dir,verify_status) + verify.displayLagoStatus() + + +if __name__ == "__main__": + main(sys.argv[1:]) + + + + + + +class Setup(object): + """ + Setup on configure parameters: + """ + + def __init__(self, username, envs_dir, groups, verify ): + """__init__ + Args: + username (str): username Lago was installed + envs_dir (str): DirectoryDefault dictonary to load, can be empty. + """ + + self.username = username + self.envs_dir = envs_dir + self.groups = groups + self.verify = verify + From aecb7eb61de35a3ac1a1a27a6185e9107655907d Mon Sep 17 00:00:00 2001 From: Galit Date: Sun, 29 Apr 2018 18:35:31 +0300 Subject: [PATCH 02/22] fix the verify option --- lago/verify_configuration.py | 113 +++++++++++++++++++++++------------ 1 file changed, 75 insertions(+), 38 deletions(-) diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py index 0f31ba7a..c8013965 100644 --- a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -41,35 +41,44 @@ class VerifyLagoStatus(object): Verify configuration: """ verificationStatus = False - def __init__(self,username,envs_dir,groups,nested,virtualization,lago_env_dir,verify_status): - print('__init__ is the constructor for a class VerifyLagoStatus') + def __init__(self,username,envs_dir,groups,nested,virtualization,lago_env_dir,kvm_configure,verify_status): + #print('__init__ is the constructor for a class VerifyLagoStatus') self.username = username self.envs_dir = envs_dir self.groups = groups self.nested = nested self.virtualization = virtualization self.lago_env_dir = lago_env_dir - + self.kvm_configure = kvm_configure VerifyLagoStatus.verificationStatus = verify_status - def __del__(self): - print('__del__ is the destructor for a class VerifyLagoStatus') - - def __enter__(self): - print('__enter__ is for context manager 
VerifyLagoStatus') - return self - - def __exit__(self, exc_type, exc_value, traceback): - print('__exit__ is for context manager VerifyLagoStatus') - def displayLagoStatus(self): - print "Nested:" + self.nested - print "Virtualization: " + self.virtualization - print "Groups: " + self.groups - print "Lago Environment Directory " + self.envs_dir + " " + self.lago_env_dir - print "Status:" + str(VerifyLagoStatus.verificationStatus) - - def return_status(status): + print "Configuration Status:" + print "=====================" + print "Username used by Lago: " + self.username + print "Environment directory used by Lago: " + self.envs_dir + print "Nested: " + self.return_status(self.nested) + print "Virtualization: " + self.return_status(self.virtualization) + print "Groups: " + self.return_status(self.groups) + print "Lago Environment Directory " + self.envs_dir + ": " + self.return_status(self.lago_env_dir) + print "Kvm Configure: " + self.return_status(self.kvm_configure) + print "Status: " + str(VerifyLagoStatus.verificationStatus) + if (VerifyLagoStatus.verificationStatus == False): + print "Please read configuration setup:" + print " http://lago.readthedocs.io/en/latest/Installation.html#troubleshooting" + + def fixLagoConfiguration(self): + print "Nested: " + self.return_status(self.nested) + print "Virtualization: " + self.return_status(self.virtualization) + print "Groups: " + self.return_status(self.groups) + print "Lago Environment Directory " + self.envs_dir + ": " + self.return_status(self.lago_env_dir) + print "Kvm Configure: " + self.return_status(self.kvm_configure) + print "Status: " + str(VerifyLagoStatus.verificationStatus) + if (VerifyLagoStatus.verificationStatus == False): + print "Please read configuration setup:" + print " http://lago.readthedocs.io/en/latest/Installation.html#troubleshooting" + + def return_status(self,status): if status == 'Y': return "OK" else: @@ -91,7 +100,6 @@ def check_virtualization(): def get_cpu_vendor(): Input = 
commands.getoutput("lscpu | awk '/Vendor ID/{print $3}'") if Input == 'GenuineIntel': - #print "intel" vendor = "intel" elif vendor == 'AuthenticAMD': #print "amd" @@ -101,6 +109,23 @@ def get_cpu_vendor(): vendor = "problem" return vendor +def is_virtualization_enable(): + res = commands.getoutput("cat /proc/cpuinfo | egrep 'vmx|svm'") + if res == "": + status = "N" + else: + status = "Y" + return status + +def check_kvm_configure(vendor): + res = commands.getoutput("lsmod | grep kvm_"+vendor) + if res == "": + status = "N" + else: + status = "Y" + return status + + def check_nested(vendor): mod="kvm_"+vendor cmd = "cat /sys/module/"+mod+"/parameters/nested" @@ -123,20 +148,19 @@ def check_groups(username): def check_permissions(envs_dirs,username): + status = True + uid = commands.getoutput("id -u " + username) gid = commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") - status = True - print "check_permissions Var: " + envs_dirs + + #print "check_permissions Var: " + envs_dirs for dirpath, dirnames, filenames in os.walk(envs_dirs): - print "Dirpath: " + dirpath for dirname in dirnames: if ( os.stat(os.path.join(dirpath, dirname)).st_uid != uid ) & (os.stat(os.path.join(dirpath, dirname)).st_gid != gid): status = False - for filename in filenames: if ( os.stat(os.path.join(dirpath, filename)).st_uid != uid ) & (os.stat(os.path.join(dirpath, filename)).st_gid != gid): status = False - if ( status ): return 'Y' else: @@ -177,6 +201,7 @@ def main(argv): username = '' envs_dir = '' + msg='' running_user=Input = getpass.getuser() parser = argparse.ArgumentParser(description='Verify that the machine that Lago runs on is well configured') #parser.add_argument('-u','--username', help='Description for foo argument', required=True) @@ -193,31 +218,43 @@ def main(argv): if args['username']: # code here username = args['username'] - print args['username'] + uid = commands.getoutput("id -u " + username) + if ( uid == "no such user" ): + msg = 
"\'"+username+"\'"+ " username doesn't exists" if args['envs_dir']: # code here envs_dir = args['envs_dir'] - print args['envs_dir'] - + if (os.path.isdir(envs_dir)==False): + msg = "\'"+envs_dir+"\'"+ " envs_dir doesn't exists" + + if (msg): + print "Error: " + msg + exit(1) + + + vendor = get_cpu_vendor() + nested = check_nested(vendor) + #virtualization = check_virtualization() + virtualization = is_virtualization_enable() + groups = check_groups(args['username']) + lago_env_dir = check_permissions(args['envs_dir'] ,args['username']) + kvm_configure = check_kvm_configure(vendor) + if args['verify']: # code here verify = args['verify'] #print args['verify'] - print "Configuration Status:" - vendor = get_cpu_vendor() - nested = check_nested(vendor) - virtualization = check_virtualization() - groups = check_groups(args['username']) - lago_env_dir = check_permissions(args['envs_dir'] ,args['username']) - # if not ok update .... + + # if not ok update .... # Groups, Lago env, # virtualization .. msg ... 
# #virt-host-validate - verify_status = validate_status([groups,nested,virtualization,lago_env_dir]) - verify = VerifyLagoStatus(username,envs_dir,groups,nested,virtualization,lago_env_dir,verify_status) + verify_status = validate_status([groups,nested,virtualization,lago_env_dir]) + verify = VerifyLagoStatus(username,envs_dir,groups,nested,virtualization,lago_env_dir,kvm_configure,verify_status) verify.displayLagoStatus() + if __name__ == "__main__": From 5140a16606d8493e31956094c5934fe04a5fbd0c Mon Sep 17 00:00:00 2001 From: Galit Date: Sun, 29 Apr 2018 18:50:01 +0300 Subject: [PATCH 03/22] added change_permissions and fix the check_permission uid/gid to be integer type --- lago/verify_configuration.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py index c8013965..bfb52f22 100644 --- a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -66,6 +66,9 @@ def displayLagoStatus(self): if (VerifyLagoStatus.verificationStatus == False): print "Please read configuration setup:" print " http://lago.readthedocs.io/en/latest/Installation.html#troubleshooting" + return 2 + else: + return 0 def fixLagoConfiguration(self): print "Nested: " + self.return_status(self.nested) @@ -147,13 +150,10 @@ def check_groups(username): return 'N' def check_permissions(envs_dirs,username): - status = True - - uid = commands.getoutput("id -u " + username) - gid = commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") + uid = int(commands.getoutput("id -u " + username) ) + gid = int(commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") ) - #print "check_permissions Var: " + envs_dirs for dirpath, dirnames, filenames in os.walk(envs_dirs): for dirname in dirnames: if ( os.stat(os.path.join(dirpath, dirname)).st_uid != uid ) & (os.stat(os.path.join(dirpath, dirname)).st_gid != gid): @@ -167,17 +167,14 @@ def 
check_permissions(envs_dirs,username): return 'N' def change_permissions(envs_dirs,username): - ## sudo chown -R USERNAME:USERNAME /var/lib/lago - - uid = commands.getoutput("id -u " + username) - gid = commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") - + uid = int(commands.getoutput("id -u " + username) ) + gid = int(commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") ) for dirpath, dirnames, filenames in os.walk(envs_dirs): for dirname in dirnames: os.chown(os.path.join(dirpath, dirname), uid, gid) for filename in filenames: os.chown(os.path.join(dirpath, filename), uid, gid) - + def reload_kvm(): """ reload kvm @@ -232,7 +229,7 @@ def main(argv): print "Error: " + msg exit(1) - + ## check what is configure vendor = get_cpu_vendor() nested = check_nested(vendor) #virtualization = check_virtualization() @@ -254,7 +251,11 @@ def main(argv): verify_status = validate_status([groups,nested,virtualization,lago_env_dir]) verify = VerifyLagoStatus(username,envs_dir,groups,nested,virtualization,lago_env_dir,kvm_configure,verify_status) verify.displayLagoStatus() - + else: + # fix configuration + if (lago_env_dir == 'N'): + change_permissions(envs_dir,username) + print "check permission: " + str(check_permissions(envs_dir,username)) if __name__ == "__main__": From c23a0657d3fcd091f7317ae0055f82ad237de528 Mon Sep 17 00:00:00 2001 From: Galit Date: Sun, 29 Apr 2018 20:58:27 +0300 Subject: [PATCH 04/22] added a check to packages required --- lago/verify_configuration.py | 52 +++++++++++++++++------------------- 1 file changed, 25 insertions(+), 27 deletions(-) diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py index bfb52f22..aaa8f725 100644 --- a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -149,6 +149,10 @@ def check_groups(username): else: return 'N' +def change_groups(username): + a = os.system("usermod -a -G qemu,libvirt,lago " + username) + b = os.system("usermod -a -G " + 
username + " qemu" ) + def check_permissions(envs_dirs,username): status = True uid = int(commands.getoutput("id -u " + username) ) @@ -175,6 +179,22 @@ def change_permissions(envs_dirs,username): for filename in filenames: os.chown(os.path.join(dirpath, filename), uid, gid) +def check_packages_installed(): + missing_pkg = [] + status = "Y" + #yum install -y epel-release centos-release-qemu-ev + #yum install -y python-devel libvirt libvirt-devel \ + #libguestfs-tools libguestfs-devel gcc libffi-devel \ + #openssl-devel qemu-kvm-ev + #yum list installed {PACKAGE_NAME_HERE} + rpm_output = commands.getoutput("rpm -qa ") + for pkg in ["girl","epel-release", "centos-release-qemu-ev", "python-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm-ev"]: + if pkg not in rpm_output: + missing_pkg.append(pkg) + status = 'N' + return (status,missing_pkg) + + def reload_kvm(): """ reload kvm @@ -237,7 +257,7 @@ def main(argv): groups = check_groups(args['username']) lago_env_dir = check_permissions(args['envs_dir'] ,args['username']) kvm_configure = check_kvm_configure(vendor) - + print check_packages_installed() if args['verify']: # code here verify = args['verify'] @@ -255,31 +275,9 @@ def main(argv): # fix configuration if (lago_env_dir == 'N'): change_permissions(envs_dir,username) - print "check permission: " + str(check_permissions(envs_dir,username)) - - + print "Check permission: " + str(check_permissions(envs_dir,username)) + if (groups == 'N'): + change_groups(username) + print "Check groups: " + str(check_groups(args['username'])) if __name__ == "__main__": main(sys.argv[1:]) - - - - - - -class Setup(object): - """ - Setup on configure parameters: - """ - - def __init__(self, username, envs_dir, groups, verify ): - """__init__ - Args: - username (str): username Lago was installed - envs_dir (str): DirectoryDefault dictonary to load, can be empty. 
- """ - - self.username = username - self.envs_dir = envs_dir - self.groups = groups - self.verify = verify - From 20267085de7bd5526836121e46304c032ff83890 Mon Sep 17 00:00:00 2001 From: Galit Date: Sun, 29 Apr 2018 21:25:31 +0300 Subject: [PATCH 05/22] added a check and change for missing packages --- lago/verify_configuration.py | 42 ++++++++++++++++++++++-------------- 1 file changed, 26 insertions(+), 16 deletions(-) diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py index aaa8f725..77665c4b 100644 --- a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -35,13 +35,14 @@ import argparse import sys import getpass +import platform class VerifyLagoStatus(object): """ Verify configuration: """ verificationStatus = False - def __init__(self,username,envs_dir,groups,nested,virtualization,lago_env_dir,kvm_configure,verify_status): + def __init__(self,username,envs_dir,groups,nested,virtualization,lago_env_dir,kvm_configure,install_pkg,verify_status): #print('__init__ is the constructor for a class VerifyLagoStatus') self.username = username self.envs_dir = envs_dir @@ -50,6 +51,7 @@ def __init__(self,username,envs_dir,groups,nested,virtualization,lago_env_dir,kv self.virtualization = virtualization self.lago_env_dir = lago_env_dir self.kvm_configure = kvm_configure + self.install_pkg = install_pkg VerifyLagoStatus.verificationStatus = verify_status def displayLagoStatus(self): @@ -62,6 +64,7 @@ def displayLagoStatus(self): print "Groups: " + self.return_status(self.groups) print "Lago Environment Directory " + self.envs_dir + ": " + self.return_status(self.lago_env_dir) print "Kvm Configure: " + self.return_status(self.kvm_configure) + print "All packages installed: " + self.return_status(self.install_pkg) print "Status: " + str(VerifyLagoStatus.verificationStatus) if (VerifyLagoStatus.verificationStatus == False): print "Please read configuration setup:" @@ -76,6 +79,7 @@ def fixLagoConfiguration(self): print "Groups: " + 
self.return_status(self.groups) print "Lago Environment Directory " + self.envs_dir + ": " + self.return_status(self.lago_env_dir) print "Kvm Configure: " + self.return_status(self.kvm_configure) + print "All packages installed: " + self.return_status(self.install_pkg) print "Status: " + str(VerifyLagoStatus.verificationStatus) if (VerifyLagoStatus.verificationStatus == False): print "Please read configuration setup:" @@ -128,7 +132,6 @@ def check_kvm_configure(vendor): status = "Y" return status - def check_nested(vendor): mod="kvm_"+vendor cmd = "cat /sys/module/"+mod+"/parameters/nested" @@ -150,8 +153,8 @@ def check_groups(username): return 'N' def change_groups(username): - a = os.system("usermod -a -G qemu,libvirt,lago " + username) - b = os.system("usermod -a -G " + username + " qemu" ) + os.system("usermod -a -G qemu,libvirt,lago " + username) + os.system("usermod -a -G " + username + " qemu" ) def check_permissions(envs_dirs,username): status = True @@ -182,19 +185,21 @@ def change_permissions(envs_dirs,username): def check_packages_installed(): missing_pkg = [] status = "Y" - #yum install -y epel-release centos-release-qemu-ev - #yum install -y python-devel libvirt libvirt-devel \ - #libguestfs-tools libguestfs-devel gcc libffi-devel \ - #openssl-devel qemu-kvm-ev - #yum list installed {PACKAGE_NAME_HERE} + if platform.linux_distribution()[0] == "CentOS Linux": + pkg_list = ["girl","epel-release", "centos-release-qemu-ev", "python-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm-ev"] + else: + pkg_list = ["python2-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm"] rpm_output = commands.getoutput("rpm -qa ") - for pkg in ["girl","epel-release", "centos-release-qemu-ev", "python-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", 
"qemu-kvm-ev"]: + for pkg in pkg_list: if pkg not in rpm_output: missing_pkg.append(pkg) status = 'N' return (status,missing_pkg) - +def install_missing_packages(missing_pkg): + for pkg in missing_pkg: + os.system("yum install -y " + pkg) + def reload_kvm(): """ reload kvm @@ -215,7 +220,6 @@ def enable_services(): enable services """ def main(argv): - username = '' envs_dir = '' msg='' @@ -228,7 +232,6 @@ def main(argv): args = vars(parser.parse_args()) - if (args['verify'] == False) & (os.getuid() != 0): print "Please use 'sudo', you need adminstrator permissions for configuration" exit(1) @@ -257,7 +260,7 @@ def main(argv): groups = check_groups(args['username']) lago_env_dir = check_permissions(args['envs_dir'] ,args['username']) kvm_configure = check_kvm_configure(vendor) - print check_packages_installed() + (install_pkg,missing_pkg) = check_packages_installed() if args['verify']: # code here verify = args['verify'] @@ -268,8 +271,8 @@ def main(argv): # virtualization .. msg ... # #virt-host-validate - verify_status = validate_status([groups,nested,virtualization,lago_env_dir]) - verify = VerifyLagoStatus(username,envs_dir,groups,nested,virtualization,lago_env_dir,kvm_configure,verify_status) + verify_status = validate_status([groups,nested,virtualization,lago_env_dir,install_pkg]) + verify = VerifyLagoStatus(username,envs_dir,groups,nested,virtualization,lago_env_dir,kvm_configure,install_pkg,verify_status) verify.displayLagoStatus() else: # fix configuration @@ -279,5 +282,12 @@ def main(argv): if (groups == 'N'): change_groups(username) print "Check groups: " + str(check_groups(args['username'])) + if (install_pkg == 'N'): + install_missing_packages(missing_pkg) + print "Check missing packages: " + str(check_packages_installed()[0]) + # if nested + # update the value of LagoStatus + # reload libvirtd + if __name__ == "__main__": main(sys.argv[1:]) From 058d79d1f9bccecb3638ab1b0d6447321d00d84e Mon Sep 17 00:00:00 2001 From: Galit Date: Mon, 30 Apr 2018 
06:21:04 +0300 Subject: [PATCH 06/22] add setup command to lago --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index 493d2873..b4382dbd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -49,6 +49,7 @@ lago.plugins.cli = list=lago.cmd:do_list revert=lago.cmd:do_revert set-current=lago.workdir:set_current + setup=lago.cmd:do_setup shell=lago.cmd:do_shell snapshot=lago.cmd:do_snapshot start=lago.cmd:do_start From 8fa2ee2cca3c864b58f11a9634be137aca15de7f Mon Sep 17 00:00:00 2001 From: Galit Date: Mon, 30 Apr 2018 07:34:54 +0300 Subject: [PATCH 07/22] working on the setup command --- lago/cmd.py | 43 ++++++++++++++++++++++++++++++++++++ tests/functional/common.bash | 1 + 2 files changed, 44 insertions(+) diff --git a/lago/cmd.py b/lago/cmd.py index 06bc0de8..746f9d66 100755 --- a/lago/cmd.py +++ b/lago/cmd.py @@ -36,6 +36,7 @@ from lago.config import config from lago import (log_utils, workdir as lago_workdir, utils, lago_ansible) from lago.utils import (in_prefix, with_logging, LagoUserException) +#import lago.verify_configuration as setup LOGGER = logging.getLogger('cli') in_lago_prefix = in_prefix( @@ -768,6 +769,48 @@ def do_deploy(prefix, **kwargs): action='store_true', default=False, ) +##### +@in_lago_prefix +@with_logging +def do_setup( + prefix, vm_names, standalone, dst_dir, compress, init_file_name, + out_format, collect_only, without_threads, **kwargs +): + output = prefix.export_vms( + vm_names, standalone, dst_dir, compress, init_file_name, out_format, + collect_only, not without_threads + ) + if collect_only: + print(out_format.format(output)) + +@lago.plugins.cli.cli_plugin( + help='Verify that the machine runninh Lago is well configured and configure if needed' +) +@lago.plugins.cli.cli_plugin_add_argument( + '--username', + '-u', + help='Which user needs to be configured', + #default=running_user, + action='store', +) + +@lago.plugins.cli.cli_plugin_add_argument( + '--envs-dir', + '-e', + dest='envs_dir', + help='Which 
directory the qemu has access permissions', + default='/var/lib/lago', + action='store', +) + +@lago.plugins.cli.cli_plugin_add_argument( + '--verify', + '-v', + help='Return report that describes which configurations are OK, and which are not.', + action='store_true', +) + +###### def do_generate(verbose, **kwargs): print(config.get_ini(incl_unset=verbose)) diff --git a/tests/functional/common.bash b/tests/functional/common.bash index f9a8926f..f31ab838 100644 --- a/tests/functional/common.bash +++ b/tests/functional/common.bash @@ -9,6 +9,7 @@ VERBS=( deploy destroy init + setup shell snapshot start From 8185fb60b96119894fb6ce4ea07d64b4e7a51929 Mon Sep 17 00:00:00 2001 From: Galit Date: Mon, 30 Apr 2018 07:53:45 +0300 Subject: [PATCH 08/22] fixed the setup order of lines in the file --- lago/cmd.py | 45 ++++++++++++++++++++++++--------------------- 1 file changed, 24 insertions(+), 21 deletions(-) diff --git a/lago/cmd.py b/lago/cmd.py index 746f9d66..b83c3329 100755 --- a/lago/cmd.py +++ b/lago/cmd.py @@ -761,27 +761,7 @@ def do_collect(prefix, output, no_skip, **kwargs): def do_deploy(prefix, **kwargs): prefix.deploy() - -@lago.plugins.cli.cli_plugin(help="Dump configuration file") -@lago.plugins.cli.cli_plugin_add_argument( - '--verbose', - help='Include parameters with no default value.', - action='store_true', - default=False, -) -##### -@in_lago_prefix -@with_logging -def do_setup( - prefix, vm_names, standalone, dst_dir, compress, init_file_name, - out_format, collect_only, without_threads, **kwargs -): - output = prefix.export_vms( - vm_names, standalone, dst_dir, compress, init_file_name, out_format, - collect_only, not without_threads - ) - if collect_only: - print(out_format.format(output)) +###### @lago.plugins.cli.cli_plugin( help='Verify that the machine runninh Lago is well configured and configure if needed' @@ -809,8 +789,31 @@ def do_setup( help='Return report that describes which configurations are OK, and which are not.', action='store_true', 
) +@in_lago_prefix +@with_logging +def do_setup( + prefix, vm_names, standalone, dst_dir, compress, init_file_name, + out_format, collect_only, without_threads, **kwargs +): + output = prefix.export_vms( + vm_names, standalone, dst_dir, compress, init_file_name, out_format, + collect_only, not without_threads + ) + if collect_only: + print(out_format.format(output)) ###### + + + +@lago.plugins.cli.cli_plugin(help="Dump configuration file") +@lago.plugins.cli.cli_plugin_add_argument( + '--verbose', + help='Include parameters with no default value.', + action='store_true', + default=False, +) + def do_generate(verbose, **kwargs): print(config.get_ini(incl_unset=verbose)) From 0663330e95f2d6dbee2cfd2539cb9414ccce1aed Mon Sep 17 00:00:00 2001 From: Galit Date: Mon, 30 Apr 2018 11:31:31 +0300 Subject: [PATCH 09/22] fix all functions to support the lago setup, add doctype --- lago/cmd.py | 45 ++++++--- lago/verify_configuration.py | 172 ++++++++++++++++++++--------------- 2 files changed, 133 insertions(+), 84 deletions(-) diff --git a/lago/cmd.py b/lago/cmd.py index b83c3329..c3031002 100755 --- a/lago/cmd.py +++ b/lago/cmd.py @@ -37,6 +37,7 @@ from lago import (log_utils, workdir as lago_workdir, utils, lago_ansible) from lago.utils import (in_prefix, with_logging, LagoUserException) #import lago.verify_configuration as setup +from lago.verify_configuration import (fix_configuration, check_configuration, check_user, check_directory,validate_status, VerifyLagoStatus) LOGGER = logging.getLogger('cli') in_lago_prefix = in_prefix( @@ -68,7 +69,6 @@ '$PWD/.lago' ), metavar='WORKDIR', - type=os.path.abspath, nargs='?', ) @lago.plugins.cli.cli_plugin_add_argument( @@ -752,7 +752,6 @@ def do_copy_to_vm(prefix, host, remote_path, local_path, **kwargs): def do_collect(prefix, output, no_skip, **kwargs): prefix.collect_artifacts(output, ignore_nopath=not no_skip) - @lago.plugins.cli.cli_plugin( help='Run scripts that install necessary RPMs and configuration' ) @@ -762,7 +761,6 @@ 
def do_deploy(prefix, **kwargs): prefix.deploy() ###### - @lago.plugins.cli.cli_plugin( help='Verify that the machine runninh Lago is well configured and configure if needed' ) @@ -779,7 +777,8 @@ def do_deploy(prefix, **kwargs): '-e', dest='envs_dir', help='Which directory the qemu has access permissions', - default='/var/lib/lago', + default="/var/lib/lago", + type=os.path.abspath, action='store', ) @@ -792,19 +791,39 @@ def do_deploy(prefix, **kwargs): @in_lago_prefix @with_logging def do_setup( - prefix, vm_names, standalone, dst_dir, compress, init_file_name, - out_format, collect_only, without_threads, **kwargs + prefix, username, envs_dir, verify, **kwargs ): - output = prefix.export_vms( - vm_names, standalone, dst_dir, compress, init_file_name, out_format, - collect_only, not without_threads - ) - if collect_only: - print(out_format.format(output)) + msg_error = [] + if (username): + if (check_user(username)): + msg_error.append("Username doesn't exists " + username) + + if (envs_dir): + if (check_directory(envs_dir)): + msg_error.append("Directory doesn't exists "+ envs_dir) + + if ( msg_error ): + msg_error_str = '\n'.join(msg_error) + LOGGER.error("%s", msg_error_str) + sys.exit(2) -###### + (groups,nested,virtualization,lago_env_dir,kvm_configure,install_pkg) = check_configuration(username,envs_dir) + if (verify): + verify_status = validate_status([groups,nested,virtualization,lago_env_dir,install_pkg]) + verify_lago = VerifyLagoStatus(username,envs_dir,groups,nested,virtualization,lago_env_dir,kvm_configure,install_pkg,verify_status) + verify_lago.displayLagoStatus() + else: + if (os.getuid() != 0): + print("Please use 'sudo', you need adminstrator permissions for configuration") + else: + print("You have sudo permissions") + fix_configuration(username,envs_dir,groups,nested,virtualization,lago_env_dir,install_pkg) + (groups,nested,virtualization,lago_env_dir,kvm_configure,install_pkg) = check_configuration(username,envs_dir) + verify_status = 
def validate_status(list_status):
    """
    Validate the status of all configuration checks.

    Args:
        list_status: iterable of 'Y'/'N' flags, one flag per check.

    Returns:
        bool: True when no check reported 'N', False otherwise.
    """
    # A single failed check ('N') fails the whole validation.
    return 'N' not in list_status
def check_nested(vendor):
    """
    Check if nested virtualization is available for the given CPU vendor.

    Args:
        vendor: "intel" or "amd" (suffix of the kvm kernel module name).

    Returns:
        'Y' when the kvm_<vendor> module reports nested=Y, otherwise 'N'.
    """
    nested_param = "/sys/module/kvm_" + vendor + "/parameters/nested"
    is_enabled = commands.getoutput("cat " + nested_param)
    return 'Y' if is_enabled == 'Y' else 'N'
def check_packages_installed():
    """
    Check if all required packages are installed.

    Returns:
        tuple: (status, missing_pkg) -- status is 'Y' when every required
        package name appears in the `rpm -qa` output, otherwise 'N';
        missing_pkg lists the package names that were not found.
    """
    missing_pkg = []
    status = "Y"
    if platform.linux_distribution()[0] == "CentOS Linux":
        # BUG FIX: dropped "mysql-community-server" -- it is unrelated to
        # Lago (leftover test entry, previously the typo "girl") and would
        # have forced a MySQL server install on every CentOS host.
        pkg_list = [
            "epel-release", "centos-release-qemu-ev", "python-devel",
            "libvirt", "libvirt-devel", "libguestfs-tools",
            "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel",
            "qemu-kvm-ev"
        ]
    else:
        pkg_list = [
            "python2-devel", "libvirt", "libvirt-devel", "libguestfs-tools",
            "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel",
            "qemu-kvm"
        ]
    rpm_output = commands.getoutput("rpm -qa ")
    # NOTE(review): substring membership reconstructed from the elided diff
    # hunk -- it matches the visible (status, missing_pkg) return contract,
    # but e.g. "gcc" also matches "gcc-c++"; confirm against the full file.
    for pkg in pkg_list:
        if pkg not in rpm_output:
            missing_pkg.append(pkg)
            status = "N"
    return (status, missing_pkg)
def check_user(username):
    """
    Check if *username* exists in the passwd database.

    Args:
        username: the account name to look up via `id -u`.

    Returns:
        str: empty string when the user exists, otherwise an error message.
    """
    msg = ""
    uid = commands.getoutput("id -u " + username)
    if "no such user" in uid:
        msg = "'" + username + "'" + " username doesn't exists"
    return msg


def check_directory(envs_dir):
    """
    Check if *envs_dir* exists and is a directory.

    Args:
        envs_dir: filesystem path to validate.

    Returns:
        str: empty string when the directory exists, otherwise an error
        message.
    """
    msg = ""
    # Idiom fix: `not os.path.isdir(...)` instead of `== False`.
    if not os.path.isdir(envs_dir):
        msg = "'" + envs_dir + "'" + " envs_dir doesn't exists"
    return msg
def fix_configuration(username, envs_dir, groups, nested, virtualization,
                      lago_env_dir, install_pkg):
    """
    Fix Lago configuration, if possible.

    Args:
        username: account Lago runs as.
        envs_dir: Lago environments directory.
        groups, nested, virtualization, lago_env_dir, install_pkg:
            'Y'/'N' flags from the corresponding check_* functions; an 'N'
            marks an item that needs fixing.

    Must run as root (callers check os.getuid() before invoking this).
    """
    if lago_env_dir == 'N':
        change_permissions(envs_dir, username)

    if groups == 'N':
        change_groups(username)

    if install_pkg == 'N':
        print("Check missing packages: ")
        # BUG FIX: the original passed an undefined name `missing_pkg` to
        # install_missing_packages(), raising NameError the moment any
        # package was actually missing. Recompute the list here.
        missing_pkg = check_packages_installed()[1]
        install_missing_packages(missing_pkg)
    reload_libvirtd()
    # TODO: nested virtualization is detected but not fixed here yet.
a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -308,6 +308,11 @@ def check_configuration(username,envs_dir): def fix_configuration(username,envs_dir,groups,nested,virtualization,lago_env_dir,install_pkg): """ Fix configuration, if possible + - file permissions + - groups + - packages + - nested + - kvm virtualization """ if (lago_env_dir == 'N'): change_permissions(envs_dir,username) From 8842a057980532dded1b457ad7608e3995033fca Mon Sep 17 00:00:00 2001 From: Galit Date: Tue, 1 May 2018 11:36:41 +0300 Subject: [PATCH 11/22] change datatype to dictionary, update functionality --- lago/cmd.py | 34 +- lago/verify_configuration.py | 185 ++-- lago/verify_configure.py.bck | 293 ++++++ pbr-0.43.11-py2.7.egg/EGG-INFO/PKG-INFO | 67 ++ pbr-0.43.11-py2.7.egg/EGG-INFO/SOURCES.txt | 98 ++ .../EGG-INFO/dependency_links.txt | 1 + .../EGG-INFO/entry_points.txt | 9 + pbr-0.43.11-py2.7.egg/EGG-INFO/not-zip-safe | 1 + pbr-0.43.11-py2.7.egg/EGG-INFO/top_level.txt | 1 + pbr-0.43.11-py2.7.egg/pbr/__init__.py | 0 pbr-0.43.11-py2.7.egg/pbr/builddoc.py | 272 +++++ pbr-0.43.11-py2.7.egg/pbr/cmd/__init__.py | 0 pbr-0.43.11-py2.7.egg/pbr/cmd/main.py | 112 +++ pbr-0.43.11-py2.7.egg/pbr/core.py | 134 +++ pbr-0.43.11-py2.7.egg/pbr/extra_files.py | 35 + pbr-0.43.11-py2.7.egg/pbr/find_package.py | 29 + pbr-0.43.11-py2.7.egg/pbr/git.py | 331 ++++++ pbr-0.43.11-py2.7.egg/pbr/hooks/__init__.py | 28 + pbr-0.43.11-py2.7.egg/pbr/hooks/backwards.py | 33 + pbr-0.43.11-py2.7.egg/pbr/hooks/base.py | 34 + pbr-0.43.11-py2.7.egg/pbr/hooks/commands.py | 66 ++ pbr-0.43.11-py2.7.egg/pbr/hooks/files.py | 103 ++ pbr-0.43.11-py2.7.egg/pbr/hooks/metadata.py | 32 + pbr-0.43.11-py2.7.egg/pbr/options.py | 53 + pbr-0.43.11-py2.7.egg/pbr/packaging.py | 823 +++++++++++++++ pbr-0.43.11-py2.7.egg/pbr/pbr_json.py | 34 + pbr-0.43.11-py2.7.egg/pbr/testr_command.py | 167 ++++ pbr-0.43.11-py2.7.egg/pbr/tests/__init__.py | 26 + pbr-0.43.11-py2.7.egg/pbr/tests/base.py | 221 +++++ .../pbr/tests/test_commands.py 
| 84 ++ pbr-0.43.11-py2.7.egg/pbr/tests/test_core.py | 151 +++ pbr-0.43.11-py2.7.egg/pbr/tests/test_files.py | 78 ++ pbr-0.43.11-py2.7.egg/pbr/tests/test_hooks.py | 72 ++ .../pbr/tests/test_integration.py | 269 +++++ .../pbr/tests/test_packaging.py | 939 ++++++++++++++++++ .../pbr/tests/test_pbr_json.py | 30 + pbr-0.43.11-py2.7.egg/pbr/tests/test_setup.py | 445 +++++++++ pbr-0.43.11-py2.7.egg/pbr/tests/test_util.py | 91 ++ .../pbr/tests/test_version.py | 311 ++++++ pbr-0.43.11-py2.7.egg/pbr/tests/test_wsgi.py | 163 +++ .../pbr/tests/testpackage/CHANGES.txt | 86 ++ .../pbr/tests/testpackage/LICENSE.txt | 29 + .../pbr/tests/testpackage/MANIFEST.in | 2 + .../pbr/tests/testpackage/README.txt | 148 +++ .../pbr/tests/testpackage/data_files/a.txt | 0 .../pbr/tests/testpackage/data_files/b.txt | 0 .../pbr/tests/testpackage/data_files/c.rst | 0 .../pbr/tests/testpackage/doc/source/conf.py | 74 ++ .../tests/testpackage/doc/source/index.rst | 23 + .../testpackage/doc/source/installation.rst | 12 + .../tests/testpackage/doc/source/usage.rst | 7 + .../pbr/tests/testpackage/extra-file.txt | 0 .../pbr/tests/testpackage/git-extra-file.txt | 0 .../testpackage/pbr_testpackage/__init__.py | 3 + .../pbr_testpackage/_setup_hooks.py | 65 ++ .../tests/testpackage/pbr_testpackage/cmd.py | 26 + .../testpackage/pbr_testpackage/extra.py | 0 .../pbr_testpackage/package_data/1.txt | 0 .../pbr_testpackage/package_data/2.txt | 0 .../tests/testpackage/pbr_testpackage/wsgi.py | 40 + .../pbr/tests/testpackage/setup.cfg | 58 ++ .../pbr/tests/testpackage/setup.py | 22 + .../pbr/tests/testpackage/src/testext.c | 29 + .../tests/testpackage/test-requirements.txt | 2 + pbr-0.43.11-py2.7.egg/pbr/tests/util.py | 78 ++ pbr-0.43.11-py2.7.egg/pbr/util.py | 609 ++++++++++++ pbr-0.43.11-py2.7.egg/pbr/version.py | 483 +++++++++ 67 files changed, 7583 insertions(+), 68 deletions(-) create mode 100644 lago/verify_configure.py.bck create mode 100644 pbr-0.43.11-py2.7.egg/EGG-INFO/PKG-INFO create mode 100644 
pbr-0.43.11-py2.7.egg/EGG-INFO/SOURCES.txt create mode 100644 pbr-0.43.11-py2.7.egg/EGG-INFO/dependency_links.txt create mode 100644 pbr-0.43.11-py2.7.egg/EGG-INFO/entry_points.txt create mode 100644 pbr-0.43.11-py2.7.egg/EGG-INFO/not-zip-safe create mode 100644 pbr-0.43.11-py2.7.egg/EGG-INFO/top_level.txt create mode 100644 pbr-0.43.11-py2.7.egg/pbr/__init__.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/builddoc.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/cmd/__init__.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/cmd/main.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/core.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/extra_files.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/find_package.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/git.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/hooks/__init__.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/hooks/backwards.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/hooks/base.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/hooks/commands.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/hooks/files.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/hooks/metadata.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/options.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/packaging.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/pbr_json.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/testr_command.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/__init__.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/base.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/test_commands.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/test_core.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/test_files.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/test_hooks.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/test_integration.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/test_packaging.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/test_pbr_json.py create mode 100644 
pbr-0.43.11-py2.7.egg/pbr/tests/test_setup.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/test_util.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/test_version.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/test_wsgi.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/CHANGES.txt create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/LICENSE.txt create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/MANIFEST.in create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/README.txt create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/data_files/a.txt create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/data_files/b.txt create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/data_files/c.rst create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/conf.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/index.rst create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/installation.rst create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/usage.rst create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/extra-file.txt create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/git-extra-file.txt create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/__init__.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/_setup_hooks.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/cmd.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/extra.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/package_data/1.txt create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/package_data/2.txt create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/wsgi.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/setup.cfg 
create mode 100755 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/setup.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/src/testext.c create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/test-requirements.txt create mode 100644 pbr-0.43.11-py2.7.egg/pbr/tests/util.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/util.py create mode 100644 pbr-0.43.11-py2.7.egg/pbr/version.py diff --git a/lago/cmd.py b/lago/cmd.py index c3031002..49eb297b 100755 --- a/lago/cmd.py +++ b/lago/cmd.py @@ -797,6 +797,8 @@ def do_setup( if (username): if (check_user(username)): msg_error.append("Username doesn't exists " + username) + else: + username = os.getenv("SUDO_USER") or os.getenv("USER") if (envs_dir): if (check_directory(envs_dir)): @@ -805,23 +807,35 @@ def do_setup( if ( msg_error ): msg_error_str = '\n'.join(msg_error) LOGGER.error("%s", msg_error_str) - sys.exit(2) - - (groups,nested,virtualization,lago_env_dir,kvm_configure,install_pkg) = check_configuration(username,envs_dir) + sys.exit(1) + config_dict = check_configuration(username,envs_dir) + (verify_status,list_not_configure) = validate_status(config_dict) + verify_lago = VerifyLagoStatus(username,envs_dir,config_dict,verify_status) + if (verify): - verify_status = validate_status([groups,nested,virtualization,lago_env_dir,install_pkg]) - verify_lago = VerifyLagoStatus(username,envs_dir,groups,nested,virtualization,lago_env_dir,kvm_configure,install_pkg,verify_status) verify_lago.displayLagoStatus() + if verify_status: + sys.exit(0) + else: + sys.exit(2) else: if (os.getuid() != 0): print("Please use 'sudo', you need adminstrator permissions for configuration") + sys.exit(1) else: - print("You have sudo permissions") - fix_configuration(username,envs_dir,groups,nested,virtualization,lago_env_dir,install_pkg) - (groups,nested,virtualization,lago_env_dir,kvm_configure,install_pkg) = check_configuration(username,envs_dir) - verify_status = 
validate_status([groups,nested,virtualization,lago_env_dir,install_pkg]) - print("Verify status: " + str(verify_status)) + # verify_lago.displayLagoStatus() + fix_configuration(username,envs_dir,config_dict) + config_dict = check_configuration(username,envs_dir) + (verify_status,list_not_configure) = validate_status(config_dict) + verify_lago.fixLagoConfiguration(config_dict,verify_status) + # verify_lago.displayLagoStatus() + LOGGER.error("Problem to configure: %s", str(list_not_configure)) + + if verify_status: + sys.exit(0) + else: + sys.exit(2) ###### diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py index 7f5f9643..0c9136c0 100644 --- a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -30,6 +30,8 @@ # owner USERNAME:USERNAME # systemctl restart libvirtd + + import os import commands import argparse @@ -42,15 +44,17 @@ class VerifyLagoStatus(object): Verify Lago configuration """ verificationStatus = False - def __init__(self,username,envs_dir,groups,nested,virtualization,lago_env_dir,kvm_configure,install_pkg,verify_status): + def __init__(self,username,envs_dir,config_dict,verify_status): self.username = username self.envs_dir = envs_dir - self.groups = groups - self.nested = nested - self.virtualization = virtualization - self.lago_env_dir = lago_env_dir - self.kvm_configure = kvm_configure - self.install_pkg = install_pkg + self.groups = config_dict['groups'] + self.nested = config_dict['nested'] + self.virtualization = config_dict['virtualization'] + self.lago_env_dir = config_dict['lago_env_dir'] + self.kvm_configure = config_dict['kvm_configure'] + self.install_pkg = config_dict['install_pkg'] + self.home_permissions = config_dict['home_permissions'] + self.ipv6_networking = config_dict['ipv6_networking'] VerifyLagoStatus.verificationStatus = verify_status def displayLagoStatus(self): @@ -67,6 +71,9 @@ def displayLagoStatus(self): print "Lago Environment Directory " + self.envs_dir + ": " + 
def validate_status(list_status):
    """
    Validate the status of all configuration checks.

    Args:
        list_status (dict): mapping of check name -> 'Y'/'N' flag.

    Returns:
        tuple: (status, list_not_configure) -- status is True when every
        check passed; list_not_configure holds the names of the checks
        that reported 'N'.
    """
    # BUG FIX: the original used py2-only itervalues()/iteritems() and
    # scanned the dict twice; one pass over items() works on py2 and py3
    # and the overall status follows directly from the failed list.
    list_not_configure = [k for k, v in list_status.items() if v == 'N']
    status = not list_not_configure
    return status, list_not_configure
def check_configure_ipv6_networking():
    """
    Check whether IPv6 router advertisements are enabled in sysctl.conf.

    Returns:
        'Y' when net.ipv6.conf.all.accept_ra=2 is present in
        /etc/sysctl.conf, otherwise 'N'.
    """
    with open('/etc/sysctl.conf', 'r') as content_file:
        content = content_file.read()
    if "net.ipv6.conf.all.accept_ra=2" in content:
        return 'Y'
    return 'N'


def configure_ipv6_networking():
    """
    Enable IPv6 router advertisements in sysctl.conf and reload sysctl.

    Must run as root.
    """
    # BUG FIX: write the option on its own line. The original appended
    # without any newline, so the option could fuse onto the existing last
    # line of /etc/sysctl.conf -- breaking that setting and never matching
    # check_configure_ipv6_networking() again. Also use `with` so the
    # handle is flushed/closed before `sysctl -p` reads the file.
    with open("/etc/sysctl.conf", "a") as sysctl_file:
        sysctl_file.write("\nnet.ipv6.conf.all.accept_ra=2\n")
    os.system("sysctl -p")
check_groups(username) + config_dict['lago_env_dir'] = check_permissions(envs_dir,username) + config_dict['kvm_configure'] = check_kvm_configure(config_dict['vendor']) + (config_dict['install_pkg'],missing_pkg) = check_packages_installed() + config_dict['home_permissions'] = check_home_dir_permmisions() + config_dict['ipv6_networking'] = check_configure_ipv6_networking() + #return (groups,nested,virtualization,lago_env_dir,kvm_configure,install_pkg,home_permissions,ipv6_networking) + return config_dict + +def fix_configuration(username,envs_dir,config_dict): """ Fix configuration, if possible - file permissions @@ -314,15 +367,31 @@ def fix_configuration(username,envs_dir,groups,nested,virtualization,lago_env_di - nested - kvm virtualization """ - if (lago_env_dir == 'N'): + if (config_dict['lago_env_dir'] == 'N'): + print "Trying to fix env_dir permissions... " change_permissions(envs_dir,username) - if (groups == 'N'): + + if (config_dict['groups'] == 'N'): + print "Trying to fix group permissions... " change_groups(username) - if (install_pkg == 'N'): - print "Check missing packages: " - install_missing_packages(missing_pkg) - reload_libvirtd() - # if nested - # update the value of LagoStatus - # reload libvirtd + if (config_dict['install_pkg'] == 'N'): + print "Trying to fix missing packages... " + # (install_pkg,missing_pkg) = check_packages_installed() + # install_missing_packages(missing_pkg) + + if (config_dict['home_permissions'] == 'N'): + print "Trying to fix home permissions... " + change_home_dir_permissions() + + if (config_dict['ipv6_networking'] == 'N'): + print "Trying to fix ipv6 configuration... " + configure_ipv6_networking() + + vendor = get_cpu_vendor() + if (config_dict['nested'] == 'N'): + print "Trying to enable nested ... 
" + enable_nested(vendor) + reload_kvm(vendor) + + enable_service("libvirtd") diff --git a/lago/verify_configure.py.bck b/lago/verify_configure.py.bck new file mode 100644 index 00000000..77665c4b --- /dev/null +++ b/lago/verify_configure.py.bck @@ -0,0 +1,293 @@ +# +# Copyright 2014 Red Hat, Inc. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +# +# Refer to the README and COPYING files for full details of the license +# + +# permissions +# group +# and configure +# ask the user to run with sudo + +#groups qemu,libvirt - USERNAME +#groups USERNAME - qemu + +#/var/lib/lago +# owner USERNAME:USERNAME +# systemctl restart libvirtd + +import os +import commands +import argparse +import sys +import getpass +import platform + +class VerifyLagoStatus(object): + """ + Verify configuration: + """ + verificationStatus = False + def __init__(self,username,envs_dir,groups,nested,virtualization,lago_env_dir,kvm_configure,install_pkg,verify_status): + #print('__init__ is the constructor for a class VerifyLagoStatus') + self.username = username + self.envs_dir = envs_dir + self.groups = groups + self.nested = nested + self.virtualization = virtualization + self.lago_env_dir = lago_env_dir + self.kvm_configure = kvm_configure + self.install_pkg = install_pkg + VerifyLagoStatus.verificationStatus = verify_status + + def 
displayLagoStatus(self): + print "Configuration Status:" + print "=====================" + print "Username used by Lago: " + self.username + print "Environment directory used by Lago: " + self.envs_dir + print "Nested: " + self.return_status(self.nested) + print "Virtualization: " + self.return_status(self.virtualization) + print "Groups: " + self.return_status(self.groups) + print "Lago Environment Directory " + self.envs_dir + ": " + self.return_status(self.lago_env_dir) + print "Kvm Configure: " + self.return_status(self.kvm_configure) + print "All packages installed: " + self.return_status(self.install_pkg) + print "Status: " + str(VerifyLagoStatus.verificationStatus) + if (VerifyLagoStatus.verificationStatus == False): + print "Please read configuration setup:" + print " http://lago.readthedocs.io/en/latest/Installation.html#troubleshooting" + return 2 + else: + return 0 + + def fixLagoConfiguration(self): + print "Nested: " + self.return_status(self.nested) + print "Virtualization: " + self.return_status(self.virtualization) + print "Groups: " + self.return_status(self.groups) + print "Lago Environment Directory " + self.envs_dir + ": " + self.return_status(self.lago_env_dir) + print "Kvm Configure: " + self.return_status(self.kvm_configure) + print "All packages installed: " + self.return_status(self.install_pkg) + print "Status: " + str(VerifyLagoStatus.verificationStatus) + if (VerifyLagoStatus.verificationStatus == False): + print "Please read configuration setup:" + print " http://lago.readthedocs.io/en/latest/Installation.html#troubleshooting" + + def return_status(self,status): + if status == 'Y': + return "OK" + else: + return "Not OK" + +def validate_status(list_status): + status = True + if 'N' in list_status : + status = False + return status + +def check_virtualization(): + if os.system("dmesg | grep -q 'kvm: disabled by BIOS'"): + virtualization = 'N' + else: + virtualization = 'Y' + return virtualization + +def get_cpu_vendor(): + Input = 
commands.getoutput("lscpu | awk '/Vendor ID/{print $3}'") + if Input == 'GenuineIntel': + vendor = "intel" + elif vendor == 'AuthenticAMD': + #print "amd" + vendor = "amd" + else: + #print "unrecognized CPU vendor: $vendor, only Intel/AMD are supported" + vendor = "problem" + return vendor + +def is_virtualization_enable(): + res = commands.getoutput("cat /proc/cpuinfo | egrep 'vmx|svm'") + if res == "": + status = "N" + else: + status = "Y" + return status + +def check_kvm_configure(vendor): + res = commands.getoutput("lsmod | grep kvm_"+vendor) + if res == "": + status = "N" + else: + status = "Y" + return status + +def check_nested(vendor): + mod="kvm_"+vendor + cmd = "cat /sys/module/"+mod+"/parameters/nested" + is_enabled= commands.getoutput(cmd) + if is_enabled == 'Y': + return 'Y' + else: + return 'N' + +def check_groups(username): + ## all groups username in + groups_username = commands.getoutput("groups " + username) + status_username = all(x in groups_username for x in ['qemu','libvirt','lago',username]) + groups_qemu = commands.getoutput("groups qemu") + status_qemu = all(x in groups_qemu for x in [username]) + if ( status_username & status_qemu ): + return 'Y' + else: + return 'N' + +def change_groups(username): + os.system("usermod -a -G qemu,libvirt,lago " + username) + os.system("usermod -a -G " + username + " qemu" ) + +def check_permissions(envs_dirs,username): + status = True + uid = int(commands.getoutput("id -u " + username) ) + gid = int(commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") ) + + for dirpath, dirnames, filenames in os.walk(envs_dirs): + for dirname in dirnames: + if ( os.stat(os.path.join(dirpath, dirname)).st_uid != uid ) & (os.stat(os.path.join(dirpath, dirname)).st_gid != gid): + status = False + for filename in filenames: + if ( os.stat(os.path.join(dirpath, filename)).st_uid != uid ) & (os.stat(os.path.join(dirpath, filename)).st_gid != gid): + status = False + if ( status ): + return 'Y' + else: + 
return 'N' + +def change_permissions(envs_dirs,username): + uid = int(commands.getoutput("id -u " + username) ) + gid = int(commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") ) + for dirpath, dirnames, filenames in os.walk(envs_dirs): + for dirname in dirnames: + os.chown(os.path.join(dirpath, dirname), uid, gid) + for filename in filenames: + os.chown(os.path.join(dirpath, filename), uid, gid) + +def check_packages_installed(): + missing_pkg = [] + status = "Y" + if platform.linux_distribution()[0] == "CentOS Linux": + pkg_list = ["girl","epel-release", "centos-release-qemu-ev", "python-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm-ev"] + else: + pkg_list = ["python2-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm"] + rpm_output = commands.getoutput("rpm -qa ") + for pkg in pkg_list: + if pkg not in rpm_output: + missing_pkg.append(pkg) + status = 'N' + return (status,missing_pkg) + +def install_missing_packages(missing_pkg): + for pkg in missing_pkg: + os.system("yum install -y " + pkg) + +def reload_kvm(): + """ + reload kvm + """ + +def reload_nested(): + """ + reload kvm + """ + +def enable_service(): + """ + enable service + """ + +def enable_services(): + """ + enable services + """ +def main(argv): + username = '' + envs_dir = '' + msg='' + running_user=Input = getpass.getuser() + parser = argparse.ArgumentParser(description='Verify that the machine that Lago runs on is well configured') + #parser.add_argument('-u','--username', help='Description for foo argument', required=True) + parser.add_argument('-u','--username', help='Which user needs to be configured',default=running_user) + parser.add_argument('-e','--envs-dir', help='Which directory the qemu has access permissions', default='/var/lib/lago',dest='envs_dir') + parser.add_argument('-v','--verify', help='Return report 
that describes which configurations are OK, and which are not.', action='store_true') + + args = vars(parser.parse_args()) + + if (args['verify'] == False) & (os.getuid() != 0): + print "Please use 'sudo', you need adminstrator permissions for configuration" + exit(1) + if args['username']: + # code here + username = args['username'] + uid = commands.getoutput("id -u " + username) + if ( uid == "no such user" ): + msg = "\'"+username+"\'"+ " username doesn't exists" + + if args['envs_dir']: + # code here + envs_dir = args['envs_dir'] + if (os.path.isdir(envs_dir)==False): + msg = "\'"+envs_dir+"\'"+ " envs_dir doesn't exists" + + if (msg): + print "Error: " + msg + exit(1) + + ## check what is configure + vendor = get_cpu_vendor() + nested = check_nested(vendor) + #virtualization = check_virtualization() + virtualization = is_virtualization_enable() + groups = check_groups(args['username']) + lago_env_dir = check_permissions(args['envs_dir'] ,args['username']) + kvm_configure = check_kvm_configure(vendor) + (install_pkg,missing_pkg) = check_packages_installed() + if args['verify']: + # code here + verify = args['verify'] + #print args['verify'] + + # if not ok update .... + # Groups, Lago env, + # virtualization .. msg ... 
+ # + #virt-host-validate + verify_status = validate_status([groups,nested,virtualization,lago_env_dir,install_pkg]) + verify = VerifyLagoStatus(username,envs_dir,groups,nested,virtualization,lago_env_dir,kvm_configure,install_pkg,verify_status) + verify.displayLagoStatus() + else: + # fix configuration + if (lago_env_dir == 'N'): + change_permissions(envs_dir,username) + print "Check permission: " + str(check_permissions(envs_dir,username)) + if (groups == 'N'): + change_groups(username) + print "Check groups: " + str(check_groups(args['username'])) + if (install_pkg == 'N'): + install_missing_packages(missing_pkg) + print "Check missing packages: " + str(check_packages_installed()[0]) + # if nested + # update the value of LagoStatus + # reload libvirtd + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/pbr-0.43.11-py2.7.egg/EGG-INFO/PKG-INFO b/pbr-0.43.11-py2.7.egg/EGG-INFO/PKG-INFO new file mode 100644 index 00000000..538b5606 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/EGG-INFO/PKG-INFO @@ -0,0 +1,67 @@ +Metadata-Version: 1.1 +Name: pbr +Version: 0.43.11 +Summary: Python Build Reasonableness +Home-page: https://docs.openstack.org/pbr/latest/ +Author: OpenStack +Author-email: openstack-dev@lists.openstack.org +License: UNKNOWN +Description: Introduction + ============ + + .. image:: https://img.shields.io/pypi/v/pbr.svg + :target: https://pypi.python.org/pypi/pbr/ + :alt: Latest Version + + .. image:: https://img.shields.io/pypi/dm/pbr.svg + :target: https://pypi.python.org/pypi/pbr/ + :alt: Downloads + + PBR is a library that injects some useful and sensible default behaviors + into your setuptools run. It started off life as the chunks of code that + were copied between all of the `OpenStack`_ projects. Around the time that + OpenStack hit 18 different projects each with at least 3 active branches, + it seemed like a good time to make that code into a proper reusable library. + + PBR is only mildly configurable. 
The basic idea is that there's a decent + way to run things and if you do, you should reap the rewards, because then + it's simple and repeatable. If you want to do things differently, cool! But + you've already got the power of Python at your fingertips, so you don't + really need PBR. + + PBR builds on top of the work that `d2to1`_ started to provide for declarative + configuration. `d2to1`_ is itself an implementation of the ideas behind + `distutils2`_. Although `distutils2`_ is now abandoned in favor of work towards + `PEP 426`_ and Metadata 2.0, declarative config is still a great idea and + specifically important in trying to distribute setup code as a library + when that library itself will alter how the setup is processed. As Metadata + 2.0 and other modern Python packaging PEPs come out, PBR aims to support + them as quickly as possible. + + * License: Apache License, Version 2.0 + * Documentation: https://docs.openstack.org/pbr/latest/ + * Source: https://git.openstack.org/cgit/openstack-dev/pbr + * Bugs: https://bugs.launchpad.net/pbr + * Change Log: https://docs.openstack.org/pbr/latest/user/history.html + + .. _d2to1: https://pypi.python.org/pypi/d2to1 + .. _distutils2: https://pypi.python.org/pypi/Distutils2 + .. _PEP 426: http://legacy.python.org/dev/peps/pep-0426/ + .. 
_OpenStack: https://www.openstack.org/ + + +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Environment :: OpenStack +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 diff --git a/pbr-0.43.11-py2.7.egg/EGG-INFO/SOURCES.txt b/pbr-0.43.11-py2.7.egg/EGG-INFO/SOURCES.txt new file mode 100644 index 00000000..e2f9588b --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/EGG-INFO/SOURCES.txt @@ -0,0 +1,98 @@ +.coveragerc +.mailmap +.testr.conf +.zuul.yaml +AUTHORS +CONTRIBUTING.rst +ChangeLog +LICENSE +README.rst +lower-constraints.txt +setup.cfg +setup.py +test-requirements.txt +tox.ini +doc/source/conf.py +doc/source/index.rst +doc/source/contributor/index.rst +doc/source/reference/index.rst +doc/source/user/compatibility.rst +doc/source/user/features.rst +doc/source/user/history.rst +doc/source/user/index.rst +doc/source/user/packagers.rst +doc/source/user/semver.rst +doc/source/user/using.rst +pbr/__init__.py +pbr/builddoc.py +pbr/core.py +pbr/extra_files.py +pbr/find_package.py +pbr/git.py +pbr/options.py +pbr/packaging.py +pbr/pbr_json.py +pbr/testr_command.py +pbr/util.py +pbr/version.py +pbr.egg-info/PKG-INFO +pbr.egg-info/SOURCES.txt +pbr.egg-info/dependency_links.txt +pbr.egg-info/entry_points.txt +pbr.egg-info/not-zip-safe +pbr.egg-info/top_level.txt +pbr/cmd/__init__.py +pbr/cmd/main.py +pbr/hooks/__init__.py +pbr/hooks/backwards.py +pbr/hooks/base.py +pbr/hooks/commands.py +pbr/hooks/files.py 
+pbr/hooks/metadata.py +pbr/tests/__init__.py +pbr/tests/base.py +pbr/tests/test_commands.py +pbr/tests/test_core.py +pbr/tests/test_files.py +pbr/tests/test_hooks.py +pbr/tests/test_integration.py +pbr/tests/test_packaging.py +pbr/tests/test_pbr_json.py +pbr/tests/test_setup.py +pbr/tests/test_util.py +pbr/tests/test_version.py +pbr/tests/test_wsgi.py +pbr/tests/util.py +pbr/tests/testpackage/CHANGES.txt +pbr/tests/testpackage/LICENSE.txt +pbr/tests/testpackage/MANIFEST.in +pbr/tests/testpackage/README.txt +pbr/tests/testpackage/extra-file.txt +pbr/tests/testpackage/git-extra-file.txt +pbr/tests/testpackage/setup.cfg +pbr/tests/testpackage/setup.py +pbr/tests/testpackage/test-requirements.txt +pbr/tests/testpackage/data_files/a.txt +pbr/tests/testpackage/data_files/b.txt +pbr/tests/testpackage/data_files/c.rst +pbr/tests/testpackage/doc/source/conf.py +pbr/tests/testpackage/doc/source/index.rst +pbr/tests/testpackage/doc/source/installation.rst +pbr/tests/testpackage/doc/source/usage.rst +pbr/tests/testpackage/pbr_testpackage/__init__.py +pbr/tests/testpackage/pbr_testpackage/_setup_hooks.py +pbr/tests/testpackage/pbr_testpackage/cmd.py +pbr/tests/testpackage/pbr_testpackage/extra.py +pbr/tests/testpackage/pbr_testpackage/wsgi.py +pbr/tests/testpackage/pbr_testpackage/package_data/1.txt +pbr/tests/testpackage/pbr_testpackage/package_data/2.txt +pbr/tests/testpackage/src/testext.c +releasenotes/notes/deprecate-pyN-requirements-364655c38fa5b780.yaml +releasenotes/notes/deprecate-testr-nose-integration-56e3e11248d946fc.yaml +releasenotes/notes/remove-command-hooks-907d9c2325f306ca.yaml +releasenotes/notes/v_version-457b38c8679c5868.yaml +releasenotes/source/conf.py +releasenotes/source/index.rst +releasenotes/source/unreleased.rst +tools/integration.sh +tools/tox_releasenotes.sh \ No newline at end of file diff --git a/pbr-0.43.11-py2.7.egg/EGG-INFO/dependency_links.txt b/pbr-0.43.11-py2.7.egg/EGG-INFO/dependency_links.txt new file mode 100644 index 
00000000..8b137891 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/EGG-INFO/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/pbr-0.43.11-py2.7.egg/EGG-INFO/entry_points.txt b/pbr-0.43.11-py2.7.egg/EGG-INFO/entry_points.txt new file mode 100644 index 00000000..f6ab6c9b --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/EGG-INFO/entry_points.txt @@ -0,0 +1,9 @@ +[distutils.setup_keywords] +pbr = pbr.core:pbr + +[egg_info.writers] +pbr.json = pbr.pbr_json:write_pbr_json + +[console_scripts] +pbr = pbr.cmd.main:main + diff --git a/pbr-0.43.11-py2.7.egg/EGG-INFO/not-zip-safe b/pbr-0.43.11-py2.7.egg/EGG-INFO/not-zip-safe new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/EGG-INFO/not-zip-safe @@ -0,0 +1 @@ + diff --git a/pbr-0.43.11-py2.7.egg/EGG-INFO/top_level.txt b/pbr-0.43.11-py2.7.egg/EGG-INFO/top_level.txt new file mode 100644 index 00000000..1d45dc6e --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/EGG-INFO/top_level.txt @@ -0,0 +1 @@ +pbr diff --git a/pbr-0.43.11-py2.7.egg/pbr/__init__.py b/pbr-0.43.11-py2.7.egg/pbr/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pbr-0.43.11-py2.7.egg/pbr/builddoc.py b/pbr-0.43.11-py2.7.egg/pbr/builddoc.py new file mode 100644 index 00000000..167d13e0 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/builddoc.py @@ -0,0 +1,272 @@ +# Copyright 2011 OpenStack Foundation +# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +from distutils import log +import fnmatch +import os +import sys + +try: + import cStringIO +except ImportError: + import io as cStringIO + +try: + import sphinx + # NOTE(dhellmann): Newer versions of Sphinx have moved the apidoc + # module into sphinx.ext and the API is slightly different (the + # function expects sys.argv[1:] instead of sys.argv[:]. So, figure + # out where we can import it from and set a flag so we can invoke + # it properly. See this change in sphinx for details: + # https://github.com/sphinx-doc/sphinx/commit/87630c8ae8bff8c0e23187676e6343d8903003a6 + try: + from sphinx.ext import apidoc + apidoc_use_padding = False + except ImportError: + from sphinx import apidoc + apidoc_use_padding = True + from sphinx import application + from sphinx import setup_command +except Exception as e: + # NOTE(dhellmann): During the installation of docutils, setuptools + # tries to import pbr code to find the egg_info.writer hooks. That + # imports this module, which imports sphinx, which imports + # docutils, which is being installed. Because docutils uses 2to3 + # to convert its code during installation under python 3, the + # import fails, but it fails with an error other than ImportError + # (today it's a NameError on StandardError, an exception base + # class). Convert the exception type here so it can be caught in + # packaging.py where we try to determine if we can import and use + # sphinx by importing this module. See bug #1403510 for details. + raise ImportError(str(e)) +from pbr import git +from pbr import options +from pbr import version + + +_rst_template = """%(heading)s +%(underline)s + +.. 
automodule:: %(module)s + :members: + :undoc-members: + :show-inheritance: +""" + + +def _find_modules(arg, dirname, files): + for filename in files: + if filename.endswith('.py') and filename != '__init__.py': + arg["%s.%s" % (dirname.replace('/', '.'), + filename[:-3])] = True + + +class LocalBuildDoc(setup_command.BuildDoc): + + builders = ['html'] + command_name = 'build_sphinx' + sphinx_initialized = False + + def _get_source_dir(self): + option_dict = self.distribution.get_option_dict('build_sphinx') + pbr_option_dict = self.distribution.get_option_dict('pbr') + _, api_doc_dir = pbr_option_dict.get('api_doc_dir', (None, 'api')) + if 'source_dir' in option_dict: + source_dir = os.path.join(option_dict['source_dir'][1], + api_doc_dir) + else: + source_dir = 'doc/source/' + api_doc_dir + if not os.path.exists(source_dir): + os.makedirs(source_dir) + return source_dir + + def generate_autoindex(self, excluded_modules=None): + log.info("[pbr] Autodocumenting from %s" + % os.path.abspath(os.curdir)) + modules = {} + source_dir = self._get_source_dir() + for pkg in self.distribution.packages: + if '.' not in pkg: + for dirpath, dirnames, files in os.walk(pkg): + _find_modules(modules, dirpath, files) + + def include(module): + return not any(fnmatch.fnmatch(module, pat) + for pat in excluded_modules) + + module_list = sorted(mod for mod in modules.keys() if include(mod)) + autoindex_filename = os.path.join(source_dir, 'autoindex.rst') + with open(autoindex_filename, 'w') as autoindex: + autoindex.write(""".. 
toctree:: + :maxdepth: 1 + +""") + for module in module_list: + output_filename = os.path.join(source_dir, + "%s.rst" % module) + heading = "The :mod:`%s` Module" % module + underline = "=" * len(heading) + values = dict(module=module, heading=heading, + underline=underline) + + log.info("[pbr] Generating %s" + % output_filename) + with open(output_filename, 'w') as output_file: + output_file.write(_rst_template % values) + autoindex.write(" %s.rst\n" % module) + + def _sphinx_tree(self): + source_dir = self._get_source_dir() + cmd = ['-H', 'Modules', '-o', source_dir, '.'] + if apidoc_use_padding: + cmd.insert(0, 'apidoc') + apidoc.main(cmd + self.autodoc_tree_excludes) + + def _sphinx_run(self): + if not self.verbose: + status_stream = cStringIO.StringIO() + else: + status_stream = sys.stdout + confoverrides = {} + if self.project: + confoverrides['project'] = self.project + if self.version: + confoverrides['version'] = self.version + if self.release: + confoverrides['release'] = self.release + if self.today: + confoverrides['today'] = self.today + if self.sphinx_initialized: + confoverrides['suppress_warnings'] = [ + 'app.add_directive', 'app.add_role', + 'app.add_generic_role', 'app.add_node', + 'image.nonlocal_uri', + ] + app = application.Sphinx( + self.source_dir, self.config_dir, + self.builder_target_dir, self.doctree_dir, + self.builder, confoverrides, status_stream, + freshenv=self.fresh_env, warningiserror=self.warning_is_error) + self.sphinx_initialized = True + + try: + app.build(force_all=self.all_files) + except Exception as err: + from docutils import utils + if isinstance(err, utils.SystemMessage): + sys.stder.write('reST markup error:\n') + sys.stderr.write(err.args[0].encode('ascii', + 'backslashreplace')) + sys.stderr.write('\n') + else: + raise + + if self.link_index: + src = app.config.master_doc + app.builder.out_suffix + dst = app.builder.get_outfilename('index') + os.symlink(src, dst) + + def run(self): + option_dict = 
self.distribution.get_option_dict('pbr') + if git._git_is_installed(): + git.write_git_changelog(option_dict=option_dict) + git.generate_authors(option_dict=option_dict) + tree_index = options.get_boolean_option(option_dict, + 'autodoc_tree_index_modules', + 'AUTODOC_TREE_INDEX_MODULES') + auto_index = options.get_boolean_option(option_dict, + 'autodoc_index_modules', + 'AUTODOC_INDEX_MODULES') + if not os.getenv('SPHINX_DEBUG'): + # NOTE(afazekas): These options can be used together, + # but they do a very similar thing in a different way + if tree_index: + self._sphinx_tree() + if auto_index: + self.generate_autoindex( + set(option_dict.get( + "autodoc_exclude_modules", + [None, ""])[1].split())) + + self.finalize_options() + + is_multibuilder_sphinx = version.SemanticVersion.from_pip_string( + sphinx.__version__) >= version.SemanticVersion(1, 6) + + # TODO(stephenfin): Remove support for Sphinx < 1.6 in 4.0 + if not is_multibuilder_sphinx: + log.warn('[pbr] Support for Sphinx < 1.6 will be dropped in ' + 'pbr 4.0. Upgrade to Sphinx 1.6+') + + # TODO(stephenfin): Remove this at the next MAJOR version bump + if self.builders != ['html']: + log.warn("[pbr] Sphinx 1.6 added native support for " + "specifying multiple builders in the " + "'[sphinx_build] builder' configuration option, " + "found in 'setup.cfg'. As a result, the " + "'[sphinx_build] builders' option has been " + "deprecated and will be removed in pbr 4.0. Migrate " + "to the 'builder' configuration option.") + if is_multibuilder_sphinx: + self.builder = self.builders + + if is_multibuilder_sphinx: + # Sphinx >= 1.6 + return setup_command.BuildDoc.run(self) + + # Sphinx < 1.6 + for builder in self.builder: + self.builder = builder + self.finalize_options() + self._sphinx_run() + + def initialize_options(self): + # Not a new style class, super keyword does not work. 
+ setup_command.BuildDoc.initialize_options(self) + + # NOTE(dstanek): exclude setup.py from the autodoc tree index + # builds because all projects will have an issue with it + self.autodoc_tree_excludes = ['setup.py'] + + def finalize_options(self): + from pbr import util + + # Not a new style class, super keyword does not work. + setup_command.BuildDoc.finalize_options(self) + + # Handle builder option from command line - override cfg + option_dict = self.distribution.get_option_dict('build_sphinx') + if 'command line' in option_dict.get('builder', [[]])[0]: + self.builders = option_dict['builder'][1] + # Allow builders to be configurable - as a comma separated list. + if not isinstance(self.builders, list) and self.builders: + self.builders = self.builders.split(',') + + self.project = self.distribution.get_name() + self.version = self.distribution.get_version() + self.release = self.distribution.get_version() + + # NOTE(dstanek): check for autodoc tree exclusion overrides + # in the setup.cfg + opt = 'autodoc_tree_excludes' + option_dict = self.distribution.get_option_dict('pbr') + if opt in option_dict: + self.autodoc_tree_excludes = util.split_multiline( + option_dict[opt][1]) + + # handle Sphinx < 1.5.0 + if not hasattr(self, 'warning_is_error'): + self.warning_is_error = False diff --git a/pbr-0.43.11-py2.7.egg/pbr/cmd/__init__.py b/pbr-0.43.11-py2.7.egg/pbr/cmd/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pbr-0.43.11-py2.7.egg/pbr/cmd/main.py b/pbr-0.43.11-py2.7.egg/pbr/cmd/main.py new file mode 100644 index 00000000..29cd61d7 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/cmd/main.py @@ -0,0 +1,112 @@ +# Copyright 2014 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import argparse +import json +import sys + +import pkg_resources + +import pbr.version + + +def _get_metadata(package_name): + try: + return json.loads( + pkg_resources.get_distribution( + package_name).get_metadata('pbr.json')) + except pkg_resources.DistributionNotFound: + raise Exception('Package {0} not installed'.format(package_name)) + except Exception: + return None + + +def get_sha(args): + sha = _get_info(args.name)['sha'] + if sha: + print(sha) + + +def get_info(args): + print("{name}\t{version}\t{released}\t{sha}".format( + **_get_info(args.name))) + + +def _get_info(name): + metadata = _get_metadata(name) + version = pkg_resources.get_distribution(name).version + if metadata: + if metadata['is_release']: + released = 'released' + else: + released = 'pre-release' + sha = metadata['git_version'] + else: + version_parts = version.split('.') + if version_parts[-1].startswith('g'): + sha = version_parts[-1][1:] + released = 'pre-release' + else: + sha = "" + released = "released" + for part in version_parts: + if not part.isdigit(): + released = "pre-release" + return dict(name=name, version=version, sha=sha, released=released) + + +def freeze(args): + sorted_dists = sorted(pkg_resources.working_set, + key=lambda dist: dist.project_name.lower()) + for dist in sorted_dists: + info = _get_info(dist.project_name) + output = "{name}=={version}".format(**info) + if info['sha']: + output += " # git sha {sha}".format(**info) + print(output) + + +def main(): + parser = argparse.ArgumentParser( + description='pbr: Python Build Reasonableness') + 
parser.add_argument( + '-v', '--version', action='version', + version=str(pbr.version.VersionInfo('pbr'))) + + subparsers = parser.add_subparsers( + title='commands', description='valid commands', help='additional help') + + cmd_sha = subparsers.add_parser('sha', help='print sha of package') + cmd_sha.set_defaults(func=get_sha) + cmd_sha.add_argument('name', help='package to print sha of') + + cmd_info = subparsers.add_parser( + 'info', help='print version info for package') + cmd_info.set_defaults(func=get_info) + cmd_info.add_argument('name', help='package to print info of') + + cmd_freeze = subparsers.add_parser( + 'freeze', help='print version info for all installed packages') + cmd_freeze.set_defaults(func=freeze) + + args = parser.parse_args() + try: + args.func(args) + except Exception as e: + print(e) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/pbr-0.43.11-py2.7.egg/pbr/core.py b/pbr-0.43.11-py2.7.egg/pbr/core.py new file mode 100644 index 00000000..a93253ba --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/core.py @@ -0,0 +1,134 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Copyright (C) 2013 Association of Universities for Research in Astronomy +# (AURA) +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. 
Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS +# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +# DAMAGE. + +from distutils import core +from distutils import errors +import logging +import os +import sys +import warnings + +from pbr import util + + +if sys.version_info[0] == 3: + string_type = str + integer_types = (int,) +else: + string_type = basestring # flake8: noqa + integer_types = (int, long) # flake8: noqa + + +def pbr(dist, attr, value): + """Implements the actual pbr setup() keyword. + + When used, this should be the only keyword in your setup() aside from + `setup_requires`. + + If given as a string, the value of pbr is assumed to be the relative path + to the setup.cfg file to use. Otherwise, if it evaluates to true, it + simply assumes that pbr should be used, and the default 'setup.cfg' is + used. 
+ + This works by reading the setup.cfg file, parsing out the supported + metadata and command options, and using them to rebuild the + `DistributionMetadata` object and set the newly added command options. + + The reason for doing things this way is that a custom `Distribution` class + will not play nicely with setup_requires; however, this implementation may + not work well with distributions that do use a `Distribution` subclass. + """ + + if not value: + return + if isinstance(value, string_type): + path = os.path.abspath(value) + else: + path = os.path.abspath('setup.cfg') + if not os.path.exists(path): + raise errors.DistutilsFileError( + 'The setup.cfg file %s does not exist.' % path) + + # Converts the setup.cfg file to setup() arguments + try: + attrs = util.cfg_to_args(path, dist.script_args) + except Exception: + e = sys.exc_info()[1] + # NB: This will output to the console if no explicit logging has + # been setup - but thats fine, this is a fatal distutils error, so + # being pretty isn't the #1 goal.. being diagnosable is. + logging.exception('Error parsing') + raise errors.DistutilsSetupError( + 'Error parsing %s: %s: %s' % (path, e.__class__.__name__, e)) + + # Repeat some of the Distribution initialization code with the newly + # provided attrs + if attrs: + # Skips 'options' and 'licence' support which are rarely used; may + # add back in later if demanded + for key, val in attrs.items(): + if hasattr(dist.metadata, 'set_' + key): + getattr(dist.metadata, 'set_' + key)(val) + elif hasattr(dist.metadata, key): + setattr(dist.metadata, key, val) + elif hasattr(dist, key): + setattr(dist, key, val) + else: + msg = 'Unknown distribution option: %s' % repr(key) + warnings.warn(msg) + + # Re-finalize the underlying Distribution + try: + super(dist.__class__, dist).finalize_options() + except TypeError: + # If dist is not declared as a new-style class (with object as + # a subclass) then super() will not work on it. This is the case + # for Python 2. 
In that case, fall back to doing this the ugly way + dist.__class__.__bases__[-1].finalize_options(dist) + + # This bit comes out of distribute/setuptools + if isinstance(dist.metadata.version, integer_types + (float,)): + # Some people apparently take "version number" too literally :) + dist.metadata.version = str(dist.metadata.version) diff --git a/pbr-0.43.11-py2.7.egg/pbr/extra_files.py b/pbr-0.43.11-py2.7.egg/pbr/extra_files.py new file mode 100644 index 00000000..a72db0c1 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/extra_files.py @@ -0,0 +1,35 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from distutils import errors +import os + +_extra_files = [] + + +def get_extra_files(): + global _extra_files + return _extra_files + + +def set_extra_files(extra_files): + # Let's do a sanity check + for filename in extra_files: + if not os.path.exists(filename): + raise errors.DistutilsFileError( + '%s from the extra_files option in setup.cfg does not ' + 'exist' % filename) + global _extra_files + _extra_files[:] = extra_files[:] diff --git a/pbr-0.43.11-py2.7.egg/pbr/find_package.py b/pbr-0.43.11-py2.7.egg/pbr/find_package.py new file mode 100644 index 00000000..717e93da --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/find_package.py @@ -0,0 +1,29 @@ +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import os + +import setuptools + + +def smart_find_packages(package_list): + """Run find_packages the way we intend.""" + packages = [] + for pkg in package_list.strip().split("\n"): + pkg_path = pkg.replace('.', os.path.sep) + packages.append(pkg) + packages.extend(['%s.%s' % (pkg, f) + for f in setuptools.find_packages(pkg_path)]) + return "\n".join(set(packages)) diff --git a/pbr-0.43.11-py2.7.egg/pbr/git.py b/pbr-0.43.11-py2.7.egg/pbr/git.py new file mode 100644 index 00000000..f2855dde --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/git.py @@ -0,0 +1,331 @@ +# Copyright 2011 OpenStack Foundation +# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from __future__ import unicode_literals + +import distutils.errors +from distutils import log +import errno +import io +import os +import re +import subprocess +import time + +import pkg_resources + +from pbr import options +from pbr import version + + +def _run_shell_command(cmd, throw_on_error=False, buffer=True, env=None): + if buffer: + out_location = subprocess.PIPE + err_location = subprocess.PIPE + else: + out_location = None + err_location = None + + newenv = os.environ.copy() + if env: + newenv.update(env) + + output = subprocess.Popen(cmd, + stdout=out_location, + stderr=err_location, + env=newenv) + out = output.communicate() + if output.returncode and throw_on_error: + raise distutils.errors.DistutilsError( + "%s returned %d" % (cmd, output.returncode)) + if len(out) == 0 or not out[0] or not out[0].strip(): + return '' + # Since we don't control the history, and forcing users to rebase arbitrary + # history to fix utf8 issues is harsh, decode with replace. + return out[0].strip().decode('utf-8', 'replace') + + +def _run_git_command(cmd, git_dir, **kwargs): + if not isinstance(cmd, (list, tuple)): + cmd = [cmd] + return _run_shell_command( + ['git', '--git-dir=%s' % git_dir] + cmd, **kwargs) + + +def _get_git_directory(): + try: + return _run_shell_command(['git', 'rev-parse', '--git-dir']) + except OSError as e: + if e.errno == errno.ENOENT: + # git not installed. + return '' + raise + + +def _git_is_installed(): + try: + # We cannot use 'which git' as it may not be available + # in some distributions, So just try 'git --version' + # to see if we run into trouble + _run_shell_command(['git', '--version']) + except OSError: + return False + return True + + +def _get_highest_tag(tags): + """Find the highest tag from a list. + + Pass in a list of tag strings and this will return the highest + (latest) as sorted by the pkg_resources version parser. 
+ """ + return max(tags, key=pkg_resources.parse_version) + + +def _find_git_files(dirname='', git_dir=None): + """Behave like a file finder entrypoint plugin. + + We don't actually use the entrypoints system for this because it runs + at absurd times. We only want to do this when we are building an sdist. + """ + file_list = [] + if git_dir is None: + git_dir = _run_git_functions() + if git_dir: + log.info("[pbr] In git context, generating filelist from git") + file_list = _run_git_command(['ls-files', '-z'], git_dir) + # Users can fix utf8 issues locally with a single commit, so we are + # strict here. + file_list = file_list.split(b'\x00'.decode('utf-8')) + return [f for f in file_list if f] + + +def _get_raw_tag_info(git_dir): + describe = _run_git_command(['describe', '--always'], git_dir) + if "-" in describe: + return describe.rsplit("-", 2)[-2] + if "." in describe: + return 0 + return None + + +def get_is_release(git_dir): + return _get_raw_tag_info(git_dir) == 0 + + +def _run_git_functions(): + git_dir = None + if _git_is_installed(): + git_dir = _get_git_directory() + return git_dir or None + + +def get_git_short_sha(git_dir=None): + """Return the short sha for this repo, if it exists.""" + if not git_dir: + git_dir = _run_git_functions() + if git_dir: + return _run_git_command( + ['log', '-n1', '--pretty=format:%h'], git_dir) + return None + + +def _clean_changelog_message(msg): + """Cleans any instances of invalid sphinx wording. + + This escapes/removes any instances of invalid characters + that can be interpreted by sphinx as a warning or error + when translating the Changelog into an HTML file for + documentation building within projects. 
+ + * Escapes '_' which is interpreted as a link + * Escapes '*' which is interpreted as a new line + * Escapes '`' which is interpreted as a literal + """ + + msg = msg.replace('*', '\*') + msg = msg.replace('_', '\_') + msg = msg.replace('`', '\`') + + return msg + + +def _iter_changelog(changelog): + """Convert a oneline log iterator to formatted strings. + + :param changelog: An iterator of one line log entries like + that given by _iter_log_oneline. + :return: An iterator over (release, formatted changelog) tuples. + """ + first_line = True + current_release = None + yield current_release, "CHANGES\n=======\n\n" + for hash, tags, msg in changelog: + if tags: + current_release = _get_highest_tag(tags) + underline = len(current_release) * '-' + if not first_line: + yield current_release, '\n' + yield current_release, ( + "%(tag)s\n%(underline)s\n\n" % + dict(tag=current_release, underline=underline)) + + if not msg.startswith("Merge "): + if msg.endswith("."): + msg = msg[:-1] + msg = _clean_changelog_message(msg) + yield current_release, "* %(msg)s\n" % dict(msg=msg) + first_line = False + + +def _iter_log_oneline(git_dir=None): + """Iterate over --oneline log entries if possible. + + This parses the output into a structured form but does not apply + presentation logic to the output - making it suitable for different + uses. + + :return: An iterator of (hash, tags_set, 1st_line) tuples, or None if + changelog generation is disabled / not available. + """ + if git_dir is None: + git_dir = _get_git_directory() + if not git_dir: + return [] + return _iter_log_inner(git_dir) + + +def _is_valid_version(candidate): + try: + version.SemanticVersion.from_pip_string(candidate) + return True + except ValueError: + return False + + +def _iter_log_inner(git_dir): + """Iterate over --oneline log entries. + + This parses the output intro a structured form but does not apply + presentation logic to the output - making it suitable for different + uses. 
+ + :return: An iterator of (hash, tags_set, 1st_line) tuples. + """ + log.info('[pbr] Generating ChangeLog') + log_cmd = ['log', '--decorate=full', '--format=%h%x00%s%x00%d'] + changelog = _run_git_command(log_cmd, git_dir) + for line in changelog.split('\n'): + line_parts = line.split('\x00') + if len(line_parts) != 3: + continue + sha, msg, refname = line_parts + tags = set() + + # refname can be: + # + # HEAD, tag: refs/tags/1.4.0, refs/remotes/origin/master, \ + # refs/heads/master + # refs/tags/1.3.4 + if "refs/tags/" in refname: + refname = refname.strip()[1:-1] # remove wrapping ()'s + # If we start with "tag: refs/tags/1.2b1, tag: refs/tags/1.2" + # The first split gives us "['', '1.2b1, tag:', '1.2']" + # Which is why we do the second split below on the comma + for tag_string in refname.split("refs/tags/")[1:]: + # git tag does not allow : or " " in tag names, so we split + # on ", " which is the separator between elements + candidate = tag_string.split(", ")[0] + if _is_valid_version(candidate): + tags.add(candidate) + + yield sha, tags, msg + + +def write_git_changelog(git_dir=None, dest_dir=os.path.curdir, + option_dict=None, changelog=None): + """Write a changelog based on the git changelog.""" + start = time.time() + if not option_dict: + option_dict = {} + should_skip = options.get_boolean_option(option_dict, 'skip_changelog', + 'SKIP_WRITE_GIT_CHANGELOG') + if should_skip: + return + if not changelog: + changelog = _iter_log_oneline(git_dir=git_dir) + if changelog: + changelog = _iter_changelog(changelog) + if not changelog: + return + new_changelog = os.path.join(dest_dir, 'ChangeLog') + # If there's already a ChangeLog and it's not writable, just use it + if (os.path.exists(new_changelog) + and not os.access(new_changelog, os.W_OK)): + log.info('[pbr] ChangeLog not written (file already' + ' exists and it is not writeable)') + return + log.info('[pbr] Writing ChangeLog') + with io.open(new_changelog, "w", encoding="utf-8") as changelog_file: + 
for release, content in changelog: + changelog_file.write(content) + stop = time.time() + log.info('[pbr] ChangeLog complete (%0.1fs)' % (stop - start)) + + +def generate_authors(git_dir=None, dest_dir='.', option_dict=dict()): + """Create AUTHORS file using git commits.""" + should_skip = options.get_boolean_option(option_dict, 'skip_authors', + 'SKIP_GENERATE_AUTHORS') + if should_skip: + return + start = time.time() + old_authors = os.path.join(dest_dir, 'AUTHORS.in') + new_authors = os.path.join(dest_dir, 'AUTHORS') + # If there's already an AUTHORS file and it's not writable, just use it + if (os.path.exists(new_authors) + and not os.access(new_authors, os.W_OK)): + return + log.info('[pbr] Generating AUTHORS') + ignore_emails = '(jenkins@review|infra@lists|jenkins@openstack)' + if git_dir is None: + git_dir = _get_git_directory() + if git_dir: + authors = [] + + # don't include jenkins email address in AUTHORS file + git_log_cmd = ['log', '--format=%aN <%aE>'] + authors += _run_git_command(git_log_cmd, git_dir).split('\n') + authors = [a for a in authors if not re.search(ignore_emails, a)] + + # get all co-authors from commit messages + co_authors_out = _run_git_command('log', git_dir) + co_authors = re.findall('Co-authored-by:.+', co_authors_out, + re.MULTILINE) + co_authors = [signed.split(":", 1)[1].strip() + for signed in co_authors if signed] + + authors += co_authors + authors = sorted(set(authors)) + + with open(new_authors, 'wb') as new_authors_fh: + if os.path.exists(old_authors): + with open(old_authors, "rb") as old_authors_fh: + new_authors_fh.write(old_authors_fh.read()) + new_authors_fh.write(('\n'.join(authors) + '\n') + .encode('utf-8')) + stop = time.time() + log.info('[pbr] AUTHORS complete (%0.1fs)' % (stop - start)) diff --git a/pbr-0.43.11-py2.7.egg/pbr/hooks/__init__.py b/pbr-0.43.11-py2.7.egg/pbr/hooks/__init__.py new file mode 100644 index 00000000..f0056c0e --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/hooks/__init__.py @@ -0,0 +1,28 
@@ +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from pbr.hooks import backwards +from pbr.hooks import commands +from pbr.hooks import files +from pbr.hooks import metadata + + +def setup_hook(config): + """Filter config parsed from a setup.cfg to inject our defaults.""" + metadata_config = metadata.MetadataConfig(config) + metadata_config.run() + backwards.BackwardsCompatConfig(config).run() + commands.CommandsConfig(config).run() + files.FilesConfig(config, metadata_config.get_name()).run() diff --git a/pbr-0.43.11-py2.7.egg/pbr/hooks/backwards.py b/pbr-0.43.11-py2.7.egg/pbr/hooks/backwards.py new file mode 100644 index 00000000..01f07ab8 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/hooks/backwards.py @@ -0,0 +1,33 @@ +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from pbr.hooks import base +from pbr import packaging + + +class BackwardsCompatConfig(base.BaseConfig): + + section = 'backwards_compat' + + def hook(self): + self.config['include_package_data'] = 'True' + packaging.append_text_list( + self.config, 'dependency_links', + packaging.parse_dependency_links()) + packaging.append_text_list( + self.config, 'tests_require', + packaging.parse_requirements( + packaging.TEST_REQUIREMENTS_FILES, + strip_markers=True)) diff --git a/pbr-0.43.11-py2.7.egg/pbr/hooks/base.py b/pbr-0.43.11-py2.7.egg/pbr/hooks/base.py new file mode 100644 index 00000000..6672a362 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/hooks/base.py @@ -0,0 +1,34 @@ +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +class BaseConfig(object): + + section = None + + def __init__(self, config): + self._global_config = config + self.config = self._global_config.get(self.section, dict()) + self.pbr_config = config.get('pbr', dict()) + + def run(self): + self.hook() + self.save() + + def hook(self): + pass + + def save(self): + self._global_config[self.section] = self.config diff --git a/pbr-0.43.11-py2.7.egg/pbr/hooks/commands.py b/pbr-0.43.11-py2.7.egg/pbr/hooks/commands.py new file mode 100644 index 00000000..aa4db704 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/hooks/commands.py @@ -0,0 +1,66 @@ +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import os + +from setuptools.command import easy_install + +from pbr.hooks import base +from pbr import options +from pbr import packaging + + +class CommandsConfig(base.BaseConfig): + + section = 'global' + + def __init__(self, config): + super(CommandsConfig, self).__init__(config) + self.commands = self.config.get('commands', "") + + def save(self): + self.config['commands'] = self.commands + super(CommandsConfig, self).save() + + def add_command(self, command): + self.commands = "%s\n%s" % (self.commands, command) + + def hook(self): + self.add_command('pbr.packaging.LocalEggInfo') + self.add_command('pbr.packaging.LocalSDist') + self.add_command('pbr.packaging.LocalInstallScripts') + self.add_command('pbr.packaging.LocalDevelop') + self.add_command('pbr.packaging.LocalRPMVersion') + self.add_command('pbr.packaging.LocalDebVersion') + if os.name != 'nt': + easy_install.get_script_args = packaging.override_get_script_args + + if packaging.have_sphinx(): + self.add_command('pbr.builddoc.LocalBuildDoc') + + if os.path.exists('.testr.conf') and packaging.have_testr(): + # There is a .testr.conf file. We want to use it. 
+ self.add_command('pbr.packaging.TestrTest') + elif self.config.get('nosetests', False) and packaging.have_nose(): + # We seem to still have nose configured + self.add_command('pbr.packaging.NoseTest') + + use_egg = options.get_boolean_option( + self.pbr_config, 'use-egg', 'PBR_USE_EGG') + # We always want non-egg install unless explicitly requested + if 'manpages' in self.pbr_config or not use_egg: + self.add_command('pbr.packaging.LocalInstall') + else: + self.add_command('pbr.packaging.InstallWithGit') diff --git a/pbr-0.43.11-py2.7.egg/pbr/hooks/files.py b/pbr-0.43.11-py2.7.egg/pbr/hooks/files.py new file mode 100644 index 00000000..48bf9e31 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/hooks/files.py @@ -0,0 +1,103 @@ +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import os +import sys + +from pbr import find_package +from pbr.hooks import base + + +def get_manpath(): + manpath = 'share/man' + if os.path.exists(os.path.join(sys.prefix, 'man')): + # This works around a bug with install where it expects every node + # in the relative data directory to be an actual directory, since at + # least Debian derivatives (and probably other platforms as well) + # like to symlink Unixish /usr/local/man to /usr/local/share/man. 
+ manpath = 'man' + return manpath + + +def get_man_section(section): + return os.path.join(get_manpath(), 'man%s' % section) + + +class FilesConfig(base.BaseConfig): + + section = 'files' + + def __init__(self, config, name): + super(FilesConfig, self).__init__(config) + self.name = name + self.data_files = self.config.get('data_files', '') + + def save(self): + self.config['data_files'] = self.data_files + super(FilesConfig, self).save() + + def expand_globs(self): + finished = [] + for line in self.data_files.split("\n"): + if line.rstrip().endswith('*') and '=' in line: + (target, source_glob) = line.split('=') + source_prefix = source_glob.strip()[:-1] + target = target.strip() + if not target.endswith(os.path.sep): + target += os.path.sep + for (dirpath, dirnames, fnames) in os.walk(source_prefix): + finished.append( + "%s = " % dirpath.replace(source_prefix, target)) + finished.extend( + [" %s" % os.path.join(dirpath, f) for f in fnames]) + else: + finished.append(line) + + self.data_files = "\n".join(finished) + + def add_man_path(self, man_path): + self.data_files = "%s\n%s =" % (self.data_files, man_path) + + def add_man_page(self, man_page): + self.data_files = "%s\n %s" % (self.data_files, man_page) + + def get_man_sections(self): + man_sections = dict() + manpages = self.pbr_config['manpages'] + for manpage in manpages.split(): + section_number = manpage.strip()[-1] + section = man_sections.get(section_number, list()) + section.append(manpage.strip()) + man_sections[section_number] = section + return man_sections + + def hook(self): + packages = self.config.get('packages', self.name).strip() + expanded = [] + for pkg in packages.split("\n"): + if os.path.isdir(pkg.strip()): + expanded.append(find_package.smart_find_packages(pkg.strip())) + + self.config['packages'] = "\n".join(expanded) + + self.expand_globs() + + if 'manpages' in self.pbr_config: + man_sections = self.get_man_sections() + for (section, pages) in man_sections.items(): + manpath = 
get_man_section(section) + self.add_man_path(manpath) + for page in pages: + self.add_man_page(page) diff --git a/pbr-0.43.11-py2.7.egg/pbr/hooks/metadata.py b/pbr-0.43.11-py2.7.egg/pbr/hooks/metadata.py new file mode 100644 index 00000000..3f65b6d7 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/hooks/metadata.py @@ -0,0 +1,32 @@ +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from pbr.hooks import base +from pbr import packaging + + +class MetadataConfig(base.BaseConfig): + + section = 'metadata' + + def hook(self): + self.config['version'] = packaging.get_version( + self.config['name'], self.config.get('version', None)) + packaging.append_text_list( + self.config, 'requires_dist', + packaging.parse_requirements()) + + def get_name(self): + return self.config['name'] diff --git a/pbr-0.43.11-py2.7.egg/pbr/options.py b/pbr-0.43.11-py2.7.egg/pbr/options.py new file mode 100644 index 00000000..105b200e --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/options.py @@ -0,0 +1,53 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Copyright (C) 2013 Association of Universities for Research in Astronomy +# (AURA) +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS +# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +# DAMAGE. 
+ +import os + + +TRUE_VALUES = ('true', '1', 'yes') + + +def get_boolean_option(option_dict, option_name, env_name): + return ((option_name in option_dict + and option_dict[option_name][1].lower() in TRUE_VALUES) or + str(os.getenv(env_name)).lower() in TRUE_VALUES) diff --git a/pbr-0.43.11-py2.7.egg/pbr/packaging.py b/pbr-0.43.11-py2.7.egg/pbr/packaging.py new file mode 100644 index 00000000..1ff28da9 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/packaging.py @@ -0,0 +1,823 @@ +# Copyright 2011 OpenStack Foundation +# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +""" +Utilities with minimum-depends for use in setup.py +""" + +from __future__ import unicode_literals + +from distutils.command import install as du_install +from distutils import log +import email +import email.errors +import os +import re +import sys +import warnings + +import pkg_resources +import setuptools +from setuptools.command import develop +from setuptools.command import easy_install +from setuptools.command import egg_info +from setuptools.command import install +from setuptools.command import install_scripts +from setuptools.command import sdist + +from pbr import extra_files +from pbr import git +from pbr import options +import pbr.pbr_json +from pbr import testr_command +from pbr import version + +REQUIREMENTS_FILES = ('requirements.txt', 'tools/pip-requires') +PY_REQUIREMENTS_FILES = [x % sys.version_info[0] for x in ( + 'requirements-py%d.txt', 'tools/pip-requires-py%d')] +TEST_REQUIREMENTS_FILES = ('test-requirements.txt', 'tools/test-requires') + + +def get_requirements_files(): + files = os.environ.get("PBR_REQUIREMENTS_FILES") + if files: + return tuple(f.strip() for f in files.split(',')) + # Returns a list composed of: + # - REQUIREMENTS_FILES with -py2 or -py3 in the name + # (e.g. 
requirements-py3.txt) + # - REQUIREMENTS_FILES + + return PY_REQUIREMENTS_FILES + list(REQUIREMENTS_FILES) + + +def append_text_list(config, key, text_list): + """Append a \n separated list to possibly existing value.""" + new_value = [] + current_value = config.get(key, "") + if current_value: + new_value.append(current_value) + new_value.extend(text_list) + config[key] = '\n'.join(new_value) + + +def _any_existing(file_list): + return [f for f in file_list if os.path.exists(f)] + + +# Get requirements from the first file that exists +def get_reqs_from_files(requirements_files): + existing = _any_existing(requirements_files) + + deprecated = [f for f in existing if f in PY_REQUIREMENTS_FILES] + if deprecated: + warnings.warn('Support for \'-pyN\'-suffixed requirements files is ' + 'deprecated in pbr 4.0 and will be removed in 5.0. ' + 'Use environment markers instead. Conflicting files: ' + '%r' % deprecated, + DeprecationWarning) + + for requirements_file in existing: + with open(requirements_file, 'r') as fil: + return fil.read().split('\n') + + return [] + + +def parse_requirements(requirements_files=None, strip_markers=False): + + if requirements_files is None: + requirements_files = get_requirements_files() + + def egg_fragment(match): + # take a versioned egg fragment and return a + # versioned package requirement e.g. 
+ # nova-1.2.3 becomes nova>=1.2.3 + return re.sub(r'([\w.]+)-([\w.-]+)', + r'\1>=\2', + match.groups()[-1]) + + requirements = [] + for line in get_reqs_from_files(requirements_files): + # Ignore comments + if (not line.strip()) or line.startswith('#'): + continue + + # Ignore index URL lines + if re.match(r'^\s*(-i|--index-url|--extra-index-url).*', line): + continue + + # Handle nested requirements files such as: + # -r other-requirements.txt + if line.startswith('-r'): + req_file = line.partition(' ')[2] + requirements += parse_requirements( + [req_file], strip_markers=strip_markers) + continue + + try: + project_name = pkg_resources.Requirement.parse(line).project_name + except ValueError: + project_name = None + + # For the requirements list, we need to inject only the portion + # after egg= so that distutils knows the package it's looking for + # such as: + # -e git://github.com/openstack/nova/master#egg=nova + # -e git://github.com/openstack/nova/master#egg=nova-1.2.3 + if re.match(r'\s*-e\s+', line): + line = re.sub(r'\s*-e\s+.*#egg=(.*)$', egg_fragment, line) + # such as: + # http://github.com/openstack/nova/zipball/master#egg=nova + # http://github.com/openstack/nova/zipball/master#egg=nova-1.2.3 + elif re.match(r'\s*(https?|git(\+(https|ssh))?):', line): + line = re.sub(r'\s*(https?|git(\+(https|ssh))?):.*#egg=(.*)$', + egg_fragment, line) + # -f lines are for index locations, and don't get used here + elif re.match(r'\s*-f\s+', line): + line = None + reason = 'Index Location' + + if line is not None: + line = re.sub('#.*$', '', line) + if strip_markers: + semi_pos = line.find(';') + if semi_pos < 0: + semi_pos = None + line = line[:semi_pos] + requirements.append(line) + else: + log.info( + '[pbr] Excluding %s: %s' % (project_name, reason)) + + return requirements + + +def parse_dependency_links(requirements_files=None): + if requirements_files is None: + requirements_files = get_requirements_files() + dependency_links = [] + # dependency_links inject 
alternate locations to find packages listed + # in requirements + for line in get_reqs_from_files(requirements_files): + # skip comments and blank lines + if re.match(r'(\s*#)|(\s*$)', line): + continue + # lines with -e or -f need the whole line, minus the flag + if re.match(r'\s*-[ef]\s+', line): + dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line)) + # lines that are only urls can go in unmolested + elif re.match(r'\s*(https?|git(\+(https|ssh))?):', line): + dependency_links.append(line) + return dependency_links + + +class InstallWithGit(install.install): + """Extracts ChangeLog and AUTHORS from git then installs. + + This is useful for e.g. readthedocs where the package is + installed and then docs built. + """ + + command_name = 'install' + + def run(self): + _from_git(self.distribution) + return install.install.run(self) + + +class LocalInstall(install.install): + """Runs python setup.py install in a sensible manner. + + Force a non-egg installed in the manner of + single-version-externally-managed, which allows us to install manpages + and config files. 
+ """ + + command_name = 'install' + + def run(self): + _from_git(self.distribution) + return du_install.install.run(self) + + +class TestrTest(testr_command.Testr): + """Make setup.py test do the right thing.""" + + command_name = 'test' + + def run(self): + # Can't use super - base class old-style class + testr_command.Testr.run(self) + + +class LocalRPMVersion(setuptools.Command): + __doc__ = """Output the rpm *compatible* version string of this package""" + description = __doc__ + + user_options = [] + command_name = "rpm_version" + + def run(self): + log.info("[pbr] Extracting rpm version") + name = self.distribution.get_name() + print(version.VersionInfo(name).semantic_version().rpm_string()) + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + +class LocalDebVersion(setuptools.Command): + __doc__ = """Output the deb *compatible* version string of this package""" + description = __doc__ + + user_options = [] + command_name = "deb_version" + + def run(self): + log.info("[pbr] Extracting deb version") + name = self.distribution.get_name() + print(version.VersionInfo(name).semantic_version().debian_string()) + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + +def have_testr(): + return testr_command.have_testr + + +try: + from nose import commands + + class NoseTest(commands.nosetests): + """Fallback test runner if testr is a no-go.""" + + command_name = 'test' + description = 'DEPRECATED: Run unit tests using nose' + + def run(self): + warnings.warn('nose integration in pbr is deprecated. 
Please use ' + 'the native nose setuptools configuration or call ' + 'nose directly', + DeprecationWarning) + + # Can't use super - base class old-style class + commands.nosetests.run(self) + + _have_nose = True + +except ImportError: + _have_nose = False + + +def have_nose(): + return _have_nose + +_wsgi_text = """#PBR Generated from %(group)r + +import threading + +from %(module_name)s import %(import_target)s + +if __name__ == "__main__": + import argparse + import socket + import sys + import wsgiref.simple_server as wss + + my_ip = socket.gethostbyname(socket.gethostname()) + + parser = argparse.ArgumentParser( + description=%(import_target)s.__doc__, + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + usage='%%(prog)s [-h] [--port PORT] [--host IP] -- [passed options]') + parser.add_argument('--port', '-p', type=int, default=8000, + help='TCP port to listen on') + parser.add_argument('--host', '-b', default='', + help='IP to bind the server to') + parser.add_argument('args', + nargs=argparse.REMAINDER, + metavar='-- [passed options]', + help="'--' is the separator of the arguments used " + "to start the WSGI server and the arguments passed " + "to the WSGI application.") + args = parser.parse_args() + if args.args: + if args.args[0] == '--': + args.args.pop(0) + else: + parser.error("unrecognized arguments: %%s" %% ' '.join(args.args)) + sys.argv[1:] = args.args + server = wss.make_server(args.host, args.port, %(invoke_target)s()) + + print("*" * 80) + print("STARTING test server %(module_name)s.%(invoke_target)s") + url = "http://%%s:%%d/" %% (server.server_name, server.server_port) + print("Available at %%s" %% url) + print("DANGER! 
For testing only, do not use in production") + print("*" * 80) + sys.stdout.flush() + + server.serve_forever() +else: + application = None + app_lock = threading.Lock() + + with app_lock: + if application is None: + application = %(invoke_target)s() + +""" + +_script_text = """# PBR Generated from %(group)r + +import sys + +from %(module_name)s import %(import_target)s + + +if __name__ == "__main__": + sys.exit(%(invoke_target)s()) +""" + + +# the following allows us to specify different templates per entry +# point group when generating pbr scripts. +ENTRY_POINTS_MAP = { + 'console_scripts': _script_text, + 'gui_scripts': _script_text, + 'wsgi_scripts': _wsgi_text +} + + +def generate_script(group, entry_point, header, template): + """Generate the script based on the template. + + :param str group: + The entry-point group name, e.g., "console_scripts". + :param str header: + The first line of the script, e.g., "!#/usr/bin/env python". + :param str template: + The script template. + :returns: + The templated script content + :rtype: + str + """ + if not entry_point.attrs or len(entry_point.attrs) > 2: + raise ValueError("Script targets must be of the form " + "'func' or 'Class.class_method'.") + script_text = template % dict( + group=group, + module_name=entry_point.module_name, + import_target=entry_point.attrs[0], + invoke_target='.'.join(entry_point.attrs), + ) + return header + script_text + + +def override_get_script_args( + dist, executable=os.path.normpath(sys.executable), is_wininst=False): + """Override entrypoints console_script.""" + header = easy_install.get_script_header("", executable, is_wininst) + for group, template in ENTRY_POINTS_MAP.items(): + for name, ep in dist.get_entry_map(group).items(): + yield (name, generate_script(group, ep, header, template)) + + +class LocalDevelop(develop.develop): + + command_name = 'develop' + + def install_wrapper_scripts(self, dist): + if not self.exclude_scripts: + for args in override_get_script_args(dist): + 
self.write_script(*args) + + +class LocalInstallScripts(install_scripts.install_scripts): + """Intercepts console scripts entry_points.""" + command_name = 'install_scripts' + + def _make_wsgi_scripts_only(self, dist, executable, is_wininst): + header = easy_install.get_script_header("", executable, is_wininst) + wsgi_script_template = ENTRY_POINTS_MAP['wsgi_scripts'] + for name, ep in dist.get_entry_map('wsgi_scripts').items(): + content = generate_script( + 'wsgi_scripts', ep, header, wsgi_script_template) + self.write_script(name, content) + + def run(self): + import distutils.command.install_scripts + + self.run_command("egg_info") + if self.distribution.scripts: + # run first to set up self.outfiles + distutils.command.install_scripts.install_scripts.run(self) + else: + self.outfiles = [] + + ei_cmd = self.get_finalized_command("egg_info") + dist = pkg_resources.Distribution( + ei_cmd.egg_base, + pkg_resources.PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), + ei_cmd.egg_name, ei_cmd.egg_version, + ) + bs_cmd = self.get_finalized_command('build_scripts') + executable = getattr( + bs_cmd, 'executable', easy_install.sys_executable) + is_wininst = getattr( + self.get_finalized_command("bdist_wininst"), '_is_running', False + ) + + if 'bdist_wheel' in self.distribution.have_run: + # We're building a wheel which has no way of generating mod_wsgi + # scripts for us. Let's build them. + # NOTE(sigmavirus24): This needs to happen here because, as the + # comment below indicates, no_ep is True when building a wheel. + self._make_wsgi_scripts_only(dist, executable, is_wininst) + + if self.no_ep: + # no_ep is True if we're installing into an .egg file or building + # a .whl file, in those cases, we do not want to build all of the + # entry-points listed for this package. 
+ return + + for args in override_get_script_args(dist, executable, is_wininst): + self.write_script(*args) + + +class LocalManifestMaker(egg_info.manifest_maker): + """Add any files that are in git and some standard sensible files.""" + + def _add_pbr_defaults(self): + for template_line in [ + 'include AUTHORS', + 'include ChangeLog', + 'exclude .gitignore', + 'exclude .gitreview', + 'global-exclude *.pyc' + ]: + self.filelist.process_template_line(template_line) + + def add_defaults(self): + option_dict = self.distribution.get_option_dict('pbr') + + sdist.sdist.add_defaults(self) + self.filelist.append(self.template) + self.filelist.append(self.manifest) + self.filelist.extend(extra_files.get_extra_files()) + should_skip = options.get_boolean_option(option_dict, 'skip_git_sdist', + 'SKIP_GIT_SDIST') + if not should_skip: + rcfiles = git._find_git_files() + if rcfiles: + self.filelist.extend(rcfiles) + elif os.path.exists(self.manifest): + self.read_manifest() + ei_cmd = self.get_finalized_command('egg_info') + self._add_pbr_defaults() + self.filelist.include_pattern("*", prefix=ei_cmd.egg_info) + + +class LocalEggInfo(egg_info.egg_info): + """Override the egg_info command to regenerate SOURCES.txt sensibly.""" + + command_name = 'egg_info' + + def find_sources(self): + """Generate SOURCES.txt only if there isn't one already. + + If we are in an sdist command, then we always want to update + SOURCES.txt. If we are not in an sdist command, then it doesn't + matter one flip, and is actually destructive. 
+ However, if we're in a git context, it's always the right thing to do
+ to recreate SOURCES.txt
+ """
+ manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
+ if (not os.path.exists(manifest_filename) or
+ os.path.exists('.git') or
+ 'sdist' in sys.argv):
+ log.info("[pbr] Processing SOURCES.txt")
+ mm = LocalManifestMaker(self.distribution)
+ mm.manifest = manifest_filename
+ mm.run()
+ self.filelist = mm.filelist
+ else:
+ log.info("[pbr] Reusing existing SOURCES.txt")
+ self.filelist = egg_info.FileList()
+ for entry in open(manifest_filename, 'r').read().split('\n'):
+ self.filelist.append(entry)
+
+
+def _from_git(distribution):
+ option_dict = distribution.get_option_dict('pbr')
+ changelog = git._iter_log_oneline()
+ if changelog:
+ changelog = git._iter_changelog(changelog)
+ git.write_git_changelog(option_dict=option_dict, changelog=changelog)
+ git.generate_authors(option_dict=option_dict)
+
+
+class LocalSDist(sdist.sdist):
+ """Builds the ChangeLog and Authors files from VC first."""
+
+ command_name = 'sdist'
+
+ def checking_reno(self):
+ """Ensure reno is installed and configured.
+
+ We can't run reno-based commands if reno isn't installed/available, and
+ don't want to if the user isn't using it.
+ """
+ if hasattr(self, '_has_reno'):
+ return self._has_reno
+
+ try:
+ # versions of reno without this module will not have the required
+ # feature, hence the import
+ from reno import setup_command # noqa
+ except ImportError:
+ log.info('[pbr] reno was not found or is too old. Skipping '
+ 'release notes')
+ self._has_reno = False
+ return False
+
+ conf, output_file, cache_file = setup_command.load_config(
+ self.distribution)
+
+ if not os.path.exists(os.path.join(conf.reporoot, conf.notespath)):
+ log.info('[pbr] reno does not appear to be configured. 
Skipping '
+ 'release notes')
+ self._has_reno = False
+ return False
+
+ self._files = [output_file, cache_file]
+
+ log.info('[pbr] Generating release notes')
+ self._has_reno = True
+
+ return True
+
+ sub_commands = [('build_reno', checking_reno)] + sdist.sdist.sub_commands
+
+ def run(self):
+ _from_git(self.distribution)
+ # sdist.sdist is an old style class, can't use super()
+ sdist.sdist.run(self)
+
+ def make_distribution(self):
+ # This is included in make_distribution because setuptools doesn't use
+ # 'get_file_list'. As such, this is the only hook point that runs after
+ # the commands in 'sub_commands'
+ if self.checking_reno():
+ self.filelist.extend(self._files)
+ self.filelist.sort()
+ sdist.sdist.make_distribution(self)
+
+try:
+ from pbr import builddoc
+ _have_sphinx = True
+ # Import the symbols from their new home so the package API stays
+ # compatible.
+ LocalBuildDoc = builddoc.LocalBuildDoc
+except ImportError:
+ _have_sphinx = False
+ LocalBuildDoc = None
+
+
+def have_sphinx():
+ return _have_sphinx
+
+
+def _get_increment_kwargs(git_dir, tag):
+ """Calculate the sort of semver increment needed from git history.
+
+ Every commit from HEAD to tag is considered for Sem-Ver metadata lines.
+ See the pbr docs for their syntax.
+
+ :return: a dict of kwargs for passing into SemanticVersion.increment.
+ """
+ result = {}
+ if tag:
+ version_spec = tag + "..HEAD"
+ else:
+ version_spec = "HEAD"
+ # Get the raw body of the commit messages so that we don't have to
+ # parse out any formatting whitespace and to avoid user settings on
+ # git log output affecting our ability to have working sem ver headers. 
+ changelog = git._run_git_command(['log', '--pretty=%B', version_spec], + git_dir) + header_len = len('sem-ver:') + commands = [line[header_len:].strip() for line in changelog.split('\n') + if line.lower().startswith('sem-ver:')] + symbols = set() + for command in commands: + symbols.update([symbol.strip() for symbol in command.split(',')]) + + def _handle_symbol(symbol, symbols, impact): + if symbol in symbols: + result[impact] = True + symbols.discard(symbol) + _handle_symbol('bugfix', symbols, 'patch') + _handle_symbol('feature', symbols, 'minor') + _handle_symbol('deprecation', symbols, 'minor') + _handle_symbol('api-break', symbols, 'major') + for symbol in symbols: + log.info('[pbr] Unknown Sem-Ver symbol %r' % symbol) + # We don't want patch in the kwargs since it is not a keyword argument - + # its the default minimum increment. + result.pop('patch', None) + return result + + +def _get_revno_and_last_tag(git_dir): + """Return the commit data about the most recent tag. + + We use git-describe to find this out, but if there are no + tags then we fall back to counting commits since the beginning + of time. + """ + changelog = git._iter_log_oneline(git_dir=git_dir) + row_count = 0 + for row_count, (ignored, tag_set, ignored) in enumerate(changelog): + version_tags = set() + semver_to_tag = dict() + for tag in list(tag_set): + try: + semver = version.SemanticVersion.from_pip_string(tag) + semver_to_tag[semver] = tag + version_tags.add(semver) + except Exception: + pass + if version_tags: + return semver_to_tag[max(version_tags)], row_count + return "", row_count + + +def _get_version_from_git_target(git_dir, target_version): + """Calculate a version from a target version in git_dir. + + This is used for untagged versions only. A new version is calculated as + necessary based on git metadata - distance to tags, current hash, contents + of commit messages. + + :param git_dir: The git directory we're working from. 
+ :param target_version: If None, the last tagged version (or 0 if there are + no tags yet) is incremented as needed to produce an appropriate target + version following semver rules. Otherwise target_version is used as a + constraint - if semver rules would result in a newer version then an + exception is raised. + :return: A semver version object. + """ + tag, distance = _get_revno_and_last_tag(git_dir) + last_semver = version.SemanticVersion.from_pip_string(tag or '0') + if distance == 0: + new_version = last_semver + else: + new_version = last_semver.increment( + **_get_increment_kwargs(git_dir, tag)) + if target_version is not None and new_version > target_version: + raise ValueError( + "git history requires a target version of %(new)s, but target " + "version is %(target)s" % + dict(new=new_version, target=target_version)) + if distance == 0: + return last_semver + new_dev = new_version.to_dev(distance) + if target_version is not None: + target_dev = target_version.to_dev(distance) + if target_dev > new_dev: + return target_dev + return new_dev + + +def _get_version_from_git(pre_version=None): + """Calculate a version string from git. + + If the revision is tagged, return that. Otherwise calculate a semantic + version description of the tree. + + The number of revisions since the last tag is included in the dev counter + in the version for untagged versions. + + :param pre_version: If supplied use this as the target version rather than + inferring one from the last tag + commit messages. 
+ """ + git_dir = git._run_git_functions() + if git_dir: + try: + tagged = git._run_git_command( + ['describe', '--exact-match'], git_dir, + throw_on_error=True).replace('-', '.') + target_version = version.SemanticVersion.from_pip_string(tagged) + except Exception: + if pre_version: + # not released yet - use pre_version as the target + target_version = version.SemanticVersion.from_pip_string( + pre_version) + else: + # not released yet - just calculate from git history + target_version = None + result = _get_version_from_git_target(git_dir, target_version) + return result.release_string() + # If we don't know the version, return an empty string so at least + # the downstream users of the value always have the same type of + # object to work with. + try: + return unicode() + except NameError: + return '' + + +def _get_version_from_pkg_metadata(package_name): + """Get the version from package metadata if present. + + This looks for PKG-INFO if present (for sdists), and if not looks + for METADATA (for wheels) and failing that will return None. + """ + pkg_metadata_filenames = ['PKG-INFO', 'METADATA'] + pkg_metadata = {} + for filename in pkg_metadata_filenames: + try: + pkg_metadata_file = open(filename, 'r') + except (IOError, OSError): + continue + try: + pkg_metadata = email.message_from_file(pkg_metadata_file) + except email.errors.MessageError: + continue + + # Check to make sure we're in our own dir + if pkg_metadata.get('Name', None) != package_name: + return None + return pkg_metadata.get('Version', None) + + +def get_version(package_name, pre_version=None): + """Get the version of the project. + + First, try getting it from PKG-INFO or METADATA, if it exists. If it does, + that means we're in a distribution tarball or that install has happened. + Otherwise, if there is no PKG-INFO or METADATA file, pull the version + from git. 
+ + We do not support setup.py version sanity in git archive tarballs, nor do + we support packagers directly sucking our git repo into theirs. We expect + that a source tarball be made from our git repo - or that if someone wants + to make a source tarball from a fork of our repo with additional tags in it + that they understand and desire the results of doing that. + + :param pre_version: The version field from setup.cfg - if set then this + version will be the next release. + """ + version = os.environ.get( + "PBR_VERSION", + os.environ.get("OSLO_PACKAGE_VERSION", None)) + if version: + return version + version = _get_version_from_pkg_metadata(package_name) + if version: + return version + version = _get_version_from_git(pre_version) + # Handle http://bugs.python.org/issue11638 + # version will either be an empty unicode string or a valid + # unicode version string, but either way it's unicode and needs to + # be encoded. + if sys.version_info[0] == 2: + version = version.encode('utf-8') + if version: + return version + raise Exception("Versioning for this project requires either an sdist" + " tarball, or access to an upstream git repository." + " It's also possible that there is a mismatch between" + " the package name in setup.cfg and the argument given" + " to pbr.version.VersionInfo. Project name {name} was" + " given, but was not able to be found.".format( + name=package_name)) + + +# This is added because pbr uses pbr to install itself. That means that +# any changes to the egg info writer entrypoints must be forward and +# backward compatible. This maintains the pbr.packaging.write_pbr_json +# path. +write_pbr_json = pbr.pbr_json.write_pbr_json diff --git a/pbr-0.43.11-py2.7.egg/pbr/pbr_json.py b/pbr-0.43.11-py2.7.egg/pbr/pbr_json.py new file mode 100644 index 00000000..08c3da22 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/pbr_json.py @@ -0,0 +1,34 @@ +# Copyright 2011 OpenStack Foundation +# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. 
+# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json + +from pbr import git + + +def write_pbr_json(cmd, basename, filename): + if not hasattr(cmd.distribution, 'pbr') or not cmd.distribution.pbr: + return + git_dir = git._run_git_functions() + if not git_dir: + return + values = dict() + git_version = git.get_git_short_sha(git_dir) + is_release = git.get_is_release(git_dir) + if git_version is not None: + values['git_version'] = git_version + values['is_release'] = is_release + cmd.write_file('pbr', filename, json.dumps(values, sort_keys=True)) diff --git a/pbr-0.43.11-py2.7.egg/pbr/testr_command.py b/pbr-0.43.11-py2.7.egg/pbr/testr_command.py new file mode 100644 index 00000000..d143565f --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/testr_command.py @@ -0,0 +1,167 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Copyright (c) 2013 Testrepository Contributors +# +# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause +# license at the users choice. A copy of both licenses are available in the +# project source as Apache-2.0 and BSD. You may not use this file except in +# compliance with one of these two licences. +# +# Unless required by applicable law or agreed to in writing, software +# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# license you chose for the specific language governing permissions and +# limitations under that license. + +"""setuptools/distutils command to run testr via setup.py + +PBR will hook in the Testr class to provide "setup.py test" when +.testr.conf is present in the repository (see pbr/hooks/commands.py). + +If we are activated but testrepository is not installed, we provide a +sensible error. + +You can pass --coverage which will also export PYTHON='coverage run +--source ' and automatically combine the coverage from +each testr backend test runner after the run completes. 
+ +""" + +from distutils import cmd +import distutils.errors +import logging +import os +import sys +import warnings + +logger = logging.getLogger(__name__) + + +class TestrReal(cmd.Command): + + description = "DEPRECATED: Run unit tests using testr" + + user_options = [ + ('coverage', None, "Replace PYTHON with coverage and merge coverage " + "from each testr worker."), + ('testr-args=', 't', "Run 'testr' with these args"), + ('omit=', 'o', "Files to omit from coverage calculations"), + ('coverage-package-name=', None, "Use this name to select packages " + "for coverage (one or more, " + "comma-separated)"), + ('slowest', None, "Show slowest test times after tests complete."), + ('no-parallel', None, "Run testr serially"), + ('log-level=', 'l', "Log level (default: info)"), + ] + + boolean_options = ['coverage', 'slowest', 'no_parallel'] + + def _run_testr(self, *args): + logger.debug("_run_testr called with args = %r", args) + return commands.run_argv([sys.argv[0]] + list(args), + sys.stdin, sys.stdout, sys.stderr) + + def initialize_options(self): + self.testr_args = None + self.coverage = None + self.omit = "" + self.slowest = None + self.coverage_package_name = None + self.no_parallel = None + self.log_level = 'info' + + def finalize_options(self): + self.log_level = getattr( + logging, + self.log_level.upper(), + logging.INFO) + logging.basicConfig(level=self.log_level) + logger.debug("finalize_options called") + if self.testr_args is None: + self.testr_args = [] + else: + self.testr_args = self.testr_args.split() + if self.omit: + self.omit = "--omit=%s" % self.omit + logger.debug("finalize_options: self.__dict__ = %r", self.__dict__) + + def run(self): + """Set up testr repo, then run testr.""" + logger.debug("run called") + + warnings.warn('testr integration in pbr is deprecated. 
Please use ' + 'the \'testr\' setup command or call testr directly', + DeprecationWarning) + + if not os.path.isdir(".testrepository"): + self._run_testr("init") + + if self.coverage: + self._coverage_before() + if not self.no_parallel: + testr_ret = self._run_testr("run", "--parallel", *self.testr_args) + else: + testr_ret = self._run_testr("run", *self.testr_args) + if testr_ret: + raise distutils.errors.DistutilsError( + "testr failed (%d)" % testr_ret) + if self.slowest: + print("Slowest Tests") + self._run_testr("slowest") + if self.coverage: + self._coverage_after() + + def _coverage_before(self): + logger.debug("_coverage_before called") + package = self.distribution.get_name() + if package.startswith('python-'): + package = package[7:] + + # Use this as coverage package name + if self.coverage_package_name: + package = self.coverage_package_name + options = "--source %s --parallel-mode" % package + os.environ['PYTHON'] = ("coverage run %s" % options) + logger.debug("os.environ['PYTHON'] = %r", os.environ['PYTHON']) + + def _coverage_after(self): + logger.debug("_coverage_after called") + os.system("coverage combine") + os.system("coverage html -d ./cover %s" % self.omit) + os.system("coverage xml -o ./cover/coverage.xml %s" % self.omit) + + +class TestrFake(cmd.Command): + description = "Run unit tests using testr" + user_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + print("Install testrepository to run 'testr' command properly.") + + +try: + from testrepository import commands + have_testr = True + Testr = TestrReal +except ImportError: + have_testr = False + Testr = TestrFake diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/__init__.py b/pbr-0.43.11-py2.7.egg/pbr/tests/__init__.py new file mode 100644 index 00000000..583e0c6b --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/__init__.py @@ -0,0 +1,26 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import testscenarios + + +def load_tests(loader, standard_tests, pattern): + # top level directory cached on loader instance + this_dir = os.path.dirname(__file__) + package_tests = loader.discover(start_dir=this_dir, pattern=pattern) + result = loader.suiteClass() + result.addTests(testscenarios.generate_scenarios(standard_tests)) + result.addTests(testscenarios.generate_scenarios(package_tests)) + return result diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/base.py b/pbr-0.43.11-py2.7.egg/pbr/tests/base.py new file mode 100644 index 00000000..9c409b0a --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/base.py @@ -0,0 +1,221 @@ +# Copyright 2010-2011 OpenStack Foundation +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
# Copyright (C) 2013 Association of Universities for Research in Astronomy
# (AURA)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
#    copyright notice, this list of conditions and the following
#    disclaimer in the documentation and/or other materials provided
#    with the distribution.
#
# 3. The name of AURA and its representatives may not be used to
#    endorse or promote products derived from this software without
#    specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS

"""Common utilities used in testing"""

import os
import shutil
import subprocess
import sys

import fixtures
import testresources
import testtools
from testtools import content

from pbr import options


class DiveDir(fixtures.Fixture):
    """Dive into given directory and return back on cleanup.

    :ivar path: The target directory.
    """

    def __init__(self, path):
        self.path = path

    def setUp(self):
        super(DiveDir, self).setUp()
        # Register the cleanup *before* chdir so we always return to the
        # directory we started in, even if chdir itself fails.
        self.addCleanup(os.chdir, os.getcwd())
        os.chdir(self.path)


class BaseTestCase(testtools.TestCase, testresources.ResourcedTestCase):
    """Base class for pbr tests.

    Sets up timeouts, optional stdout/stderr capture, log capture, an
    isolated $HOME and temp dir, and a scratch copy of the 'testpackage'
    source tree with the CWD pointed at it.
    """

    def setUp(self):
        super(BaseTestCase, self).setUp()
        test_timeout = os.environ.get('OS_TEST_TIMEOUT', 30)
        try:
            test_timeout = int(test_timeout)
        except ValueError:
            # If timeout value is invalid, fall back to no timeout at all.
            print("OS_TEST_TIMEOUT set to invalid value,"
                  " defaulting to no timeout")
            test_timeout = 0
        if test_timeout > 0:
            self.useFixture(fixtures.Timeout(test_timeout, gentle=True))

        if os.environ.get('OS_STDOUT_CAPTURE') in options.TRUE_VALUES:
            stdout = self.useFixture(fixtures.StringStream('stdout')).stream
            self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
        if os.environ.get('OS_STDERR_CAPTURE') in options.TRUE_VALUES:
            stderr = self.useFixture(fixtures.StringStream('stderr')).stream
            self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
        self.log_fixture = self.useFixture(
            fixtures.FakeLogger('pbr'))

        # Older git does not have config --local, so create a temporary home
        # directory to permit using git config --global without stepping on
        # developer configuration.
        self.useFixture(fixtures.TempHomeDir())
        self.useFixture(fixtures.NestedTempfile())
        self.useFixture(fixtures.FakeLogger())
        # TODO(lifeless) we should remove PBR_VERSION from the environment.
        # rather than setting it, because thats not representative - we need to
        # test non-preversioned codepaths too!
        self.useFixture(fixtures.EnvironmentVariable('PBR_VERSION', '0.0'))

        self.temp_dir = self.useFixture(fixtures.TempDir()).path
        self.package_dir = os.path.join(self.temp_dir, 'testpackage')
        shutil.copytree(os.path.join(os.path.dirname(__file__), 'testpackage'),
                        self.package_dir)
        self.addCleanup(os.chdir, os.getcwd())
        os.chdir(self.package_dir)
        self.addCleanup(self._discard_testpackage)
        # Tests can opt into non-PBR_VERSION by setting preversioned=False as
        # an attribute.
        if not getattr(self, 'preversioned', True):
            self.useFixture(fixtures.EnvironmentVariable('PBR_VERSION'))
            setup_cfg_path = os.path.join(self.package_dir, 'setup.cfg')
            # NOTE: use a distinct local name so we do not shadow the
            # module-level 'testtools.content' import.
            with open(setup_cfg_path, 'rt') as cfg:
                cfg_text = cfg.read()
            cfg_text = cfg_text.replace(u'version = 0.1.dev', u'')
            with open(setup_cfg_path, 'wt') as cfg:
                cfg.write(cfg_text)

    def _discard_testpackage(self):
        # Remove pbr.testpackage from sys.modules so that it can be freshly
        # re-imported by the next test
        for k in list(sys.modules):
            if (k == 'pbr_testpackage' or
                    k.startswith('pbr_testpackage.')):
                del sys.modules[k]

    def run_pbr(self, *args, **kwargs):
        """Run the 'pbr' CLI with *args; see _run_cmd for the return value."""
        return self._run_cmd('pbr', args, **kwargs)

    def run_setup(self, *args, **kwargs):
        """Run 'python setup.py *args'; see _run_cmd for the return value."""
        return self._run_cmd(sys.executable, ('setup.py',) + args, **kwargs)

    def _run_cmd(self, cmd, args=[], allow_fail=True, cwd=None):
        """Run a command in the root of the test working copy.

        Runs a command, with the given argument list, in the root of the test
        working copy--returns the stdout and stderr streams and the exit code
        from the subprocess.

        :param cwd: If falsy run within the test package dir, otherwise run
            within the named path.
        """
        cwd = cwd or self.package_dir
        result = _run_cmd([cmd] + list(args), cwd=cwd)
        if result[2] and not allow_fail:
            raise Exception("Command failed retcode=%s" % result[2])
        return result


class CapturedSubprocess(fixtures.Fixture):
    """Run a process and capture its output.

    :attr stdout: The output (a string).
    :attr stderr: The standard error (a string).
    :attr returncode: The return code of the process.

    Note that stdout and stderr are decoded from the bytestrings subprocess
    returns using error=replace.
    """

    def __init__(self, label, *args, **kwargs):
        """Create a CapturedSubprocess.

        :param label: A label for the subprocess in the test log. E.g. 'foo'.
        :param *args: The *args to pass to Popen.
        :param **kwargs: The **kwargs to pass to Popen.
        """
        super(CapturedSubprocess, self).__init__()
        self.label = label
        self.args = args
        self.kwargs = kwargs
        self.kwargs['stderr'] = subprocess.PIPE
        self.kwargs['stdin'] = subprocess.PIPE
        self.kwargs['stdout'] = subprocess.PIPE

    def setUp(self):
        super(CapturedSubprocess, self).setUp()
        proc = subprocess.Popen(*self.args, **self.kwargs)
        out, err = proc.communicate()
        self.out = out.decode('utf-8', 'replace')
        self.err = err.decode('utf-8', 'replace')
        self.addDetail(self.label + '-stdout', content.text_content(self.out))
        self.addDetail(self.label + '-stderr', content.text_content(self.err))
        self.returncode = proc.returncode
        if proc.returncode:
            raise AssertionError('Failed process %s' % proc.returncode)
        self.addCleanup(delattr, self, 'out')
        self.addCleanup(delattr, self, 'err')
        self.addCleanup(delattr, self, 'returncode')


def _run_cmd(args, cwd):
    """Run the command args in cwd.

    :param args: The command to run e.g. ['git', 'status']
    :param cwd: The directory to run the command in.
    :return: (stdout, stderr, returncode) - a flat 3-tuple, with both
        streams decoded as latin1 and stripped.
    """
    p = subprocess.Popen(
        args, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
        stderr=subprocess.PIPE, cwd=cwd)
    streams = tuple(s.decode('latin1').strip() for s in p.communicate())
    for stream_content in streams:
        print(stream_content)
    return streams + (p.returncode,)


def _config_git():
    """Give git a throwaway global identity so commits in tests succeed."""
    _run_cmd(
        ['git', 'config', '--global', 'user.email', 'example@example.com'],
        None)
    _run_cmd(
        ['git', 'config', '--global', 'user.name', 'OpenStack Developer'],
        None)
    _run_cmd(
        ['git', 'config', '--global', 'user.signingkey',
         'example@example.com'], None)
# Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. The name of AURA and its representatives may not be used to
#    endorse or promote products derived from this software without
#    specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS

from testtools import content

from pbr.tests import base


class TestCommands(base.BaseTestCase):
    """Exercise the custom distutils commands that pbr registers."""

    def test_custom_build_py_command(self):
        """Test custom build_py command.

        Test that a custom subclass of the build_py command runs when listed
        in the commands [global] option, rather than the normal build command.
        """
        stdout, stderr, return_code = self.run_setup('build_py')
        self.addDetail('stdout', content.text_content(stdout))
        self.addDetail('stderr', content.text_content(stderr))
        self.assertIn('Running custom build_py command.', stdout)
        self.assertEqual(0, return_code)

    def test_custom_deb_version_py_command(self):
        """Test custom deb_version command."""
        stdout, stderr, return_code = self.run_setup('deb_version')
        self.addDetail('stdout', content.text_content(stdout))
        self.addDetail('stderr', content.text_content(stderr))
        self.assertIn('Extracting deb version', stdout)
        self.assertEqual(0, return_code)

    def test_custom_rpm_version_py_command(self):
        """Test custom rpm_version command."""
        stdout, stderr, return_code = self.run_setup('rpm_version')
        self.addDetail('stdout', content.text_content(stdout))
        self.addDetail('stderr', content.text_content(stderr))
        self.assertIn('Extracting rpm version', stdout)
        self.assertEqual(0, return_code)

    def test_freeze_command(self):
        """Test that freeze output is sorted in a case-insensitive manner."""
        stdout, stderr, return_code = self.run_pbr('freeze')
        self.assertEqual(0, return_code)
        # One package name per output line; order must already be
        # case-insensitively sorted.
        pkgs = [line.split('==')[0].lower() for line in stdout.split('\n')]
        self.assertEqual(sorted(pkgs), pkgs)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Copyright (C) 2013 Association of Universities for Research in Astronomy +# (AURA) +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
# IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS

import glob
import os
import tarfile

import fixtures

from pbr.tests import base


class TestCore(base.BaseTestCase):
    """End-to-end checks of core pbr setup.py behaviour."""

    cmd_names = ('pbr_test_cmd', 'pbr_test_cmd_with_class')

    def check_script_install(self, install_stdout):
        """Assert that every console script was installed pkg_resources-free.

        :param install_stdout: captured stdout of the install command.
        """
        for cmd_name in self.cmd_names:
            install_txt = 'Installing %s script to %s' % (cmd_name,
                                                          self.temp_dir)
            self.assertIn(install_txt, install_stdout)

            cmd_filename = os.path.join(self.temp_dir, cmd_name)

            # Close the handle deterministically instead of leaking it.
            with open(cmd_filename, 'r') as cmd_file:
                script_txt = cmd_file.read()
            self.assertNotIn('pkg_resources', script_txt)

            stdout, _, return_code = self._run_cmd(cmd_filename)
            self.assertIn("PBR", stdout)

    def test_setup_py_keywords(self):
        """setup.py --keywords.

        Test that the `./setup.py --keywords` command returns the correct
        value without balking.
        """
        self.run_setup('egg_info')
        stdout, _, _ = self.run_setup('--keywords')
        # Use a unittest-style assertion (consistent with the rest of the
        # class, and not stripped under python -O).
        self.assertEqual('packaging,distutils,setuptools', stdout)

    def test_setup_py_build_sphinx(self):
        stdout, _, return_code = self.run_setup('build_sphinx')
        self.assertEqual(0, return_code)

    def test_sdist_extra_files(self):
        """Test that the extra files are correctly added."""
        stdout, _, return_code = self.run_setup('sdist', '--formats=gztar')

        # There can be only one
        try:
            tf_path = glob.glob(os.path.join('dist', '*.tar.gz'))[0]
        except IndexError:
            self.fail('source dist not found')

        # Context manager ensures the tarfile handle is closed.
        with tarfile.open(tf_path) as tf:
            names = ['/'.join(p.split('/')[1:]) for p in tf.getnames()]

        self.assertIn('extra-file.txt', names)

    def test_console_script_install(self):
        """Test that we install a non-pkg-resources console script."""
        if os.name == 'nt':
            self.skipTest('Windows support is passthrough')

        stdout, _, return_code = self.run_setup(
            'install_scripts', '--install-dir=%s' % self.temp_dir)

        self.useFixture(
            fixtures.EnvironmentVariable('PYTHONPATH', '.'))

        self.check_script_install(stdout)

    def test_console_script_develop(self):
        """Test that we develop a non-pkg-resources console script."""
        if os.name == 'nt':
            self.skipTest('Windows support is passthrough')

        self.useFixture(
            fixtures.EnvironmentVariable(
                'PYTHONPATH', ".:%s" % self.temp_dir))

        stdout, _, return_code = self.run_setup(
            'develop', '--install-dir=%s' % self.temp_dir)

        self.check_script_install(stdout)


class TestGitSDist(base.BaseTestCase):
    """sdist behaviour when the package dir is a git repository."""

    def setUp(self):
        super(TestGitSDist, self).setUp()

        stdout, _, return_code = self._run_cmd('git', ('init',))
        if return_code:
            self.skipTest("git not installed")

        stdout, _, return_code = self._run_cmd('git', ('add', '.'))
        stdout, _, return_code = self._run_cmd(
            'git', ('commit', '-m', 'Turn this into a git repo'))

        stdout, _, return_code = self.run_setup('sdist', '--formats=gztar')

    def test_sdist_git_extra_files(self):
        """Test that extra files found in git are correctly added."""
        # There can be only one
        tf_path = glob.glob(os.path.join('dist', '*.tar.gz'))[0]
        with tarfile.open(tf_path) as tf:
            names = ['/'.join(p.split('/')[1:]) for p in tf.getnames()]

        self.assertIn('git-extra-file.txt', names)
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import print_function + +import os + +import fixtures + +from pbr.hooks import files +from pbr.tests import base + + +class FilesConfigTest(base.BaseTestCase): + + def setUp(self): + super(FilesConfigTest, self).setUp() + + pkg_fixture = fixtures.PythonPackage( + "fake_package", [ + ("fake_module.py", b""), + ("other_fake_module.py", b""), + ]) + self.useFixture(pkg_fixture) + pkg_etc = os.path.join(pkg_fixture.base, 'etc') + pkg_sub = os.path.join(pkg_etc, 'sub') + subpackage = os.path.join( + pkg_fixture.base, 'fake_package', 'subpackage') + os.makedirs(pkg_sub) + os.makedirs(subpackage) + with open(os.path.join(pkg_etc, "foo"), 'w') as foo_file: + foo_file.write("Foo Data") + with open(os.path.join(pkg_sub, "bar"), 'w') as foo_file: + foo_file.write("Bar Data") + with open(os.path.join(subpackage, "__init__.py"), 'w') as foo_file: + foo_file.write("# empty") + + self.useFixture(base.DiveDir(pkg_fixture.base)) + + def test_implicit_auto_package(self): + config = dict( + files=dict( + ) + ) + files.FilesConfig(config, 'fake_package').run() + self.assertIn('subpackage', config['files']['packages']) + + def test_auto_package(self): + config = dict( + files=dict( + packages='fake_package', + ) + ) + files.FilesConfig(config, 'fake_package').run() + self.assertIn('subpackage', config['files']['packages']) + + def test_data_files_globbing(self): + config = dict( + files=dict( + data_files="\n etc/pbr = etc/*" + ) + ) + files.FilesConfig(config, 'fake_package').run() + self.assertIn( + '\netc/pbr/ = \n etc/foo\netc/pbr/sub = \n etc/sub/bar', + 
config['files']['data_files']) diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/test_hooks.py b/pbr-0.43.11-py2.7.egg/pbr/tests/test_hooks.py new file mode 100644 index 00000000..0fcf96ca --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/test_hooks.py @@ -0,0 +1,72 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Copyright (C) 2013 Association of Universities for Research in Astronomy +# (AURA) +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
# IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS

import os

from testtools import matchers

from pbr.tests import base
from pbr.tests import util


class TestHooks(base.BaseTestCase):
    """Checks that setup hooks declared in setup.cfg [global] are run."""

    def setUp(self):
        super(TestHooks, self).setUp()
        # Register two test hooks in the scratch package's setup.cfg.
        with util.open_config(
                os.path.join(self.package_dir, 'setup.cfg')) as cfg:
            cfg.set('global', 'setup-hooks',
                    'pbr_testpackage._setup_hooks.test_hook_1\n'
                    'pbr_testpackage._setup_hooks.test_hook_2')

    def test_global_setup_hooks(self):
        """Test setup_hooks.

        Test that setup_hooks listed in the [global] section of setup.cfg are
        executed in order.
        """
        stdout, _, return_code = self.run_setup('egg_info')
        # Use unittest-style assertions: consistent with the sibling tests
        # and, unlike bare 'assert', not stripped when run under python -O.
        self.assertIn('test_hook_1\ntest_hook_2', stdout)
        self.assertEqual(0, return_code)

    def test_custom_commands_known(self):
        stdout, _, return_code = self.run_setup('--help-commands')
        self.assertFalse(return_code)
        self.assertThat(stdout, matchers.Contains(" testr "))
import os.path
import shlex
import sys
import textwrap

import fixtures
import testtools

from pbr.tests import base
from pbr.tests import test_packaging

# Knobs for the integration environment, all supplied via the environment.
PIPFLAGS = shlex.split(os.environ.get('PIPFLAGS', ''))
PIPVERSION = os.environ.get('PIPVERSION', 'pip')
PBRVERSION = os.environ.get('PBRVERSION', 'pbr')
REPODIR = os.environ.get('REPODIR', '')
WHEELHOUSE = os.environ.get('WHEELHOUSE', '')
PIP_CMD = ['-m', 'pip'] + PIPFLAGS + ['install', '-f', WHEELHOUSE]
PROJECTS = shlex.split(os.environ.get('PROJECTS', ''))
PBR_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..'))


def all_projects():
    """Yield (short_name, scenario_dict) for every pbr-using project."""
    if not REPODIR:
        return
    # Future: make this path parameterisable.
    excludes = set(['tempest', 'requirements'])
    for name in PROJECTS:
        name = name.strip()
        short_name = name.split('/')[-1]
        try:
            with open(os.path.join(
                    REPODIR, short_name, 'setup.py'), 'rt') as f:
                if 'pbr' not in f.read():
                    continue
        except IOError:
            continue
        if short_name in excludes:
            continue
        yield (short_name, dict(name=name, short_name=short_name))


class TestIntegration(base.BaseTestCase):
    """Full-workflow integration runs against real project checkouts."""

    scenarios = list(all_projects())

    def setUp(self):
        # Integration tests need a higher default - big repos can be slow to
        # clone, particularly under guest load.
        env = fixtures.EnvironmentVariable(
            'OS_TEST_TIMEOUT', os.environ.get('OS_TEST_TIMEOUT', '600'))
        with env:
            super(TestIntegration, self).setUp()
            base._config_git()

    @testtools.skipUnless(
        os.environ.get('PBR_INTEGRATION', None) == '1',
        'integration tests not enabled')
    def test_integration(self):
        # Test that we can:
        # - run sdist from the repo in a venv
        # - install the resulting tarball in a new venv
        # - pip install the repo
        # - pip install -e the repo
        # We don't break these into separate tests because we'd need separate
        # source dirs to isolate from side effects of running pip, and the
        # overheads of setup would start to beat the benefits of parallelism.
        self.useFixture(base.CapturedSubprocess(
            'sync-req',
            ['python', 'update.py', os.path.join(REPODIR, self.short_name)],
            cwd=os.path.join(REPODIR, 'requirements')))
        self.useFixture(base.CapturedSubprocess(
            'commit-requirements',
            'git diff --quiet || git commit -amrequirements',
            cwd=os.path.join(REPODIR, self.short_name), shell=True))
        path = os.path.join(
            self.useFixture(fixtures.TempDir()).path, 'project')
        self.useFixture(base.CapturedSubprocess(
            'clone',
            ['git', 'clone', os.path.join(REPODIR, self.short_name), path]))
        # Step 1: build an sdist from the clone.
        venv = self.useFixture(
            test_packaging.Venv('sdist',
                                modules=['pip', 'wheel', PBRVERSION],
                                pip_cmd=PIP_CMD))
        self.useFixture(base.CapturedSubprocess(
            'sdist', [venv.python, 'setup.py', 'sdist'], cwd=path))
        # Step 2: install the tarball in a fresh venv.
        venv = self.useFixture(
            test_packaging.Venv('tarball',
                                modules=['pip', 'wheel', PBRVERSION],
                                pip_cmd=PIP_CMD))
        filename = os.path.join(
            path, 'dist', os.listdir(os.path.join(path, 'dist'))[0])
        self.useFixture(base.CapturedSubprocess(
            'tarball', [venv.python] + PIP_CMD + [filename]))
        # Step 3: pip install straight from git.
        venv = self.useFixture(
            test_packaging.Venv('install-git',
                                modules=['pip', 'wheel', PBRVERSION],
                                pip_cmd=PIP_CMD))
        root = venv.path
        self.useFixture(base.CapturedSubprocess(
            'install-git', [venv.python] + PIP_CMD + ['git+file://' + path]))
        if self.short_name == 'nova':
            # data_files (migrate.cfg) must survive a git install.
            found = any('migrate.cfg' in filenames
                        for _, _, filenames in os.walk(root))
            self.assertTrue(found)
        # Step 4: editable install.
        venv = self.useFixture(
            test_packaging.Venv('install-e',
                                modules=['pip', 'wheel', PBRVERSION],
                                pip_cmd=PIP_CMD))
        root = venv.path
        self.useFixture(base.CapturedSubprocess(
            'install-e', [venv.python] + PIP_CMD + ['-e', path]))


class TestInstallWithoutPbr(base.BaseTestCase):

    @testtools.skipUnless(
        os.environ.get('PBR_INTEGRATION', None) == '1',
        'integration tests not enabled')
    def test_install_without_pbr(self):
        # Test easy-install of a thing that depends on a thing using pbr
        tempdir = self.useFixture(fixtures.TempDir()).path
        # A directory containing sdists of the things we're going to depend on
        # in using-package.
        dist_dir = os.path.join(tempdir, 'distdir')
        os.mkdir(dist_dir)
        self._run_cmd(sys.executable, ('setup.py', 'sdist', '-d', dist_dir),
                      allow_fail=False, cwd=PBR_ROOT)
        # testpkg - this requires a pbr-using package
        test_pkg_dir = os.path.join(tempdir, 'testpkg')
        os.mkdir(test_pkg_dir)
        pkgs = {
            'pkgTest': {
                'setup.py': textwrap.dedent("""\
                    #!/usr/bin/env python
                    import setuptools
                    setuptools.setup(
                        name = 'pkgTest',
                        tests_require = ['pkgReq'],
                        test_suite='pkgReq'
                    )
                """),
                'setup.cfg': textwrap.dedent("""\
                    [easy_install]
                    find_links = %s
                """ % dist_dir)},
            'pkgReq': {
                'requirements.txt': textwrap.dedent("""\
                    pbr
                """),
                'pkgReq/__init__.py': textwrap.dedent("""\
                    print("FakeTest loaded and ran")
                """)},
        }
        pkg_dirs = self.useFixture(
            test_packaging.CreatePackages(pkgs)).package_dirs
        test_pkg_dir = pkg_dirs['pkgTest']
        req_pkg_dir = pkg_dirs['pkgReq']

        self._run_cmd(sys.executable, ('setup.py', 'sdist', '-d', dist_dir),
                      allow_fail=False, cwd=req_pkg_dir)
        # A venv to test within
        venv = self.useFixture(test_packaging.Venv('nopbr', ['pip', 'wheel']))
        # Run the depending script
        self.useFixture(base.CapturedSubprocess(
            'nopbr', [venv.python, 'setup.py', 'test'], cwd=test_pkg_dir))


class TestMarkersPip(base.BaseTestCase):
    """Environment-marker handling across pip/setuptools generations."""

    scenarios = [
        ('pip-1.5', {'modules': ['pip>=1.5,<1.6']}),
        ('pip-6.0', {'modules': ['pip>=6.0,<6.1']}),
        ('pip-latest', {'modules': ['pip']}),
        ('setuptools-EL7', {'modules': ['pip==1.4.1', 'setuptools==0.9.8']}),
        ('setuptools-Trusty', {'modules': ['pip==1.5', 'setuptools==2.2']}),
        ('setuptools-minimum', {'modules': ['pip==1.5', 'setuptools==0.7.2']}),
    ]

    @testtools.skipUnless(
        os.environ.get('PBR_INTEGRATION', None) == '1',
        'integration tests not enabled')
    def test_pip_versions(self):
        pkgs = {
            'test_markers':
                {'requirements.txt': textwrap.dedent("""\
                    pkg_a; python_version=='1.2'
                    pkg_b; python_version!='1.2'
                """)},
            'pkg_a': {},
            'pkg_b': {},
        }
        pkg_dirs = self.useFixture(
            test_packaging.CreatePackages(pkgs)).package_dirs
        temp_dir = self.useFixture(fixtures.TempDir()).path
        repo_dir = os.path.join(temp_dir, 'repo')
        venv = self.useFixture(test_packaging.Venv('markers'))
        bin_python = venv.python
        os.mkdir(repo_dir)
        for module in self.modules:
            self._run_cmd(
                bin_python,
                ['-m', 'pip', 'install', '--upgrade', module],
                cwd=venv.path, allow_fail=False)
        for pkg in pkg_dirs:
            self._run_cmd(
                bin_python, ['setup.py', 'sdist', '-d', repo_dir],
                cwd=pkg_dirs[pkg], allow_fail=False)
        self._run_cmd(
            bin_python,
            ['-m', 'pip', 'install', '--no-index', '-f', repo_dir,
             'test_markers'],
            cwd=venv.path, allow_fail=False)
        # Only the marker matching this interpreter may be installed.
        self.assertIn('pkg-b', self._run_cmd(
            bin_python, ['-m', 'pip', 'freeze'], cwd=venv.path,
            allow_fail=False)[0])


class TestLTSSupport(base.BaseTestCase):

    # These versions come from the versions installed from the 'virtualenv'
    # command from the 'python-virtualenv' package.
    scenarios = [
        ('EL7', {'modules': ['pip==1.4.1', 'setuptools==0.9.8'],
                 'py3support': True}),  # And EPEL6
        ('Trusty', {'modules': ['pip==1.5', 'setuptools==2.2'],
                    'py3support': True}),
        ('Jessie', {'modules': ['pip==1.5.6', 'setuptools==5.5.1'],
                    'py3support': True}),
        # Wheezy has pip1.1, which cannot be called with '-m pip'
        # So we'll use a different version of pip here.
        ('WheezyPrecise', {'modules': ['pip==1.4.1', 'setuptools==0.6c11'],
                           'py3support': False}),
    ]

    @testtools.skipUnless(
        os.environ.get('PBR_INTEGRATION', None) == '1',
        'integration tests not enabled')
    def test_lts_venv_default_versions(self):
        if sys.version_info[0] == 3 and not self.py3support:
            self.skipTest('This combination will not install with py3, '
                          'skipping test')
        venv = self.useFixture(
            test_packaging.Venv('setuptools', modules=self.modules))
        bin_python = venv.python
        pbr = 'file://%s#egg=pbr' % PBR_ROOT
        # Installing PBR is a reasonable indication that we are not broken on
        # this particular combination of setuptools and pip.
        self._run_cmd(bin_python, ['-m', 'pip', 'install', pbr],
                      cwd=venv.path, allow_fail=False)
Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + +import email +import email.errors +import imp +import os +import re +import sys +import sysconfig +import tempfile +import textwrap + +import fixtures +import mock +import pkg_resources +import six +import testscenarios +import testtools +from testtools import matchers +import virtualenv +import wheel.install + +from pbr import git +from pbr import packaging +from pbr.tests import base + + +PBR_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..')) + + +class TestRepo(fixtures.Fixture): + """A git repo for testing with. + + Use of TempHomeDir with this fixture is strongly recommended as due to the + lack of config --local in older gits, it will write to the users global + configuration without TempHomeDir. 
+ """ + + def __init__(self, basedir): + super(TestRepo, self).__init__() + self._basedir = basedir + + def setUp(self): + super(TestRepo, self).setUp() + base._run_cmd(['git', 'init', '.'], self._basedir) + base._config_git() + base._run_cmd(['git', 'add', '.'], self._basedir) + + def commit(self, message_content='test commit'): + files = len(os.listdir(self._basedir)) + path = self._basedir + '/%d' % files + open(path, 'wt').close() + base._run_cmd(['git', 'add', path], self._basedir) + base._run_cmd(['git', 'commit', '-m', message_content], self._basedir) + + def uncommit(self): + base._run_cmd(['git', 'reset', '--hard', 'HEAD^'], self._basedir) + + def tag(self, version): + base._run_cmd( + ['git', 'tag', '-sm', 'test tag', version], self._basedir) + + +class GPGKeyFixture(fixtures.Fixture): + """Creates a GPG key for testing. + + It's recommended that this be used in concert with a unique home + directory. + """ + + def setUp(self): + super(GPGKeyFixture, self).setUp() + tempdir = self.useFixture(fixtures.TempDir()) + gnupg_version_re = re.compile('^gpg\s.*\s([\d+])\.([\d+])\.([\d+])') + gnupg_version = base._run_cmd(['gpg', '--version'], tempdir.path) + for line in gnupg_version[0].split('\n'): + gnupg_version = gnupg_version_re.match(line) + if gnupg_version: + gnupg_version = (int(gnupg_version.group(1)), + int(gnupg_version.group(2)), + int(gnupg_version.group(3))) + break + else: + if gnupg_version is None: + gnupg_version = (0, 0, 0) + config_file = tempdir.path + '/key-config' + f = open(config_file, 'wt') + try: + if gnupg_version[0] == 2 and gnupg_version[1] >= 1: + f.write(""" + %no-protection + %transient-key + """) + f.write(""" + %no-ask-passphrase + Key-Type: RSA + Name-Real: Example Key + Name-Comment: N/A + Name-Email: example@example.com + Expire-Date: 2d + Preferences: (setpref) + %commit + """) + finally: + f.close() + # Note that --quick-random (--debug-quick-random in GnuPG 2.x) + # does not have a corresponding preferences file setting 
and + # must be passed explicitly on the command line instead + if gnupg_version[0] == 1: + gnupg_random = '--quick-random' + elif gnupg_version[0] >= 2: + gnupg_random = '--debug-quick-random' + else: + gnupg_random = '' + base._run_cmd( + ['gpg', '--gen-key', '--batch', gnupg_random, config_file], + tempdir.path) + + +class Venv(fixtures.Fixture): + """Create a virtual environment for testing with. + + :attr path: The path to the environment root. + :attr python: The path to the python binary in the environment. + """ + + def __init__(self, reason, modules=(), pip_cmd=None): + """Create a Venv fixture. + + :param reason: A human readable string to bake into the venv + file path to aid diagnostics in the case of failures. + :param modules: A list of modules to install, defaults to latest + pip, wheel, and the working copy of PBR. + :attr pip_cmd: A list to override the default pip_cmd passed to + python for installing base packages. + """ + self._reason = reason + if modules == (): + pbr = 'file://%s#egg=pbr' % PBR_ROOT + modules = ['pip', 'wheel', pbr] + self.modules = modules + if pip_cmd is None: + self.pip_cmd = ['-m', 'pip', 'install'] + else: + self.pip_cmd = pip_cmd + + def _setUp(self): + path = self.useFixture(fixtures.TempDir()).path + virtualenv.create_environment(path, clear=True) + python = os.path.join(path, 'bin', 'python') + command = [python] + self.pip_cmd + ['-U'] + if self.modules and len(self.modules) > 0: + command.extend(self.modules) + self.useFixture(base.CapturedSubprocess( + 'mkvenv-' + self._reason, command)) + self.addCleanup(delattr, self, 'path') + self.addCleanup(delattr, self, 'python') + self.path = path + self.python = python + return path, python + + +class CreatePackages(fixtures.Fixture): + """Creates packages from dict with defaults + + :param package_dirs: A dict of package name to directory strings + {'pkg_a': '/tmp/path/to/tmp/pkg_a', 'pkg_b': '/tmp/path/to/tmp/pkg_b'} + """ + + defaults = { + 'setup.py': 
textwrap.dedent(six.u("""\ + #!/usr/bin/env python + import setuptools + setuptools.setup( + setup_requires=['pbr'], + pbr=True, + ) + """)), + 'setup.cfg': textwrap.dedent(six.u("""\ + [metadata] + name = {pkg_name} + """)) + } + + def __init__(self, packages): + """Creates packages from dict with defaults + + :param packages: a dict where the keys are the package name and a + value that is a second dict that may be empty, containing keys of + filenames and a string value of the contents. + {'package-a': {'requirements.txt': 'string', 'setup.cfg': 'string'} + """ + self.packages = packages + + def _writeFile(self, directory, file_name, contents): + path = os.path.abspath(os.path.join(directory, file_name)) + path_dir = os.path.dirname(path) + if not os.path.exists(path_dir): + if path_dir.startswith(directory): + os.makedirs(path_dir) + else: + raise ValueError + with open(path, 'wt') as f: + f.write(contents) + + def _setUp(self): + tmpdir = self.useFixture(fixtures.TempDir()).path + package_dirs = {} + for pkg_name in self.packages: + pkg_path = os.path.join(tmpdir, pkg_name) + package_dirs[pkg_name] = pkg_path + os.mkdir(pkg_path) + for cf in ['setup.py', 'setup.cfg']: + if cf in self.packages[pkg_name]: + contents = self.packages[pkg_name].pop(cf) + else: + contents = self.defaults[cf].format(pkg_name=pkg_name) + self._writeFile(pkg_path, cf, contents) + + for cf in self.packages[pkg_name]: + self._writeFile(pkg_path, cf, self.packages[pkg_name][cf]) + self.useFixture(TestRepo(pkg_path)).commit() + self.addCleanup(delattr, self, 'package_dirs') + self.package_dirs = package_dirs + return package_dirs + + +class TestPackagingInGitRepoWithCommit(base.BaseTestCase): + + scenarios = [ + ('preversioned', dict(preversioned=True)), + ('postversioned', dict(preversioned=False)), + ] + + def setUp(self): + super(TestPackagingInGitRepoWithCommit, self).setUp() + self.repo = self.useFixture(TestRepo(self.package_dir)) + self.repo.commit() + + def test_authors(self): + 
self.run_setup('sdist', allow_fail=False) + # One commit, something should be in the authors list + with open(os.path.join(self.package_dir, 'AUTHORS'), 'r') as f: + body = f.read() + self.assertNotEqual(body, '') + + def test_changelog(self): + self.run_setup('sdist', allow_fail=False) + with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: + body = f.read() + # One commit, something should be in the ChangeLog list + self.assertNotEqual(body, '') + + def test_changelog_handles_astrisk(self): + self.repo.commit(message_content="Allow *.openstack.org to work") + self.run_setup('sdist', allow_fail=False) + with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: + body = f.read() + self.assertIn('\*', body) + + def test_changelog_handles_dead_links_in_commit(self): + self.repo.commit(message_content="See os_ for to_do about qemu_.") + self.run_setup('sdist', allow_fail=False) + with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: + body = f.read() + self.assertIn('os\_', body) + self.assertIn('to\_do', body) + self.assertIn('qemu\_', body) + + def test_changelog_handles_backticks(self): + self.repo.commit(message_content="Allow `openstack.org` to `work") + self.run_setup('sdist', allow_fail=False) + with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: + body = f.read() + self.assertIn('\`', body) + + def test_manifest_exclude_honoured(self): + self.run_setup('sdist', allow_fail=False) + with open(os.path.join( + self.package_dir, + 'pbr_testpackage.egg-info/SOURCES.txt'), 'r') as f: + body = f.read() + self.assertThat( + body, matchers.Not(matchers.Contains('pbr_testpackage/extra.py'))) + self.assertThat(body, matchers.Contains('pbr_testpackage/__init__.py')) + + def test_install_writes_changelog(self): + stdout, _, _ = self.run_setup( + 'install', '--root', self.temp_dir + 'installed', + allow_fail=False) + self.expectThat(stdout, matchers.Contains('Generating ChangeLog')) + + +class 
TestExtrafileInstallation(base.BaseTestCase): + def test_install_glob(self): + stdout, _, _ = self.run_setup( + 'install', '--root', self.temp_dir + 'installed', + allow_fail=False) + self.expectThat( + stdout, matchers.Contains('copying data_files/a.txt')) + self.expectThat( + stdout, matchers.Contains('copying data_files/b.txt')) + + +class TestPackagingInGitRepoWithoutCommit(base.BaseTestCase): + + def setUp(self): + super(TestPackagingInGitRepoWithoutCommit, self).setUp() + self.useFixture(TestRepo(self.package_dir)) + self.run_setup('sdist', allow_fail=False) + + def test_authors(self): + # No commits, no authors in list + with open(os.path.join(self.package_dir, 'AUTHORS'), 'r') as f: + body = f.read() + self.assertEqual('\n', body) + + def test_changelog(self): + # No commits, nothing should be in the ChangeLog list + with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: + body = f.read() + self.assertEqual('CHANGES\n=======\n\n', body) + + +class TestPackagingWheels(base.BaseTestCase): + + def setUp(self): + super(TestPackagingWheels, self).setUp() + self.useFixture(TestRepo(self.package_dir)) + # Build the wheel + self.run_setup('bdist_wheel', allow_fail=False) + # Slowly construct the path to the generated whl + dist_dir = os.path.join(self.package_dir, 'dist') + relative_wheel_filename = os.listdir(dist_dir)[0] + absolute_wheel_filename = os.path.join( + dist_dir, relative_wheel_filename) + wheel_file = wheel.install.WheelFile(absolute_wheel_filename) + wheel_name = wheel_file.parsed_filename.group('namever') + # Create a directory path to unpack the wheel to + self.extracted_wheel_dir = os.path.join(dist_dir, wheel_name) + # Extract the wheel contents to the directory we just created + wheel_file.zipfile.extractall(self.extracted_wheel_dir) + wheel_file.zipfile.close() + + def test_data_directory_has_wsgi_scripts(self): + # Build the path to the scripts directory + scripts_dir = os.path.join( + self.extracted_wheel_dir, 
'pbr_testpackage-0.0.data/scripts') + self.assertTrue(os.path.exists(scripts_dir)) + scripts = os.listdir(scripts_dir) + + self.assertIn('pbr_test_wsgi', scripts) + self.assertIn('pbr_test_wsgi_with_class', scripts) + self.assertNotIn('pbr_test_cmd', scripts) + self.assertNotIn('pbr_test_cmd_with_class', scripts) + + def test_generates_c_extensions(self): + built_package_dir = os.path.join( + self.extracted_wheel_dir, 'pbr_testpackage') + static_object_filename = 'testext.so' + soabi = get_soabi() + if soabi: + static_object_filename = 'testext.{0}.so'.format(soabi) + static_object_path = os.path.join( + built_package_dir, static_object_filename) + + self.assertTrue(os.path.exists(built_package_dir)) + self.assertTrue(os.path.exists(static_object_path)) + + +class TestPackagingHelpers(testtools.TestCase): + + def test_generate_script(self): + group = 'console_scripts' + entry_point = pkg_resources.EntryPoint( + name='test-ep', + module_name='pbr.packaging', + attrs=('LocalInstallScripts',)) + header = '#!/usr/bin/env fake-header\n' + template = ('%(group)s %(module_name)s %(import_target)s ' + '%(invoke_target)s') + + generated_script = packaging.generate_script( + group, entry_point, header, template) + + expected_script = ( + '#!/usr/bin/env fake-header\nconsole_scripts pbr.packaging ' + 'LocalInstallScripts LocalInstallScripts' + ) + self.assertEqual(expected_script, generated_script) + + def test_generate_script_validates_expectations(self): + group = 'console_scripts' + entry_point = pkg_resources.EntryPoint( + name='test-ep', + module_name='pbr.packaging') + header = '#!/usr/bin/env fake-header\n' + template = ('%(group)s %(module_name)s %(import_target)s ' + '%(invoke_target)s') + self.assertRaises( + ValueError, packaging.generate_script, group, entry_point, header, + template) + + entry_point = pkg_resources.EntryPoint( + name='test-ep', + module_name='pbr.packaging', + attrs=('attr1', 'attr2', 'attr3')) + self.assertRaises( + ValueError, 
packaging.generate_script, group, entry_point, header, + template) + + +class TestPackagingInPlainDirectory(base.BaseTestCase): + + def setUp(self): + super(TestPackagingInPlainDirectory, self).setUp() + + def test_authors(self): + self.run_setup('sdist', allow_fail=False) + # Not a git repo, no AUTHORS file created + filename = os.path.join(self.package_dir, 'AUTHORS') + self.assertFalse(os.path.exists(filename)) + + def test_changelog(self): + self.run_setup('sdist', allow_fail=False) + # Not a git repo, no ChangeLog created + filename = os.path.join(self.package_dir, 'ChangeLog') + self.assertFalse(os.path.exists(filename)) + + def test_install_no_ChangeLog(self): + stdout, _, _ = self.run_setup( + 'install', '--root', self.temp_dir + 'installed', + allow_fail=False) + self.expectThat( + stdout, matchers.Not(matchers.Contains('Generating ChangeLog'))) + + +class TestPresenceOfGit(base.BaseTestCase): + + def testGitIsInstalled(self): + with mock.patch.object(git, + '_run_shell_command') as _command: + _command.return_value = 'git version 1.8.4.1' + self.assertEqual(True, git._git_is_installed()) + + def testGitIsNotInstalled(self): + with mock.patch.object(git, + '_run_shell_command') as _command: + _command.side_effect = OSError + self.assertEqual(False, git._git_is_installed()) + + +class ParseRequirementsTest(base.BaseTestCase): + + def test_empty_requirements(self): + actual = packaging.parse_requirements([]) + self.assertEqual([], actual) + + def test_default_requirements(self): + """Ensure default files used if no files provided.""" + tempdir = tempfile.mkdtemp() + requirements = os.path.join(tempdir, 'requirements.txt') + with open(requirements, 'w') as f: + f.write('pbr') + # the defaults are relative to where pbr is called from so we need to + # override them. 
This is OK, however, as we want to validate that + # defaults are used - not what those defaults are + with mock.patch.object(packaging, 'REQUIREMENTS_FILES', ( + requirements,)): + result = packaging.parse_requirements() + self.assertEqual(['pbr'], result) + + def test_override_with_env(self): + """Ensure environment variable used if no files provided.""" + _, tmp_file = tempfile.mkstemp(prefix='openstack', suffix='.setup') + with open(tmp_file, 'w') as fh: + fh.write("foo\nbar") + self.useFixture( + fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES', tmp_file)) + self.assertEqual(['foo', 'bar'], + packaging.parse_requirements()) + + def test_override_with_env_multiple_files(self): + _, tmp_file = tempfile.mkstemp(prefix='openstack', suffix='.setup') + with open(tmp_file, 'w') as fh: + fh.write("foo\nbar") + self.useFixture( + fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES', + "no-such-file," + tmp_file)) + self.assertEqual(['foo', 'bar'], + packaging.parse_requirements()) + + def test_index_present(self): + tempdir = tempfile.mkdtemp() + requirements = os.path.join(tempdir, 'requirements.txt') + with open(requirements, 'w') as f: + f.write('-i https://myindex.local') + f.write(' --index-url https://myindex.local') + f.write(' --extra-index-url https://myindex.local') + result = packaging.parse_requirements([requirements]) + self.assertEqual([], result) + + def test_nested_requirements(self): + tempdir = tempfile.mkdtemp() + requirements = os.path.join(tempdir, 'requirements.txt') + nested = os.path.join(tempdir, 'nested.txt') + with open(requirements, 'w') as f: + f.write('-r ' + nested) + with open(nested, 'w') as f: + f.write('pbr') + result = packaging.parse_requirements([requirements]) + self.assertEqual(['pbr'], result) + + @mock.patch('warnings.warn') + def test_python_version(self, mock_warn): + with open("requirements-py%d.txt" % sys.version_info[0], + "w") as fh: + fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz") + 
self.assertEqual(['foobar', 'foobaz'], + packaging.parse_requirements()) + mock_warn.assert_called_once_with(mock.ANY, DeprecationWarning) + + @mock.patch('warnings.warn') + def test_python_version_multiple_options(self, mock_warn): + with open("requirements-py1.txt", "w") as fh: + fh.write("thisisatrap") + with open("requirements-py%d.txt" % sys.version_info[0], + "w") as fh: + fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz") + self.assertEqual(['foobar', 'foobaz'], + packaging.parse_requirements()) + # even though we have multiple offending files, this should only be + # called once + mock_warn.assert_called_once_with(mock.ANY, DeprecationWarning) + + +class ParseRequirementsTestScenarios(base.BaseTestCase): + + versioned_scenarios = [ + ('non-versioned', {'versioned': False, 'expected': ['bar']}), + ('versioned', {'versioned': True, 'expected': ['bar>=1.2.3']}) + ] + + scenarios = [ + ('normal', {'url': "foo\nbar", 'expected': ['foo', 'bar']}), + ('normal_with_comments', { + 'url': "# this is a comment\nfoo\n# and another one\nbar", + 'expected': ['foo', 'bar']}), + ('removes_index_lines', {'url': '-f foobar', 'expected': []}), + ] + + scenarios = scenarios + testscenarios.multiply_scenarios([ + ('ssh_egg_url', {'url': 'git+ssh://foo.com/zipball#egg=bar'}), + ('git_https_egg_url', {'url': 'git+https://foo.com/zipball#egg=bar'}), + ('http_egg_url', {'url': 'https://foo.com/zipball#egg=bar'}), + ], versioned_scenarios) + + scenarios = scenarios + testscenarios.multiply_scenarios( + [ + ('git_egg_url', + {'url': 'git://foo.com/zipball#egg=bar', 'name': 'bar'}) + ], [ + ('non-editable', {'editable': False}), + ('editable', {'editable': True}), + ], + versioned_scenarios) + + def test_parse_requirements(self): + tmp_file = tempfile.NamedTemporaryFile() + req_string = self.url + if hasattr(self, 'editable') and self.editable: + req_string = ("-e %s" % req_string) + if hasattr(self, 'versioned') and self.versioned: + req_string = ("%s-1.2.3" % 
req_string) + with open(tmp_file.name, 'w') as fh: + fh.write(req_string) + self.assertEqual(self.expected, + packaging.parse_requirements([tmp_file.name])) + + +class ParseDependencyLinksTest(base.BaseTestCase): + + def setUp(self): + super(ParseDependencyLinksTest, self).setUp() + _, self.tmp_file = tempfile.mkstemp(prefix="openstack", + suffix=".setup") + + def test_parse_dependency_normal(self): + with open(self.tmp_file, "w") as fh: + fh.write("http://test.com\n") + self.assertEqual( + ["http://test.com"], + packaging.parse_dependency_links([self.tmp_file])) + + def test_parse_dependency_with_git_egg_url(self): + with open(self.tmp_file, "w") as fh: + fh.write("-e git://foo.com/zipball#egg=bar") + self.assertEqual( + ["git://foo.com/zipball#egg=bar"], + packaging.parse_dependency_links([self.tmp_file])) + + +class TestVersions(base.BaseTestCase): + + scenarios = [ + ('preversioned', dict(preversioned=True)), + ('postversioned', dict(preversioned=False)), + ] + + def setUp(self): + super(TestVersions, self).setUp() + self.repo = self.useFixture(TestRepo(self.package_dir)) + self.useFixture(GPGKeyFixture()) + self.useFixture(base.DiveDir(self.package_dir)) + + def test_email_parsing_errors_are_handled(self): + mocked_open = mock.mock_open() + with mock.patch('pbr.packaging.open', mocked_open): + with mock.patch('email.message_from_file') as message_from_file: + message_from_file.side_effect = [ + email.errors.MessageError('Test'), + {'Name': 'pbr_testpackage'}] + version = packaging._get_version_from_pkg_metadata( + 'pbr_testpackage') + + self.assertTrue(message_from_file.called) + self.assertIsNone(version) + + def test_capitalized_headers(self): + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit('Sem-Ver: api-break') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) + + def test_capitalized_headers_partial(self): + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit('Sem-ver: 
api-break') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) + + def test_tagged_version_has_tag_version(self): + self.repo.commit() + self.repo.tag('1.2.3') + version = packaging._get_version_from_git('1.2.3') + self.assertEqual('1.2.3', version) + + def test_non_canonical_tagged_version_bump(self): + self.repo.commit() + self.repo.tag('1.4') + self.repo.commit('Sem-Ver: api-break') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) + + def test_untagged_version_has_dev_version_postversion(self): + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit() + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.2.4.dev1')) + + def test_untagged_pre_release_has_pre_dev_version_postversion(self): + self.repo.commit() + self.repo.tag('1.2.3.0a1') + self.repo.commit() + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.2.3.0a2.dev1')) + + def test_untagged_version_minor_bump(self): + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit('sem-ver: deprecation') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.3.0.dev1')) + + def test_untagged_version_major_bump(self): + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit('sem-ver: api-break') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) + + def test_untagged_version_has_dev_version_preversion(self): + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit() + version = packaging._get_version_from_git('1.2.5') + self.assertThat(version, matchers.StartsWith('1.2.5.dev1')) + + def test_untagged_version_after_pre_has_dev_version_preversion(self): + self.repo.commit() + self.repo.tag('1.2.3.0a1') + self.repo.commit() + version = packaging._get_version_from_git('1.2.5') + 
self.assertThat(version, matchers.StartsWith('1.2.5.dev1')) + + def test_untagged_version_after_rc_has_dev_version_preversion(self): + self.repo.commit() + self.repo.tag('1.2.3.0a1') + self.repo.commit() + version = packaging._get_version_from_git('1.2.3') + self.assertThat(version, matchers.StartsWith('1.2.3.0a2.dev1')) + + def test_preversion_too_low_simple(self): + # That is, the target version is either already released or not high + # enough for the semver requirements given api breaks etc. + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit() + # Note that we can't target 1.2.3 anymore - with 1.2.3 released we + # need to be working on 1.2.4. + err = self.assertRaises( + ValueError, packaging._get_version_from_git, '1.2.3') + self.assertThat(err.args[0], matchers.StartsWith('git history')) + + def test_preversion_too_low_semver_headers(self): + # That is, the target version is either already released or not high + # enough for the semver requirements given api breaks etc. + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit('sem-ver: feature') + # Note that we can't target 1.2.4, the feature header means we need + # to be working on 1.3.0 or above. 
+ err = self.assertRaises( + ValueError, packaging._get_version_from_git, '1.2.4') + self.assertThat(err.args[0], matchers.StartsWith('git history')) + + def test_get_kwargs_corner_cases(self): + # No tags: + git_dir = self.repo._basedir + '/.git' + get_kwargs = lambda tag: packaging._get_increment_kwargs(git_dir, tag) + + def _check_combinations(tag): + self.repo.commit() + self.assertEqual(dict(), get_kwargs(tag)) + self.repo.commit('sem-ver: bugfix') + self.assertEqual(dict(), get_kwargs(tag)) + self.repo.commit('sem-ver: feature') + self.assertEqual(dict(minor=True), get_kwargs(tag)) + self.repo.uncommit() + self.repo.commit('sem-ver: deprecation') + self.assertEqual(dict(minor=True), get_kwargs(tag)) + self.repo.uncommit() + self.repo.commit('sem-ver: api-break') + self.assertEqual(dict(major=True), get_kwargs(tag)) + self.repo.commit('sem-ver: deprecation') + self.assertEqual(dict(major=True, minor=True), get_kwargs(tag)) + _check_combinations('') + self.repo.tag('1.2.3') + _check_combinations('1.2.3') + + def test_invalid_tag_ignored(self): + # Fix for bug 1356784 - we treated any tag as a version, not just those + # that are valid versions. + self.repo.commit() + self.repo.tag('1') + self.repo.commit() + # when the tree is tagged and its wrong: + self.repo.tag('badver') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.0.1.dev1')) + # When the tree isn't tagged, we also fall through. 
+ self.repo.commit() + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.0.1.dev2')) + # We don't fall through x.y versions + self.repo.commit() + self.repo.tag('1.2') + self.repo.commit() + self.repo.tag('badver2') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.2.1.dev1')) + # Or x.y.z versions + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit() + self.repo.tag('badver3') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.2.4.dev1')) + # Or alpha/beta/pre versions + self.repo.commit() + self.repo.tag('1.2.4.0a1') + self.repo.commit() + self.repo.tag('badver4') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.2.4.0a2.dev1')) + # Non-release related tags are ignored. + self.repo.commit() + self.repo.tag('2') + self.repo.commit() + self.repo.tag('non-release-tag/2014.12.16-1') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('2.0.1.dev1')) + + def test_valid_tag_honoured(self): + # Fix for bug 1370608 - we converted any target into a 'dev version' + # even if there was a distance of 0 - indicating that we were on the + # tag itself. 
+ self.repo.commit() + self.repo.tag('1.3.0.0a1') + version = packaging._get_version_from_git() + self.assertEqual('1.3.0.0a1', version) + + def test_skip_write_git_changelog(self): + # Fix for bug 1467440 + self.repo.commit() + self.repo.tag('1.2.3') + os.environ['SKIP_WRITE_GIT_CHANGELOG'] = '1' + version = packaging._get_version_from_git('1.2.3') + self.assertEqual('1.2.3', version) + + def tearDown(self): + super(TestVersions, self).tearDown() + os.environ.pop('SKIP_WRITE_GIT_CHANGELOG', None) + + +class TestRequirementParsing(base.BaseTestCase): + + def test_requirement_parsing(self): + pkgs = { + 'test_reqparse': + { + 'requirements.txt': textwrap.dedent("""\ + bar + quux<1.0; python_version=='2.6' + requests-aws>=0.1.4 # BSD License (3 clause) + Routes>=1.12.3,!=2.0,!=2.1;python_version=='2.7' + requests-kerberos>=0.6;python_version=='2.7' # MIT + """), + 'setup.cfg': textwrap.dedent("""\ + [metadata] + name = test_reqparse + + [extras] + test = + foo + baz>3.2 :python_version=='2.7' # MIT + bar>3.3 :python_version=='2.7' # MIT # Apache + """)}, + } + pkg_dirs = self.useFixture(CreatePackages(pkgs)).package_dirs + pkg_dir = pkg_dirs['test_reqparse'] + # pkg_resources.split_sections uses None as the title of an + # anonymous section instead of the empty string. Weird. 
+ expected_requirements = { + None: ['bar', 'requests-aws>=0.1.4'], + ":(python_version=='2.6')": ['quux<1.0'], + ":(python_version=='2.7')": ['Routes!=2.0,!=2.1,>=1.12.3', + 'requests-kerberos>=0.6'], + 'test': ['foo'], + "test:(python_version=='2.7')": ['baz>3.2', 'bar>3.3'] + } + venv = self.useFixture(Venv('reqParse')) + bin_python = venv.python + # Two things are tested by this + # 1) pbr properly parses markers from requiremnts.txt and setup.cfg + # 2) bdist_wheel causes pbr to not evaluate markers + self._run_cmd(bin_python, ('setup.py', 'bdist_wheel'), + allow_fail=False, cwd=pkg_dir) + egg_info = os.path.join(pkg_dir, 'test_reqparse.egg-info') + + requires_txt = os.path.join(egg_info, 'requires.txt') + with open(requires_txt, 'rt') as requires: + generated_requirements = dict( + pkg_resources.split_sections(requires)) + + # NOTE(dhellmann): We have to spell out the comparison because + # the rendering for version specifiers in a range is not + # consistent across versions of setuptools. + + for section, expected in expected_requirements.items(): + exp_parsed = [ + pkg_resources.Requirement.parse(s) + for s in expected + ] + gen_parsed = [ + pkg_resources.Requirement.parse(s) + for s in generated_requirements[section] + ] + self.assertEqual(exp_parsed, gen_parsed) + + +def get_soabi(): + soabi = None + try: + soabi = sysconfig.get_config_var('SOABI') + arch = sysconfig.get_config_var('MULTIARCH') + except IOError: + pass + if soabi and arch and 'pypy' in sysconfig.get_scheme_names(): + soabi = '%s-%s' % (soabi, arch) + if soabi is None and 'pypy' in sysconfig.get_scheme_names(): + # NOTE(sigmavirus24): PyPy only added support for the SOABI config var + # to sysconfig in 2015. That was well after 2.2.1 was published in the + # Ubuntu 14.04 archive. 
+ for suffix, _, _ in imp.get_suffixes(): + if suffix.startswith('.pypy') and suffix.endswith('.so'): + soabi = suffix.split('.')[1] + break + return soabi diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/test_pbr_json.py b/pbr-0.43.11-py2.7.egg/pbr/tests/test_pbr_json.py new file mode 100644 index 00000000..f0669713 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/test_pbr_json.py @@ -0,0 +1,30 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import mock + +from pbr import pbr_json +from pbr.tests import base + + +class TestJsonContent(base.BaseTestCase): + @mock.patch('pbr.git._run_git_functions', return_value=True) + @mock.patch('pbr.git.get_git_short_sha', return_value="123456") + @mock.patch('pbr.git.get_is_release', return_value=True) + def test_content(self, mock_get_is, mock_get_git, mock_run): + cmd = mock.Mock() + pbr_json.write_pbr_json(cmd, "basename", "pbr.json") + cmd.write_file.assert_called_once_with( + 'pbr', + 'pbr.json', + '{"git_version": "123456", "is_release": true}' + ) diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/test_setup.py b/pbr-0.43.11-py2.7.egg/pbr/tests/test_setup.py new file mode 100644 index 00000000..85d40ebf --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/test_setup.py @@ -0,0 +1,445 @@ +# Copyright (c) 2011 OpenStack Foundation +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import print_function + +import os + +try: + import cStringIO as io + BytesIO = io.StringIO +except ImportError: + import io + BytesIO = io.BytesIO + +import fixtures + +from pbr import git +from pbr import options +from pbr import packaging +from pbr.tests import base + + +class SkipFileWrites(base.BaseTestCase): + + scenarios = [ + ('changelog_option_true', + dict(option_key='skip_changelog', option_value='True', + env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None, + pkg_func=git.write_git_changelog, filename='ChangeLog')), + ('changelog_option_false', + dict(option_key='skip_changelog', option_value='False', + env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None, + pkg_func=git.write_git_changelog, filename='ChangeLog')), + ('changelog_env_true', + dict(option_key='skip_changelog', option_value='False', + env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True', + pkg_func=git.write_git_changelog, filename='ChangeLog')), + ('changelog_both_true', + dict(option_key='skip_changelog', option_value='True', + env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True', + pkg_func=git.write_git_changelog, filename='ChangeLog')), + ('authors_option_true', + dict(option_key='skip_authors', option_value='True', + env_key='SKIP_GENERATE_AUTHORS', env_value=None, + pkg_func=git.generate_authors, filename='AUTHORS')), + ('authors_option_false', + dict(option_key='skip_authors', option_value='False', + 
env_key='SKIP_GENERATE_AUTHORS', env_value=None, + pkg_func=git.generate_authors, filename='AUTHORS')), + ('authors_env_true', + dict(option_key='skip_authors', option_value='False', + env_key='SKIP_GENERATE_AUTHORS', env_value='True', + pkg_func=git.generate_authors, filename='AUTHORS')), + ('authors_both_true', + dict(option_key='skip_authors', option_value='True', + env_key='SKIP_GENERATE_AUTHORS', env_value='True', + pkg_func=git.generate_authors, filename='AUTHORS')), + ] + + def setUp(self): + super(SkipFileWrites, self).setUp() + self.temp_path = self.useFixture(fixtures.TempDir()).path + self.root_dir = os.path.abspath(os.path.curdir) + self.git_dir = os.path.join(self.root_dir, ".git") + if not os.path.exists(self.git_dir): + self.skipTest("%s is missing; skipping git-related checks" + % self.git_dir) + return + self.filename = os.path.join(self.temp_path, self.filename) + self.option_dict = dict() + if self.option_key is not None: + self.option_dict[self.option_key] = ('setup.cfg', + self.option_value) + self.useFixture( + fixtures.EnvironmentVariable(self.env_key, self.env_value)) + + def test_skip(self): + self.pkg_func(git_dir=self.git_dir, + dest_dir=self.temp_path, + option_dict=self.option_dict) + self.assertEqual( + not os.path.exists(self.filename), + (self.option_value.lower() in options.TRUE_VALUES + or self.env_value is not None)) + +_changelog_content = """7780758\x00Break parser\x00 (tag: refs/tags/1_foo.1) +04316fe\x00Make python\x00 (refs/heads/review/monty_taylor/27519) +378261a\x00Add an integration test script.\x00 +3c373ac\x00Merge "Lib\x00 (HEAD, tag: refs/tags/2013.2.rc2, tag: refs/tags/2013.2, refs/heads/mile-proposed) +182feb3\x00Fix pip invocation for old versions of pip.\x00 (tag: refs/tags/0.5.17) +fa4f46e\x00Remove explicit depend on distribute.\x00 (tag: refs/tags/0.5.16) +d1c53dd\x00Use pip instead of easy_install for installation.\x00 +a793ea1\x00Merge "Skip git-checkout related tests when .git is missing"\x00 
+6c27ce7\x00Skip git-checkout related tests when .git is missing\x00 +451e513\x00Bug fix: create_stack() fails when waiting\x00 +4c8cfe4\x00Improve test coverage: network delete API\x00 (tag: refs/tags/(evil)) +d7e6167\x00Bug fix: Fix pass thru filtering in list_networks\x00 (tag: refs/tags/ev()il) +c47ec15\x00Consider 'in-use' a non-pending volume for caching\x00 (tag: refs/tags/ev)il) +8696fbd\x00Improve test coverage: private extension API\x00 (tag: refs/tags/ev(il) +f0440f8\x00Improve test coverage: hypervisor list\x00 (tag: refs/tags/e(vi)l) +04984a5\x00Refactor hooks file.\x00 (HEAD, tag: 0.6.7,b, tag: refs/tags/(12), refs/heads/master) +a65e8ee\x00Remove jinja pin.\x00 (tag: refs/tags/0.5.14, tag: refs/tags/0.5.13) +""" # noqa + + +def _make_old_git_changelog_format(line): + """Convert post-1.8.1 git log format to pre-1.8.1 git log format""" + + if not line.strip(): + return line + sha, msg, refname = line.split('\x00') + refname = refname.replace('tag: ', '') + return '\x00'.join((sha, msg, refname)) + +_old_git_changelog_content = '\n'.join( + _make_old_git_changelog_format(line) + for line in _changelog_content.split('\n')) + + +class GitLogsTest(base.BaseTestCase): + + scenarios = [ + ('pre1.8.3', {'changelog': _old_git_changelog_content}), + ('post1.8.3', {'changelog': _changelog_content}), + ] + + def setUp(self): + super(GitLogsTest, self).setUp() + self.temp_path = self.useFixture(fixtures.TempDir()).path + self.root_dir = os.path.abspath(os.path.curdir) + self.git_dir = os.path.join(self.root_dir, ".git") + self.useFixture( + fixtures.EnvironmentVariable('SKIP_GENERATE_AUTHORS')) + self.useFixture( + fixtures.EnvironmentVariable('SKIP_WRITE_GIT_CHANGELOG')) + + def test_write_git_changelog(self): + self.useFixture(fixtures.FakePopen(lambda _: { + "stdout": BytesIO(self.changelog.encode('utf-8')) + })) + + git.write_git_changelog(git_dir=self.git_dir, + dest_dir=self.temp_path) + + with open(os.path.join(self.temp_path, "ChangeLog"), "r") as ch_fh: + 
changelog_contents = ch_fh.read() + self.assertIn("2013.2", changelog_contents) + self.assertIn("0.5.17", changelog_contents) + self.assertIn("------", changelog_contents) + self.assertIn("Refactor hooks file", changelog_contents) + self.assertIn( + "Bug fix: create\_stack() fails when waiting", + changelog_contents) + self.assertNotIn("Refactor hooks file.", changelog_contents) + self.assertNotIn("182feb3", changelog_contents) + self.assertNotIn("review/monty_taylor/27519", changelog_contents) + self.assertNotIn("0.5.13", changelog_contents) + self.assertNotIn("0.6.7", changelog_contents) + self.assertNotIn("12", changelog_contents) + self.assertNotIn("(evil)", changelog_contents) + self.assertNotIn("ev()il", changelog_contents) + self.assertNotIn("ev(il", changelog_contents) + self.assertNotIn("ev)il", changelog_contents) + self.assertNotIn("e(vi)l", changelog_contents) + self.assertNotIn('Merge "', changelog_contents) + self.assertNotIn('1\_foo.1', changelog_contents) + + def test_generate_authors(self): + author_old = u"Foo Foo " + author_new = u"Bar Bar " + co_author = u"Foo Bar " + co_author_by = u"Co-authored-by: " + co_author + + git_log_cmd = ( + "git --git-dir=%s log --format=%%aN <%%aE>" + % self.git_dir) + git_co_log_cmd = ("git --git-dir=%s log" % self.git_dir) + git_top_level = "git rev-parse --show-toplevel" + cmd_map = { + git_log_cmd: author_new, + git_co_log_cmd: co_author_by, + git_top_level: self.root_dir, + } + + exist_files = [self.git_dir, + os.path.join(self.temp_path, "AUTHORS.in")] + self.useFixture(fixtures.MonkeyPatch( + "os.path.exists", + lambda path: os.path.abspath(path) in exist_files)) + + def _fake_run_shell_command(cmd, **kwargs): + return cmd_map[" ".join(cmd)] + + self.useFixture(fixtures.MonkeyPatch( + "pbr.git._run_shell_command", + _fake_run_shell_command)) + + with open(os.path.join(self.temp_path, "AUTHORS.in"), "w") as auth_fh: + auth_fh.write("%s\n" % author_old) + + git.generate_authors(git_dir=self.git_dir, + 
dest_dir=self.temp_path) + + with open(os.path.join(self.temp_path, "AUTHORS"), "r") as auth_fh: + authors = auth_fh.read() + self.assertTrue(author_old in authors) + self.assertTrue(author_new in authors) + self.assertTrue(co_author in authors) + + +class _SphinxConfig(object): + man_pages = ['foo'] + + +class BaseSphinxTest(base.BaseTestCase): + + def setUp(self): + super(BaseSphinxTest, self).setUp() + + # setup_command requires the Sphinx instance to have some + # attributes that aren't set normally with the way we use the + # class (because we replace the constructor). Add default + # values directly to the class definition. + import sphinx.application + sphinx.application.Sphinx.messagelog = [] + sphinx.application.Sphinx.statuscode = 0 + + self.useFixture(fixtures.MonkeyPatch( + "sphinx.application.Sphinx.__init__", lambda *a, **kw: None)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.application.Sphinx.build", lambda *a, **kw: None)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.application.Sphinx.config", _SphinxConfig)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.config.Config.init_values", lambda *a: None)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.config.Config.__init__", lambda *a: None)) + from distutils import dist + self.distr = dist.Distribution() + self.distr.packages = ("fake_package",) + self.distr.command_options["build_sphinx"] = { + "source_dir": ["a", "."]} + pkg_fixture = fixtures.PythonPackage( + "fake_package", [("fake_module.py", b""), + ("another_fake_module_for_testing.py", b""), + ("fake_private_module.py", b"")]) + self.useFixture(pkg_fixture) + self.useFixture(base.DiveDir(pkg_fixture.base)) + self.distr.command_options["pbr"] = {} + if hasattr(self, "excludes"): + self.distr.command_options["pbr"]["autodoc_exclude_modules"] = ( + 'setup.cfg', + "fake_package.fake_private_module\n" + "fake_package.another_fake_*\n" + "fake_package.unknown_module") + if hasattr(self, 'has_opt') and self.has_opt: + options = 
self.distr.command_options["pbr"] + options["autodoc_index_modules"] = ('setup.cfg', self.autodoc) + + +class BuildSphinxTest(BaseSphinxTest): + + scenarios = [ + ('true_autodoc_caps', + dict(has_opt=True, autodoc='True', has_autodoc=True)), + ('true_autodoc_caps_with_excludes', + dict(has_opt=True, autodoc='True', has_autodoc=True, + excludes="fake_package.fake_private_module\n" + "fake_package.another_fake_*\n" + "fake_package.unknown_module")), + ('true_autodoc_lower', + dict(has_opt=True, autodoc='true', has_autodoc=True)), + ('false_autodoc', + dict(has_opt=True, autodoc='False', has_autodoc=False)), + ('no_autodoc', + dict(has_opt=False, autodoc='False', has_autodoc=False)), + ] + + def test_build_doc(self): + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.run() + + self.assertTrue( + os.path.exists("api/autoindex.rst") == self.has_autodoc) + self.assertTrue( + os.path.exists( + "api/fake_package.fake_module.rst") == self.has_autodoc) + if not self.has_autodoc or hasattr(self, "excludes"): + assertion = self.assertFalse + else: + assertion = self.assertTrue + assertion( + os.path.exists( + "api/fake_package.fake_private_module.rst")) + assertion( + os.path.exists( + "api/fake_package.another_fake_module_for_testing.rst")) + + def test_builders_config(self): + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.finalize_options() + + self.assertEqual(1, len(build_doc.builders)) + self.assertIn('html', build_doc.builders) + + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.builders = '' + build_doc.finalize_options() + + self.assertEqual('', build_doc.builders) + + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.builders = 'man' + build_doc.finalize_options() + + self.assertEqual(1, len(build_doc.builders)) + self.assertIn('man', build_doc.builders) + + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.builders = 'html,man,doctest' + build_doc.finalize_options() + + self.assertIn('html', build_doc.builders) 
+ self.assertIn('man', build_doc.builders) + self.assertIn('doctest', build_doc.builders) + + def test_cmd_builder_override(self): + + if self.has_opt: + self.distr.command_options["pbr"] = { + "autodoc_index_modules": ('setup.cfg', self.autodoc) + } + + self.distr.command_options["build_sphinx"]["builder"] = ( + "command line", "non-existing-builder") + + build_doc = packaging.LocalBuildDoc(self.distr) + self.assertNotIn('non-existing-builder', build_doc.builders) + self.assertIn('html', build_doc.builders) + + # process command line options which should override config + build_doc.finalize_options() + + self.assertIn('non-existing-builder', build_doc.builders) + self.assertNotIn('html', build_doc.builders) + + def test_cmd_builder_override_multiple_builders(self): + + if self.has_opt: + self.distr.command_options["pbr"] = { + "autodoc_index_modules": ('setup.cfg', self.autodoc) + } + + self.distr.command_options["build_sphinx"]["builder"] = ( + "command line", "builder1,builder2") + + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.finalize_options() + + self.assertEqual(["builder1", "builder2"], build_doc.builders) + + +class APIAutoDocTest(base.BaseTestCase): + + def setUp(self): + super(APIAutoDocTest, self).setUp() + + # setup_command requires the Sphinx instance to have some + # attributes that aren't set normally with the way we use the + # class (because we replace the constructor). Add default + # values directly to the class definition. 
+ import sphinx.application + sphinx.application.Sphinx.messagelog = [] + sphinx.application.Sphinx.statuscode = 0 + + self.useFixture(fixtures.MonkeyPatch( + "sphinx.application.Sphinx.__init__", lambda *a, **kw: None)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.application.Sphinx.build", lambda *a, **kw: None)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.application.Sphinx.config", _SphinxConfig)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.config.Config.init_values", lambda *a: None)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.config.Config.__init__", lambda *a: None)) + from distutils import dist + self.distr = dist.Distribution() + self.distr.packages = ("fake_package",) + self.distr.command_options["build_sphinx"] = { + "source_dir": ["a", "."]} + self.sphinx_options = self.distr.command_options["build_sphinx"] + pkg_fixture = fixtures.PythonPackage( + "fake_package", [("fake_module.py", b""), + ("another_fake_module_for_testing.py", b""), + ("fake_private_module.py", b"")]) + self.useFixture(pkg_fixture) + self.useFixture(base.DiveDir(pkg_fixture.base)) + self.pbr_options = self.distr.command_options.setdefault('pbr', {}) + self.pbr_options["autodoc_index_modules"] = ('setup.cfg', 'True') + + def test_default_api_build_dir(self): + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.run() + + print('PBR OPTIONS:', self.pbr_options) + print('DISTR OPTIONS:', self.distr.command_options) + + self.assertTrue(os.path.exists("api/autoindex.rst")) + self.assertTrue(os.path.exists("api/fake_package.fake_module.rst")) + self.assertTrue( + os.path.exists( + "api/fake_package.fake_private_module.rst")) + self.assertTrue( + os.path.exists( + "api/fake_package.another_fake_module_for_testing.rst")) + + def test_different_api_build_dir(self): + # Options have to come out of the settings dict as a tuple + # showing the source and the value. 
+ self.pbr_options['api_doc_dir'] = (None, 'contributor/api') + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.run() + + print('PBR OPTIONS:', self.pbr_options) + print('DISTR OPTIONS:', self.distr.command_options) + + self.assertTrue(os.path.exists("contributor/api/autoindex.rst")) + self.assertTrue( + os.path.exists("contributor/api/fake_package.fake_module.rst")) + self.assertTrue( + os.path.exists( + "contributor/api/fake_package.fake_private_module.rst")) diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/test_util.py b/pbr-0.43.11-py2.7.egg/pbr/tests/test_util.py new file mode 100644 index 00000000..370a7dee --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/test_util.py @@ -0,0 +1,91 @@ +# Copyright (c) 2015 Hewlett-Packard Development Company, L.P. (HP) +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import io +import textwrap + +import six +from six.moves import configparser +import sys + +from pbr.tests import base +from pbr import util + + +class TestExtrasRequireParsingScenarios(base.BaseTestCase): + + scenarios = [ + ('simple_extras', { + 'config_text': """ + [extras] + first = + foo + bar==1.0 + second = + baz>=3.2 + foo + """, + 'expected_extra_requires': { + 'first': ['foo', 'bar==1.0'], + 'second': ['baz>=3.2', 'foo'], + 'test': ['requests-mock'], + "test:(python_version=='2.6')": ['ordereddict'], + } + }), + ('with_markers', { + 'config_text': """ + [extras] + test = + foo:python_version=='2.6' + bar + baz<1.6 :python_version=='2.6' + zaz :python_version>'1.0' + """, + 'expected_extra_requires': { + "test:(python_version=='2.6')": ['foo', 'baz<1.6'], + "test": ['bar', 'zaz']}}), + ('no_extras', { + 'config_text': """ + [metadata] + long_description = foo + """, + 'expected_extra_requires': + {} + })] + + def config_from_ini(self, ini): + config = {} + if sys.version_info >= (3, 2): + parser = configparser.ConfigParser() + else: + parser = configparser.SafeConfigParser() + ini = textwrap.dedent(six.u(ini)) + parser.readfp(io.StringIO(ini)) + for section in parser.sections(): + config[section] = dict(parser.items(section)) + return config + + def test_extras_parsing(self): + config = self.config_from_ini(self.config_text) + kwargs = util.setup_cfg_to_setup_kwargs(config) + + self.assertEqual(self.expected_extra_requires, + kwargs['extras_require']) + + +class TestInvalidMarkers(base.BaseTestCase): + + def test_invalid_marker_raises_error(self): + config = {'extras': {'test': "foo :bad_marker>'1.0'"}} + self.assertRaises(SyntaxError, util.setup_cfg_to_setup_kwargs, config) diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/test_version.py b/pbr-0.43.11-py2.7.egg/pbr/tests/test_version.py new file mode 100644 index 00000000..d861d572 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/test_version.py @@ -0,0 +1,311 @@ +# Copyright 2012 Red Hat, Inc. 
+# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import itertools + +from testtools import matchers + +from pbr.tests import base +from pbr import version + + +from_pip_string = version.SemanticVersion.from_pip_string + + +class TestSemanticVersion(base.BaseTestCase): + + def test_ordering(self): + ordered_versions = [ + "1.2.3.dev6", + "1.2.3.dev7", + "1.2.3.a4.dev12", + "1.2.3.a4.dev13", + "1.2.3.a4", + "1.2.3.a5.dev1", + "1.2.3.a5", + "1.2.3.b3.dev1", + "1.2.3.b3", + "1.2.3.rc2.dev1", + "1.2.3.rc2", + "1.2.3.rc3.dev1", + "1.2.3", + "1.2.4", + "1.3.3", + "2.2.3", + ] + for v in ordered_versions: + sv = version.SemanticVersion.from_pip_string(v) + self.expectThat(sv, matchers.Equals(sv)) + for left, right in itertools.combinations(ordered_versions, 2): + l_pos = ordered_versions.index(left) + r_pos = ordered_versions.index(right) + if l_pos < r_pos: + m1 = matchers.LessThan + m2 = matchers.GreaterThan + else: + m1 = matchers.GreaterThan + m2 = matchers.LessThan + left_sv = version.SemanticVersion.from_pip_string(left) + right_sv = version.SemanticVersion.from_pip_string(right) + self.expectThat(left_sv, m1(right_sv)) + self.expectThat(right_sv, m2(left_sv)) + + def test_from_pip_string_legacy_alpha(self): + expected = version.SemanticVersion( + 1, 2, 0, prerelease_type='rc', prerelease=1) + parsed = from_pip_string('1.2.0rc1') + self.assertEqual(expected, parsed) + + def 
test_from_pip_string_legacy_postN(self): + # When pbr trunk was incompatible with PEP-440, a stable release was + # made that used postN versions to represent developer builds. As + # we expect only to be parsing versions of our own, we map those + # into dev builds of the next version. + expected = version.SemanticVersion(1, 2, 4, dev_count=5) + parsed = from_pip_string('1.2.3.post5') + self.expectThat(expected, matchers.Equals(parsed)) + expected = version.SemanticVersion(1, 2, 3, 'a', 5, dev_count=6) + parsed = from_pip_string('1.2.3.0a4.post6') + self.expectThat(expected, matchers.Equals(parsed)) + # We can't define a mapping for .postN.devM, so it should raise. + self.expectThat( + lambda: from_pip_string('1.2.3.post5.dev6'), + matchers.raises(ValueError)) + + def test_from_pip_string_v_version(self): + parsed = from_pip_string('v1.2.3') + expected = version.SemanticVersion(1, 2, 3) + self.expectThat(expected, matchers.Equals(parsed)) + + expected = version.SemanticVersion(1, 2, 3, 'a', 5, dev_count=6) + parsed = from_pip_string('V1.2.3.0a4.post6') + self.expectThat(expected, matchers.Equals(parsed)) + + self.expectThat( + lambda: from_pip_string('x1.2.3'), + matchers.raises(ValueError)) + + def test_from_pip_string_legacy_nonzero_lead_in(self): + # reported in bug 1361251 + expected = version.SemanticVersion( + 0, 0, 1, prerelease_type='a', prerelease=2) + parsed = from_pip_string('0.0.1a2') + self.assertEqual(expected, parsed) + + def test_from_pip_string_legacy_short_nonzero_lead_in(self): + expected = version.SemanticVersion( + 0, 1, 0, prerelease_type='a', prerelease=2) + parsed = from_pip_string('0.1a2') + self.assertEqual(expected, parsed) + + def test_from_pip_string_legacy_no_0_prerelease(self): + expected = version.SemanticVersion( + 2, 1, 0, prerelease_type='rc', prerelease=1) + parsed = from_pip_string('2.1.0.rc1') + self.assertEqual(expected, parsed) + + def test_from_pip_string_legacy_no_0_prerelease_2(self): + expected = version.SemanticVersion( 
+ 2, 0, 0, prerelease_type='rc', prerelease=1) + parsed = from_pip_string('2.0.0.rc1') + self.assertEqual(expected, parsed) + + def test_from_pip_string_legacy_non_440_beta(self): + expected = version.SemanticVersion( + 2014, 2, prerelease_type='b', prerelease=2) + parsed = from_pip_string('2014.2.b2') + self.assertEqual(expected, parsed) + + def test_from_pip_string_pure_git_hash(self): + self.assertRaises(ValueError, from_pip_string, '6eed5ae') + + def test_from_pip_string_non_digit_start(self): + self.assertRaises(ValueError, from_pip_string, + 'non-release-tag/2014.12.16-1') + + def test_final_version(self): + semver = version.SemanticVersion(1, 2, 3) + self.assertEqual((1, 2, 3, 'final', 0), semver.version_tuple()) + self.assertEqual("1.2.3", semver.brief_string()) + self.assertEqual("1.2.3", semver.debian_string()) + self.assertEqual("1.2.3", semver.release_string()) + self.assertEqual("1.2.3", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.3")) + + def test_parsing_short_forms(self): + semver = version.SemanticVersion(1, 0, 0) + self.assertEqual(semver, from_pip_string("1")) + self.assertEqual(semver, from_pip_string("1.0")) + self.assertEqual(semver, from_pip_string("1.0.0")) + + def test_dev_version(self): + semver = version.SemanticVersion(1, 2, 4, dev_count=5) + self.assertEqual((1, 2, 4, 'dev', 4), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~dev5", semver.debian_string()) + self.assertEqual("1.2.4.dev5", semver.release_string()) + self.assertEqual("1.2.3.dev5", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.dev5")) + + def test_dev_no_git_version(self): + semver = version.SemanticVersion(1, 2, 4, dev_count=5) + self.assertEqual((1, 2, 4, 'dev', 4), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~dev5", semver.debian_string()) + self.assertEqual("1.2.4.dev5", semver.release_string()) + 
self.assertEqual("1.2.3.dev5", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.dev5")) + + def test_dev_zero_version(self): + semver = version.SemanticVersion(1, 2, 0, dev_count=5) + self.assertEqual((1, 2, 0, 'dev', 4), semver.version_tuple()) + self.assertEqual("1.2.0", semver.brief_string()) + self.assertEqual("1.2.0~dev5", semver.debian_string()) + self.assertEqual("1.2.0.dev5", semver.release_string()) + self.assertEqual("1.1.9999.dev5", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.0.dev5")) + + def test_alpha_dev_version(self): + semver = version.SemanticVersion(1, 2, 4, 'a', 1, 12) + self.assertEqual((1, 2, 4, 'alphadev', 12), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~a1.dev12", semver.debian_string()) + self.assertEqual("1.2.4.0a1.dev12", semver.release_string()) + self.assertEqual("1.2.3.a1.dev12", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.0a1.dev12")) + + def test_alpha_version(self): + semver = version.SemanticVersion(1, 2, 4, 'a', 1) + self.assertEqual((1, 2, 4, 'alpha', 1), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~a1", semver.debian_string()) + self.assertEqual("1.2.4.0a1", semver.release_string()) + self.assertEqual("1.2.3.a1", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.0a1")) + + def test_alpha_zero_version(self): + semver = version.SemanticVersion(1, 2, 0, 'a', 1) + self.assertEqual((1, 2, 0, 'alpha', 1), semver.version_tuple()) + self.assertEqual("1.2.0", semver.brief_string()) + self.assertEqual("1.2.0~a1", semver.debian_string()) + self.assertEqual("1.2.0.0a1", semver.release_string()) + self.assertEqual("1.1.9999.a1", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.0.0a1")) + + def test_alpha_major_zero_version(self): + semver = version.SemanticVersion(1, 0, 0, 'a', 1) + self.assertEqual((1, 0, 0, 'alpha', 
1), semver.version_tuple()) + self.assertEqual("1.0.0", semver.brief_string()) + self.assertEqual("1.0.0~a1", semver.debian_string()) + self.assertEqual("1.0.0.0a1", semver.release_string()) + self.assertEqual("0.9999.9999.a1", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.0.0.0a1")) + + def test_alpha_default_version(self): + semver = version.SemanticVersion(1, 2, 4, 'a') + self.assertEqual((1, 2, 4, 'alpha', 0), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~a0", semver.debian_string()) + self.assertEqual("1.2.4.0a0", semver.release_string()) + self.assertEqual("1.2.3.a0", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.0a0")) + + def test_beta_dev_version(self): + semver = version.SemanticVersion(1, 2, 4, 'b', 1, 12) + self.assertEqual((1, 2, 4, 'betadev', 12), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~b1.dev12", semver.debian_string()) + self.assertEqual("1.2.4.0b1.dev12", semver.release_string()) + self.assertEqual("1.2.3.b1.dev12", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.0b1.dev12")) + + def test_beta_version(self): + semver = version.SemanticVersion(1, 2, 4, 'b', 1) + self.assertEqual((1, 2, 4, 'beta', 1), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~b1", semver.debian_string()) + self.assertEqual("1.2.4.0b1", semver.release_string()) + self.assertEqual("1.2.3.b1", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.0b1")) + + def test_decrement_nonrelease(self): + # The prior version of any non-release is a release + semver = version.SemanticVersion(1, 2, 4, 'b', 1) + self.assertEqual( + version.SemanticVersion(1, 2, 3), semver.decrement()) + + def test_decrement_nonrelease_zero(self): + # We set an arbitrary max version of 9999 when decrementing versions + # - this is part of handling rpm support. 
+ semver = version.SemanticVersion(1, 0, 0) + self.assertEqual( + version.SemanticVersion(0, 9999, 9999), semver.decrement()) + + def test_decrement_release(self): + # The next patch version of a release version requires a change to the + # patch level. + semver = version.SemanticVersion(2, 2, 5) + self.assertEqual( + version.SemanticVersion(2, 2, 4), semver.decrement()) + + def test_increment_nonrelease(self): + # The next patch version of a non-release version is another + # non-release version as the next release doesn't need to be + # incremented. + semver = version.SemanticVersion(1, 2, 4, 'b', 1) + self.assertEqual( + version.SemanticVersion(1, 2, 4, 'b', 2), semver.increment()) + # Major and minor increments however need to bump things. + self.assertEqual( + version.SemanticVersion(1, 3, 0), semver.increment(minor=True)) + self.assertEqual( + version.SemanticVersion(2, 0, 0), semver.increment(major=True)) + + def test_increment_release(self): + # The next patch version of a release version requires a change to the + # patch level. 
+ semver = version.SemanticVersion(1, 2, 5) + self.assertEqual( + version.SemanticVersion(1, 2, 6), semver.increment()) + self.assertEqual( + version.SemanticVersion(1, 3, 0), semver.increment(minor=True)) + self.assertEqual( + version.SemanticVersion(2, 0, 0), semver.increment(major=True)) + + def test_rc_dev_version(self): + semver = version.SemanticVersion(1, 2, 4, 'rc', 1, 12) + self.assertEqual((1, 2, 4, 'candidatedev', 12), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~rc1.dev12", semver.debian_string()) + self.assertEqual("1.2.4.0rc1.dev12", semver.release_string()) + self.assertEqual("1.2.3.rc1.dev12", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.0rc1.dev12")) + + def test_rc_version(self): + semver = version.SemanticVersion(1, 2, 4, 'rc', 1) + self.assertEqual((1, 2, 4, 'candidate', 1), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~rc1", semver.debian_string()) + self.assertEqual("1.2.4.0rc1", semver.release_string()) + self.assertEqual("1.2.3.rc1", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.0rc1")) + + def test_to_dev(self): + self.assertEqual( + version.SemanticVersion(1, 2, 3, dev_count=1), + version.SemanticVersion(1, 2, 3).to_dev(1)) + self.assertEqual( + version.SemanticVersion(1, 2, 3, 'rc', 1, dev_count=1), + version.SemanticVersion(1, 2, 3, 'rc', 1).to_dev(1)) diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/test_wsgi.py b/pbr-0.43.11-py2.7.egg/pbr/tests/test_wsgi.py new file mode 100644 index 00000000..f840610d --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/test_wsgi.py @@ -0,0 +1,163 @@ +# Copyright (c) 2015 Hewlett-Packard Development Company, L.P. (HP) +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import os +import re +import subprocess +import sys +try: + # python 2 + from urllib2 import urlopen +except ImportError: + # python 3 + from urllib.request import urlopen + +from pbr.tests import base + + +class TestWsgiScripts(base.BaseTestCase): + + cmd_names = ('pbr_test_wsgi', 'pbr_test_wsgi_with_class') + + def _get_path(self): + if os.path.isdir("%s/lib64" % self.temp_dir): + path = "%s/lib64" % self.temp_dir + elif os.path.isdir("%s/lib" % self.temp_dir): + path = "%s/lib" % self.temp_dir + elif os.path.isdir("%s/site-packages" % self.temp_dir): + return ".:%s/site-packages" % self.temp_dir + else: + raise Exception("Could not determine path for test") + return ".:%s/python%s.%s/site-packages" % ( + path, + sys.version_info[0], + sys.version_info[1]) + + def test_wsgi_script_install(self): + """Test that we install a non-pkg-resources wsgi script.""" + if os.name == 'nt': + self.skipTest('Windows support is passthrough') + + stdout, _, return_code = self.run_setup( + 'install', '--prefix=%s' % self.temp_dir) + + self._check_wsgi_install_content(stdout) + + def test_wsgi_script_run(self): + """Test that we install a runnable wsgi script. + + This test actually attempts to start and interact with the + wsgi script in question to demonstrate that it's a working + wsgi script using simple server. 
+ + """ + if os.name == 'nt': + self.skipTest('Windows support is passthrough') + + stdout, _, return_code = self.run_setup( + 'install', '--prefix=%s' % self.temp_dir) + + self._check_wsgi_install_content(stdout) + + # Live test run the scripts and see that they respond to wsgi + # requests. + for cmd_name in self.cmd_names: + self._test_wsgi(cmd_name, b'Hello World') + + def _test_wsgi(self, cmd_name, output, extra_args=None): + cmd = os.path.join(self.temp_dir, 'bin', cmd_name) + print("Running %s -p 0" % cmd) + popen_cmd = [cmd, '-p', '0'] + if extra_args: + popen_cmd.extend(extra_args) + + env = {'PYTHONPATH': self._get_path()} + + p = subprocess.Popen(popen_cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, cwd=self.temp_dir, + env=env) + self.addCleanup(p.kill) + + stdoutdata = p.stdout.readline() # ****... + + stdoutdata = p.stdout.readline() # STARTING test server... + self.assertIn( + b"STARTING test server pbr_testpackage.wsgi", + stdoutdata) + + stdoutdata = p.stdout.readline() # Available at ... + print(stdoutdata) + m = re.search(b'(http://[^:]+:\d+)/', stdoutdata) + self.assertIsNotNone(m, "Regex failed to match on %s" % stdoutdata) + + stdoutdata = p.stdout.readline() # DANGER! ... + self.assertIn( + b"DANGER! For testing only, do not use in production", + stdoutdata) + + stdoutdata = p.stdout.readline() # ***... + + f = urlopen(m.group(1).decode('utf-8')) + self.assertEqual(output, f.read()) + + # Request again so that the application can force stderr.flush(), + # otherwise the log is buffered and the next readline() will hang. 
+ urlopen(m.group(1).decode('utf-8')) + + stdoutdata = p.stderr.readline() + # we should have logged an HTTP request, return code 200, that + # returned the right amount of bytes + status = '"GET / HTTP/1.1" 200 %d' % len(output) + self.assertIn(status.encode('utf-8'), stdoutdata) + + def _check_wsgi_install_content(self, install_stdout): + for cmd_name in self.cmd_names: + install_txt = 'Installing %s script to %s' % (cmd_name, + self.temp_dir) + self.assertIn(install_txt, install_stdout) + + cmd_filename = os.path.join(self.temp_dir, 'bin', cmd_name) + + script_txt = open(cmd_filename, 'r').read() + self.assertNotIn('pkg_resources', script_txt) + + main_block = """if __name__ == "__main__": + import argparse + import socket + import sys + import wsgiref.simple_server as wss""" + + if cmd_name == 'pbr_test_wsgi': + app_name = "main" + else: + app_name = "WSGI.app" + + starting_block = ("STARTING test server pbr_testpackage.wsgi." + "%s" % app_name) + + else_block = """else: + application = None""" + + self.assertIn(main_block, script_txt) + self.assertIn(starting_block, script_txt) + self.assertIn(else_block, script_txt) + + def test_with_argument(self): + if os.name == 'nt': + self.skipTest('Windows support is passthrough') + + stdout, _, return_code = self.run_setup( + 'install', '--prefix=%s' % self.temp_dir) + + self._test_wsgi('pbr_test_wsgi', b'Foo Bar', ["--", "-c", "Foo Bar"]) diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/CHANGES.txt b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/CHANGES.txt new file mode 100644 index 00000000..709b9d4c --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/CHANGES.txt @@ -0,0 +1,86 @@ +Changelog +=========== + +0.3 (unreleased) +------------------ + +- The ``glob_data_files`` hook became a pre-command hook for the install_data + command instead of being a setup-hook. 
This is to support the additional + functionality of requiring data_files with relative destination paths to be + install relative to the package's install path (i.e. site-packages). + +- Dropped support for and deprecated the easier_install custom command. + Although it should still work, it probably won't be used anymore for + stsci_python packages. + +- Added support for the ``build_optional_ext`` command, which replaces/extends + the default ``build_ext`` command. See the README for more details. + +- Added the ``tag_svn_revision`` setup_hook as a replacement for the + setuptools-specific tag_svn_revision option to the egg_info command. This + new hook is easier to use than the old tag_svn_revision option: It's + automatically enabled by the presence of ``.dev`` in the version string, and + disabled otherwise. + +- The ``svn_info_pre_hook`` and ``svn_info_post_hook`` have been replaced with + ``version_pre_command_hook`` and ``version_post_command_hook`` respectively. + However, a new ``version_setup_hook``, which has the same purpose, has been + added. It is generally easier to use and will give more consistent results + in that it will run every time setup.py is run, regardless of which command + is used. ``stsci.distutils`` itself uses this hook--see the `setup.cfg` file + and `stsci/distutils/__init__.py` for example usage. + +- Instead of creating an `svninfo.py` module, the new ``version_`` hooks create + a file called `version.py`. In addition to the SVN info that was included + in `svninfo.py`, it includes a ``__version__`` variable to be used by the + package's `__init__.py`. This allows there to be a hard-coded + ``__version__`` variable included in the source code, rather than using + pkg_resources to get the version. + +- In `version.py`, the variables previously named ``__svn_version__`` and + ``__full_svn_info__`` are now named ``__svn_revision__`` and + ``__svn_full_info__``. 
+ +- Fixed a bug when using stsci.distutils in the installation of other packages + in the ``stsci.*`` namespace package. If stsci.distutils was not already + installed, and was downloaded automatically by distribute through the + setup_requires option, then ``stsci.distutils`` would fail to import. This + is because the way the namespace package (nspkg) mechanism currently works, + all packages belonging to the nspkg *must* be on the import path at initial + import time. + + So when installing stsci.tools, for example, if ``stsci.tools`` is imported + from within the source code at install time, but before ``stsci.distutils`` + is downloaded and added to the path, the ``stsci`` package is already + imported and can't be extended to include the path of ``stsci.distutils`` + after the fact. The easiest way of dealing with this, it seems, is to + delete ``stsci`` from ``sys.modules``, which forces it to be reimported, now + the its ``__path__`` extended to include ``stsci.distutil``'s path. + + +0.2.2 (2011-11-09) +------------------ + +- Fixed check for the issue205 bug on actual setuptools installs; before it + only worked on distribute. setuptools has the issue205 bug prior to version + 0.6c10. + +- Improved the fix for the issue205 bug, especially on setuptools. + setuptools, prior to 0.6c10, did not back of sys.modules either before + sandboxing, which causes serious problems. In fact, it's so bad that it's + not enough to add a sys.modules backup to the current sandbox: It's in fact + necessary to monkeypatch setuptools.sandbox.run_setup so that any subsequent + calls to it also back up sys.modules. + + +0.2.1 (2011-09-02) +------------------ + +- Fixed the dependencies so that setuptools is requirement but 'distribute' + specifically. 
Previously installation could fail if users had plain + setuptools installed and not distribute + +0.2 (2011-08-23) +------------------ + +- Initial public release diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/LICENSE.txt b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/LICENSE.txt new file mode 100644 index 00000000..7e8019a8 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/LICENSE.txt @@ -0,0 +1,29 @@ +Copyright (C) 2005 Association of Universities for Research in Astronomy (AURA) + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + 3. The name of AURA and its representatives may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS +OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. 
+ diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/MANIFEST.in b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/MANIFEST.in new file mode 100644 index 00000000..2e35f3ed --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/MANIFEST.in @@ -0,0 +1,2 @@ +include data_files/* +exclude pbr_testpackage/extra.py diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/README.txt b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/README.txt new file mode 100644 index 00000000..b6d84a7b --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/README.txt @@ -0,0 +1,148 @@ +Introduction +============ +This package contains utilities used to package some of STScI's Python +projects; specifically those projects that comprise stsci_python_ and +Astrolib_. + +It currently consists mostly of some setup_hook scripts meant for use with +`distutils2/packaging`_ and/or pbr_, and a customized easy_install command +meant for use with distribute_. + +This package is not meant for general consumption, though it might be worth +looking at for examples of how to do certain things with your own packages, but +YMMV. + +Features +======== + +Hook Scripts +------------ +Currently the main features of this package are a couple of setup_hook scripts. +In distutils2, a setup_hook is a script that runs at the beginning of any +pysetup command, and can modify the package configuration read from setup.cfg. +There are also pre- and post-command hooks that only run before/after a +specific setup command (eg. build_ext, install) is run. + +stsci.distutils.hooks.use_packages_root +''''''''''''''''''''''''''''''''''''''' +If using the ``packages_root`` option under the ``[files]`` section of +setup.cfg, this hook will add that path to ``sys.path`` so that modules in your +package can be imported and used in setup. This can be used even if +``packages_root`` is not specified--in this case it adds ``''`` to +``sys.path``. 
+ +stsci.distutils.hooks.version_setup_hook +'''''''''''''''''''''''''''''''''''''''' +Creates a Python module called version.py which currently contains four +variables: + +* ``__version__`` (the release version) +* ``__svn_revision__`` (the SVN revision info as returned by the ``svnversion`` + command) +* ``__svn_full_info__`` (as returned by the ``svn info`` command) +* ``__setup_datetime__`` (the date and time that setup.py was last run). + +These variables can be imported in the package's `__init__.py` for degugging +purposes. The version.py module will *only* be created in a package that +imports from the version module in its `__init__.py`. It should be noted that +this is generally preferable to writing these variables directly into +`__init__.py`, since this provides more control and is less likely to +unexpectedly break things in `__init__.py`. + +stsci.distutils.hooks.version_pre_command_hook +'''''''''''''''''''''''''''''''''''''''''''''' +Identical to version_setup_hook, but designed to be used as a pre-command +hook. + +stsci.distutils.hooks.version_post_command_hook +''''''''''''''''''''''''''''''''''''''''''''''' +The complement to version_pre_command_hook. This will delete any version.py +files created during a build in order to prevent them from cluttering an SVN +working copy (note, however, that version.py is *not* deleted from the build/ +directory, so a copy of it is still preserved). It will also not be deleted +if the current directory is not an SVN working copy. For example, if source +code extracted from a source tarball it will be preserved. + +stsci.distutils.hooks.tag_svn_revision +'''''''''''''''''''''''''''''''''''''' +A setup_hook to add the SVN revision of the current working copy path to the +package version string, but only if the version ends in .dev. + +For example, ``mypackage-1.0.dev`` becomes ``mypackage-1.0.dev1234``. This is +in accordance with the version string format standardized by PEP 386. 
+ +This should be used as a replacement for the ``tag_svn_revision`` option to +the egg_info command. This hook is more compatible with packaging/distutils2, +which does not include any VCS support. This hook is also more flexible in +that it turns the revision number on/off depending on the presence of ``.dev`` +in the version string, so that it's not automatically added to the version in +final releases. + +This hook does require the ``svnversion`` command to be available in order to +work. It does not examine the working copy metadata directly. + +stsci.distutils.hooks.numpy_extension_hook +'''''''''''''''''''''''''''''''''''''''''' +This is a pre-command hook for the build_ext command. To use it, add a +``[build_ext]`` section to your setup.cfg, and add to it:: + + pre-hook.numpy-extension-hook = stsci.distutils.hooks.numpy_extension_hook + +This hook must be used to build extension modules that use Numpy. The primary +side-effect of this hook is to add the correct numpy include directories to +`include_dirs`. To use it, add 'numpy' to the 'include-dirs' option of each +extension module that requires numpy to build. The value 'numpy' will be +replaced with the actual path to the numpy includes. + +stsci.distutils.hooks.is_display_option +''''''''''''''''''''''''''''''''''''''' +This is not actually a hook, but is a useful utility function that can be used +in writing other hooks. Basically, it returns ``True`` if setup.py was run +with a "display option" such as --version or --help. This can be used to +prevent your hook from running in such cases. + +stsci.distutils.hooks.glob_data_files +''''''''''''''''''''''''''''''''''''' +A pre-command hook for the install_data command. Allows filename wildcards as +understood by ``glob.glob()`` to be used in the data_files option. This hook +must be used in order to have this functionality since it does not normally +exist in distutils. 
+ +This hook also ensures that data files are installed relative to the package +path. data_files shouldn't normally be installed this way, but the +functionality is required for a few special cases. + + +Commands +-------- +build_optional_ext +'''''''''''''''''' +This serves as an optional replacement for the default built_ext command, +which compiles C extension modules. Its purpose is to allow extension modules +to be *optional*, so that if their build fails the rest of the package is +still allowed to be built and installed. This can be used when an extension +module is not definitely required to use the package. + +To use this custom command, add:: + + commands = stsci.distutils.command.build_optional_ext.build_optional_ext + +under the ``[global]`` section of your package's setup.cfg. Then, to mark +an individual extension module as optional, under the setup.cfg section for +that extension add:: + + optional = True + +Optionally, you may also add a custom failure message by adding:: + + fail_message = The foobar extension module failed to compile. + This could be because you lack such and such headers. + This package will still work, but such and such features + will be disabled. + + +.. _stsci_python: http://www.stsci.edu/resources/software_hardware/pyraf/stsci_python +.. _Astrolib: http://www.scipy.org/AstroLib/ +.. _distutils2/packaging: http://distutils2.notmyidea.org/ +.. _d2to1: http://pypi.python.org/pypi/d2to1 +.. 
_distribute: http://pypi.python.org/pypi/distribute diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/data_files/a.txt b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/data_files/a.txt new file mode 100644 index 00000000..e69de29b diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/data_files/b.txt b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/data_files/b.txt new file mode 100644 index 00000000..e69de29b diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/data_files/c.rst b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/data_files/c.rst new file mode 100644 index 00000000..e69de29b diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/conf.py b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/conf.py new file mode 100644 index 00000000..73585100 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/conf.py @@ -0,0 +1,74 @@ +# -*- coding: utf-8 -*- +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import sys + +sys.path.insert(0, os.path.abspath('../..')) +# -- General configuration ---------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = [ + 'sphinx.ext.autodoc', + #'sphinx.ext.intersphinx', +] + +# autodoc generation is a bit aggressive and a nuisance when doing heavy +# text edit cycles. 
+# execute "export SPHINX_DEBUG=1" in your terminal to disable + +# The suffix of source filenames. +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'testpackage' +copyright = u'2013, OpenStack Foundation' + +# If true, '()' will be appended to :func: etc. cross-reference text. +add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +add_module_names = True + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# -- Options for HTML output -------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. Major themes that come with +# Sphinx are currently 'default' and 'sphinxdoc'. +# html_theme_path = ["."] +# html_theme = '_theme' +# html_static_path = ['static'] + +# Output file base name for HTML help builder. +htmlhelp_basename = '%sdoc' % project + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass +# [howto/manual]). +latex_documents = [ + ('index', + '%s.tex' % project, + u'%s Documentation' % project, + u'OpenStack Foundation', 'manual'), +] + +# Example configuration for intersphinx: refer to the Python standard library. +#intersphinx_mapping = {'http://docs.python.org/': None} diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/index.rst b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/index.rst new file mode 100644 index 00000000..9ce317fd --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/index.rst @@ -0,0 +1,23 @@ +.. testpackage documentation master file, created by + sphinx-quickstart on Tue Jul 9 22:26:36 2013. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to testpackage's documentation! 
+======================================================== + +Contents: + +.. toctree:: + :maxdepth: 2 + + installation + usage + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/installation.rst b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/installation.rst new file mode 100644 index 00000000..65bca43f --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/installation.rst @@ -0,0 +1,12 @@ +============ +Installation +============ + +At the command line:: + + $ pip install testpackage + +Or, if you have virtualenvwrapper installed:: + + $ mkvirtualenv testpackage + $ pip install testpackage diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/usage.rst b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/usage.rst new file mode 100644 index 00000000..af97d795 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/doc/source/usage.rst @@ -0,0 +1,7 @@ +======== +Usage +======== + +To use testpackage in a project:: + + import testpackage diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/extra-file.txt b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/extra-file.txt new file mode 100644 index 00000000..e69de29b diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/git-extra-file.txt b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/git-extra-file.txt new file mode 100644 index 00000000..e69de29b diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/__init__.py b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/__init__.py new file mode 100644 index 00000000..aa56dc6f --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/__init__.py @@ -0,0 +1,3 @@ +import pbr.version + +__version__ = pbr.version.VersionInfo('pbr_testpackage').version_string() diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/_setup_hooks.py 
b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/_setup_hooks.py new file mode 100644 index 00000000..f8b30876 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/_setup_hooks.py @@ -0,0 +1,65 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Copyright (C) 2013 Association of Universities for Research in Astronomy +# (AURA) +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + +from distutils.command import build_py + + +def test_hook_1(config): + print('test_hook_1') + + +def test_hook_2(config): + print('test_hook_2') + + +class test_command(build_py.build_py): + command_name = 'build_py' + + def run(self): + print('Running custom build_py command.') + return build_py.build_py.run(self) + + +def test_pre_hook(cmdobj): + print('build_ext pre-hook') + + +def test_post_hook(cmdobj): + print('build_ext post-hook') diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/cmd.py b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/cmd.py new file mode 100644 index 00000000..4cc4522f --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/cmd.py @@ -0,0 +1,26 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import print_function + + +def main(): + print("PBR Test Command") + + +class Foo(object): + + @classmethod + def bar(self): + print("PBR Test Command - with class!") diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/extra.py b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/extra.py new file mode 100644 index 00000000..e69de29b diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/package_data/1.txt b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/package_data/1.txt new file mode 100644 index 00000000..e69de29b diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/package_data/2.txt b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/package_data/2.txt new file mode 100644 index 00000000..e69de29b diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/wsgi.py b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/wsgi.py new file mode 100644 index 00000000..1edd54d3 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/pbr_testpackage/wsgi.py @@ -0,0 +1,40 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import print_function + +import argparse +import functools +import sys + + +def application(env, start_response, data): + sys.stderr.flush() # Force the previous request log to be written. 
+ start_response('200 OK', [('Content-Type', 'text/html')]) + return [data.encode('utf-8')] + + +def main(): + parser = argparse.ArgumentParser(description='Return a string.') + parser.add_argument('--content', '-c', help='String returned', + default='Hello World') + args = parser.parse_args() + return functools.partial(application, data=args.content) + + +class WSGI(object): + + @classmethod + def app(self): + return functools.partial(application, data='Hello World') diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/setup.cfg b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/setup.cfg new file mode 100644 index 00000000..bf4c26a2 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/setup.cfg @@ -0,0 +1,58 @@ +[metadata] +name = pbr_testpackage +# TODO(lifeless) we should inject this as needed otherwise we're not truely +# testing postversioned codepaths. +version = 0.1.dev +author = OpenStack +author-email = openstack-dev@lists.openstack.org +home-page = http://pypi.python.org/pypi/pbr +project_urls = + Bug Tracker = https://bugs.launchpad.net/pbr/ + Documentation = https://docs.openstack.org/pbr/ + Source Code = https://git.openstack.org/cgit/openstack-dev/pbr/ +summary = Test package for testing pbr +description-file = + README.txt + CHANGES.txt +description-content-type = text/plain; charset=UTF-8 +requires-python = >=2.5 + +requires-dist = + setuptools + +classifier = + Development Status :: 3 - Alpha + Intended Audience :: Developers + License :: OSI Approved :: BSD License + Programming Language :: Python + Topic :: Scientific/Engineering + Topic :: Software Development :: Build Tools + Topic :: Software Development :: Libraries :: Python Modules + Topic :: System :: Archiving :: Packaging + +keywords = packaging, distutils, setuptools + +[files] +packages = pbr_testpackage +package-data = testpackage = package_data/*.txt +data-files = testpackage/data_files = data_files/* +extra-files = extra-file.txt + +[entry_points] +console_scripts = + 
pbr_test_cmd = pbr_testpackage.cmd:main + pbr_test_cmd_with_class = pbr_testpackage.cmd:Foo.bar + +wsgi_scripts = + pbr_test_wsgi = pbr_testpackage.wsgi:main + pbr_test_wsgi_with_class = pbr_testpackage.wsgi:WSGI.app + +[extension=pbr_testpackage.testext] +sources = src/testext.c +optional = True + +[global] +#setup-hooks = +# pbr_testpackage._setup_hooks.test_hook_1 +# pbr_testpackage._setup_hooks.test_hook_2 +commands = pbr_testpackage._setup_hooks.test_command diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/setup.py b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/setup.py new file mode 100755 index 00000000..88666910 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/setup.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import setuptools + +setuptools.setup( + setup_requires=['pbr'], + pbr=True, +) diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/src/testext.c b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/src/testext.c new file mode 100644 index 00000000..1b366e9b --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/src/testext.c @@ -0,0 +1,29 @@ +#include + + +static PyMethodDef TestextMethods[] = { + {NULL, NULL, 0, NULL} +}; + + +#if PY_MAJOR_VERSION >=3 +static struct PyModuleDef testextmodule = { + PyModuleDef_HEAD_INIT, /* This should correspond to a PyModuleDef_Base type */ + "testext", /* This is the module name */ + "Test extension module", /* This is the module docstring */ + -1, /* This defines the size of the module and says everything is global */ + TestextMethods /* This is the method definition */ +}; + +PyObject* +PyInit_testext(void) +{ + return PyModule_Create(&testextmodule); +} +#else +PyMODINIT_FUNC +inittestext(void) +{ + Py_InitModule("testext", TestextMethods); +} +#endif diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/test-requirements.txt b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/test-requirements.txt new file mode 100644 index 00000000..8755eb4c --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/testpackage/test-requirements.txt @@ -0,0 +1,2 @@ +ordereddict;python_version=='2.6' +requests-mock diff --git a/pbr-0.43.11-py2.7.egg/pbr/tests/util.py b/pbr-0.43.11-py2.7.egg/pbr/tests/util.py new file mode 100644 index 00000000..0e7bcf15 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/tests/util.py @@ -0,0 +1,78 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Copyright (C) 2013 Association of Universities for Research in Astronomy +# (AURA) +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + +import contextlib +import os +import shutil +import stat +import sys + +try: + import ConfigParser as configparser +except ImportError: + import configparser + + +@contextlib.contextmanager +def open_config(filename): + if sys.version_info >= (3, 2): + cfg = configparser.ConfigParser() + else: + cfg = configparser.SafeConfigParser() + cfg.read(filename) + yield cfg + with open(filename, 'w') as fp: + cfg.write(fp) + + +def rmtree(path): + """shutil.rmtree() with error handler. + + Handle 'access denied' from trying to delete read-only files. + """ + + def onerror(func, path, exc_info): + if not os.access(path, os.W_OK): + os.chmod(path, stat.S_IWUSR) + func(path) + else: + raise + + return shutil.rmtree(path, onerror=onerror) diff --git a/pbr-0.43.11-py2.7.egg/pbr/util.py b/pbr-0.43.11-py2.7.egg/pbr/util.py new file mode 100644 index 00000000..31a2a262 --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/util.py @@ -0,0 +1,609 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Copyright (C) 2013 Association of Universities for Research in Astronomy +# (AURA) +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS +# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +# DAMAGE. + +"""The code in this module is mostly copy/pasted out of the distutils2 source +code, as recommended by Tarek Ziade. As such, it may be subject to some change +as distutils2 development continues, and will have to be kept up to date. + +I didn't want to use it directly from distutils2 itself, since I do not want it +to be an installation dependency for our packages yet--it is still too unstable +(the latest version on PyPI doesn't even install). 
+""" + +# These first two imports are not used, but are needed to get around an +# irritating Python bug that can crop up when using ./setup.py test. +# See: http://www.eby-sarna.com/pipermail/peak/2010-May/003355.html +try: + import multiprocessing # flake8: noqa +except ImportError: + pass +import logging # flake8: noqa + +import os +import re +import sys +import traceback + +from collections import defaultdict + +import distutils.ccompiler +import pkg_resources + +from distutils import log +from distutils import errors +from setuptools.command.egg_info import manifest_maker +from setuptools import dist as st_dist +from setuptools import extension + +try: + import ConfigParser as configparser +except ImportError: + import configparser + +from pbr import extra_files +import pbr.hooks + +# A simplified RE for this; just checks that the line ends with version +# predicates in () +_VERSION_SPEC_RE = re.compile(r'\s*(.*?)\s*\((.*)\)\s*$') + + +# Mappings from setup() keyword arguments to setup.cfg options; +# The values are (section, option) tuples, or simply (section,) tuples if +# the option has the same name as the setup() argument +D1_D2_SETUP_ARGS = { + "name": ("metadata",), + "version": ("metadata",), + "author": ("metadata",), + "author_email": ("metadata",), + "maintainer": ("metadata",), + "maintainer_email": ("metadata",), + "url": ("metadata", "home_page"), + "project_urls": ("metadata",), + "description": ("metadata", "summary"), + "keywords": ("metadata",), + "long_description": ("metadata", "description"), + "long_description_content_type": ("metadata", "description_content_type"), + "download_url": ("metadata",), + "classifiers": ("metadata", "classifier"), + "platforms": ("metadata", "platform"), # ** + "license": ("metadata",), + # Use setuptools install_requires, not + # broken distutils requires + "install_requires": ("metadata", "requires_dist"), + "setup_requires": ("metadata", "setup_requires_dist"), + "provides": ("metadata", "provides_dist"), 
# ** + "obsoletes": ("metadata", "obsoletes_dist"), # ** + "package_dir": ("files", 'packages_root'), + "packages": ("files",), + "package_data": ("files",), + "namespace_packages": ("files",), + "data_files": ("files",), + "scripts": ("files",), + "py_modules": ("files", "modules"), # ** + "cmdclass": ("global", "commands"), + # Not supported in distutils2, but provided for + # backwards compatibility with setuptools + "use_2to3": ("backwards_compat", "use_2to3"), + "zip_safe": ("backwards_compat", "zip_safe"), + "tests_require": ("backwards_compat", "tests_require"), + "dependency_links": ("backwards_compat",), + "include_package_data": ("backwards_compat",), +} + +# setup() arguments that can have multiple values in setup.cfg +MULTI_FIELDS = ("classifiers", + "platforms", + "install_requires", + "provides", + "obsoletes", + "namespace_packages", + "packages", + "package_data", + "data_files", + "scripts", + "py_modules", + "dependency_links", + "setup_requires", + "tests_require", + "cmdclass") + +# setup() arguments that can have mapping values in setup.cfg +MAP_FIELDS = ("project_urls",) + +# setup() arguments that contain boolean values +BOOL_FIELDS = ("use_2to3", "zip_safe", "include_package_data") + + +CSV_FIELDS = ("keywords",) + + +def resolve_name(name): + """Resolve a name like ``module.object`` to an object and return it. + + Raise ImportError if the module or name is not found. 
+ """ + + parts = name.split('.') + cursor = len(parts) - 1 + module_name = parts[:cursor] + attr_name = parts[-1] + + while cursor > 0: + try: + ret = __import__('.'.join(module_name), fromlist=[attr_name]) + break + except ImportError: + if cursor == 0: + raise + cursor -= 1 + module_name = parts[:cursor] + attr_name = parts[cursor] + ret = '' + + for part in parts[cursor:]: + try: + ret = getattr(ret, part) + except AttributeError: + raise ImportError(name) + + return ret + + +def cfg_to_args(path='setup.cfg', script_args=()): + """Distutils2 to distutils1 compatibility util. + + This method uses an existing setup.cfg to generate a dictionary of + keywords that can be used by distutils.core.setup(kwargs**). + + :param path: + The setup.cfg path. + :param script_args: + List of commands setup.py was called with. + :raises DistutilsFileError: + When the setup.cfg file is not found. + """ + + # The method source code really starts here. + if sys.version_info >= (3, 2): + parser = configparser.ConfigParser() + else: + parser = configparser.SafeConfigParser() + if not os.path.exists(path): + raise errors.DistutilsFileError("file '%s' does not exist" % + os.path.abspath(path)) + try: + parser.read(path, encoding='utf-8') + except TypeError: + # Python 2 doesn't accept the encoding kwarg + parser.read(path) + config = {} + for section in parser.sections(): + config[section] = dict() + for k, value in parser.items(section): + config[section][k.replace('-', '_')] = value + + # Run setup_hooks, if configured + setup_hooks = has_get_option(config, 'global', 'setup_hooks') + package_dir = has_get_option(config, 'files', 'packages_root') + + # Add the source package directory to sys.path in case it contains + # additional hooks, and to make sure it's on the path before any existing + # installations of the package + if package_dir: + package_dir = os.path.abspath(package_dir) + sys.path.insert(0, package_dir) + + try: + if setup_hooks: + setup_hooks = [ + hook for hook in 
split_multiline(setup_hooks) + if hook != 'pbr.hooks.setup_hook'] + for hook in setup_hooks: + hook_fn = resolve_name(hook) + try : + hook_fn(config) + except SystemExit: + log.error('setup hook %s terminated the installation') + except: + e = sys.exc_info()[1] + log.error('setup hook %s raised exception: %s\n' % + (hook, e)) + log.error(traceback.format_exc()) + sys.exit(1) + + # Run the pbr hook + pbr.hooks.setup_hook(config) + + kwargs = setup_cfg_to_setup_kwargs(config, script_args) + + # Set default config overrides + kwargs['include_package_data'] = True + kwargs['zip_safe'] = False + + register_custom_compilers(config) + + ext_modules = get_extension_modules(config) + if ext_modules: + kwargs['ext_modules'] = ext_modules + + entry_points = get_entry_points(config) + if entry_points: + kwargs['entry_points'] = entry_points + + # Handle the [files]/extra_files option + files_extra_files = has_get_option(config, 'files', 'extra_files') + if files_extra_files: + extra_files.set_extra_files(split_multiline(files_extra_files)) + + finally: + # Perform cleanup if any paths were added to sys.path + if package_dir: + sys.path.pop(0) + + return kwargs + + +def setup_cfg_to_setup_kwargs(config, script_args=()): + """Processes the setup.cfg options and converts them to arguments accepted + by setuptools' setup() function. + """ + + kwargs = {} + + # Temporarily holds install_requires and extra_requires while we + # parse env_markers. + all_requirements = {} + + for arg in D1_D2_SETUP_ARGS: + if len(D1_D2_SETUP_ARGS[arg]) == 2: + # The distutils field name is different than distutils2's. + section, option = D1_D2_SETUP_ARGS[arg] + + elif len(D1_D2_SETUP_ARGS[arg]) == 1: + # The distutils field name is the same thant distutils2's. 
+ section = D1_D2_SETUP_ARGS[arg][0] + option = arg + + in_cfg_value = has_get_option(config, section, option) + if not in_cfg_value: + # There is no such option in the setup.cfg + if arg == "long_description": + in_cfg_value = has_get_option(config, section, + "description_file") + if in_cfg_value: + in_cfg_value = split_multiline(in_cfg_value) + value = '' + for filename in in_cfg_value: + description_file = open(filename) + try: + value += description_file.read().strip() + '\n\n' + finally: + description_file.close() + in_cfg_value = value + else: + continue + + if arg in CSV_FIELDS: + in_cfg_value = split_csv(in_cfg_value) + if arg in MULTI_FIELDS: + in_cfg_value = split_multiline(in_cfg_value) + elif arg in MAP_FIELDS: + in_cfg_map = {} + for i in split_multiline(in_cfg_value): + k, v = i.split('=') + in_cfg_map[k.strip()] = v.strip() + in_cfg_value = in_cfg_map + elif arg in BOOL_FIELDS: + # Provide some flexibility here... + if in_cfg_value.lower() in ('true', 't', '1', 'yes', 'y'): + in_cfg_value = True + else: + in_cfg_value = False + + if in_cfg_value: + if arg in ('install_requires', 'tests_require'): + # Replaces PEP345-style version specs with the sort expected by + # setuptools + in_cfg_value = [_VERSION_SPEC_RE.sub(r'\1\2', pred) + for pred in in_cfg_value] + if arg == 'install_requires': + # Split install_requires into package,env_marker tuples + # These will be re-assembled later + install_requires = [] + requirement_pattern = '(?P[^;]*);?(?P[^#]*?)(?:\s*#.*)?$' + for requirement in in_cfg_value: + m = re.match(requirement_pattern, requirement) + requirement_package = m.group('package').strip() + env_marker = m.group('env_marker').strip() + install_requires.append((requirement_package,env_marker)) + all_requirements[''] = install_requires + elif arg == 'package_dir': + in_cfg_value = {'': in_cfg_value} + elif arg in ('package_data', 'data_files'): + data_files = {} + firstline = True + prev = None + for line in in_cfg_value: + if '=' in line: + 
key, value = line.split('=', 1) + key, value = (key.strip(), value.strip()) + if key in data_files: + # Multiple duplicates of the same package name; + # this is for backwards compatibility of the old + # format prior to d2to1 0.2.6. + prev = data_files[key] + prev.extend(value.split()) + else: + prev = data_files[key.strip()] = value.split() + elif firstline: + raise errors.DistutilsOptionError( + 'malformed package_data first line %r (misses ' + '"=")' % line) + else: + prev.extend(line.strip().split()) + firstline = False + if arg == 'data_files': + # the data_files value is a pointlessly different structure + # from the package_data value + data_files = data_files.items() + in_cfg_value = data_files + elif arg == 'cmdclass': + cmdclass = {} + dist = st_dist.Distribution() + for cls_name in in_cfg_value: + cls = resolve_name(cls_name) + cmd = cls(dist) + cmdclass[cmd.get_command_name()] = cls + in_cfg_value = cmdclass + + kwargs[arg] = in_cfg_value + + # Transform requirements with embedded environment markers to + # setuptools' supported marker-per-requirement format. + # + # install_requires are treated as a special case of extras, before + # being put back in the expected place + # + # fred = + # foo:marker + # bar + # -> {'fred': ['bar'], 'fred:marker':['foo']} + + if 'extras' in config: + requirement_pattern = '(?P[^:]*):?(?P[^#]*?)(?:\s*#.*)?$' + extras = config['extras'] + # Add contents of test-requirements, if any, into an extra named + # 'test' if one does not already exist. 
+ if 'test' not in extras: + from pbr import packaging + extras['test'] = "\n".join(packaging.parse_requirements( + packaging.TEST_REQUIREMENTS_FILES)).replace(';', ':') + + for extra in extras: + extra_requirements = [] + requirements = split_multiline(extras[extra]) + for requirement in requirements: + m = re.match(requirement_pattern, requirement) + extras_value = m.group('package').strip() + env_marker = m.group('env_marker') + extra_requirements.append((extras_value,env_marker)) + all_requirements[extra] = extra_requirements + + # Transform the full list of requirements into: + # - install_requires, for those that have no extra and no + # env_marker + # - named extras, for those with an extra name (which may include + # an env_marker) + # - and as a special case, install_requires with an env_marker are + # treated as named extras where the name is the empty string + + extras_require = {} + for req_group in all_requirements: + for requirement, env_marker in all_requirements[req_group]: + if env_marker: + extras_key = '%s:(%s)' % (req_group, env_marker) + # We do not want to poison wheel creation with locally + # evaluated markers. sdists always re-create the egg_info + # and as such do not need guarded, and pip will never call + # multiple setup.py commands at once. + if 'bdist_wheel' not in script_args: + try: + if pkg_resources.evaluate_marker('(%s)' % env_marker): + extras_key = req_group + except SyntaxError: + log.error( + "Marker evaluation failed, see the following " + "error. 
For more information see: " + "http://docs.openstack.org/" + "developer/pbr/compatibility.html#evaluate-marker" + ) + raise + else: + extras_key = req_group + extras_require.setdefault(extras_key, []).append(requirement) + + kwargs['install_requires'] = extras_require.pop('', []) + kwargs['extras_require'] = extras_require + + return kwargs + + +def register_custom_compilers(config): + """Handle custom compilers; this has no real equivalent in distutils, where + additional compilers could only be added programmatically, so we have to + hack it in somehow. + """ + + compilers = has_get_option(config, 'global', 'compilers') + if compilers: + compilers = split_multiline(compilers) + for compiler in compilers: + compiler = resolve_name(compiler) + + # In distutils2 compilers these class attributes exist; for + # distutils1 we just have to make something up + if hasattr(compiler, 'name'): + name = compiler.name + else: + name = compiler.__name__ + if hasattr(compiler, 'description'): + desc = compiler.description + else: + desc = 'custom compiler %s' % name + + module_name = compiler.__module__ + # Note; this *will* override built in compilers with the same name + # TODO: Maybe display a warning about this? + cc = distutils.ccompiler.compiler_class + cc[name] = (module_name, compiler.__name__, desc) + + # HACK!!!! Distutils assumes all compiler modules are in the + # distutils package + sys.modules['distutils.' 
+ module_name] = sys.modules[module_name] + + +def get_extension_modules(config): + """Handle extension modules""" + + EXTENSION_FIELDS = ("sources", + "include_dirs", + "define_macros", + "undef_macros", + "library_dirs", + "libraries", + "runtime_library_dirs", + "extra_objects", + "extra_compile_args", + "extra_link_args", + "export_symbols", + "swig_opts", + "depends") + + ext_modules = [] + for section in config: + if ':' in section: + labels = section.split(':', 1) + else: + # Backwards compatibility for old syntax; don't use this though + labels = section.split('=', 1) + labels = [l.strip() for l in labels] + if (len(labels) == 2) and (labels[0] == 'extension'): + ext_args = {} + for field in EXTENSION_FIELDS: + value = has_get_option(config, section, field) + # All extension module options besides name can have multiple + # values + if not value: + continue + value = split_multiline(value) + if field == 'define_macros': + macros = [] + for macro in value: + macro = macro.split('=', 1) + if len(macro) == 1: + macro = (macro[0].strip(), None) + else: + macro = (macro[0].strip(), macro[1].strip()) + macros.append(macro) + value = macros + ext_args[field] = value + if ext_args: + if 'name' not in ext_args: + ext_args['name'] = labels[1] + ext_modules.append(extension.Extension(ext_args.pop('name'), + **ext_args)) + return ext_modules + + +def get_entry_points(config): + """Process the [entry_points] section of setup.cfg to handle setuptools + entry points. This is, of course, not a standard feature of + distutils2/packaging, but as there is not currently a standard alternative + in packaging, we provide support for them. 
+ """ + + if not 'entry_points' in config: + return {} + + return dict((option, split_multiline(value)) + for option, value in config['entry_points'].items()) + + +def has_get_option(config, section, option): + if section in config and option in config[section]: + return config[section][option] + else: + return False + + +def split_multiline(value): + """Special behaviour when we have a multi line options""" + + value = [element for element in + (line.strip() for line in value.split('\n')) + if element and not element.startswith('#')] + return value + + +def split_csv(value): + """Special behaviour when we have a comma separated options""" + + value = [element for element in + (chunk.strip() for chunk in value.split(',')) + if element] + return value + + +# The following classes are used to hack Distribution.command_options a bit +class DefaultGetDict(defaultdict): + """Like defaultdict, but the get() method also sets and returns the default + value. + """ + + def get(self, key, default=None): + if default is None: + default = self.default_factory() + return super(DefaultGetDict, self).setdefault(key, default) diff --git a/pbr-0.43.11-py2.7.egg/pbr/version.py b/pbr-0.43.11-py2.7.egg/pbr/version.py new file mode 100644 index 00000000..5eb217af --- /dev/null +++ b/pbr-0.43.11-py2.7.egg/pbr/version.py @@ -0,0 +1,483 @@ + +# Copyright 2012 OpenStack Foundation +# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +""" +Utilities for consuming the version from pkg_resources. +""" + +import itertools +import operator +import sys + + +def _is_int(string): + try: + int(string) + return True + except ValueError: + return False + + +class SemanticVersion(object): + """A pure semantic version independent of serialisation. + + See the pbr doc 'semver' for details on the semantics. + """ + + def __init__( + self, major, minor=0, patch=0, prerelease_type=None, + prerelease=None, dev_count=None): + """Create a SemanticVersion. + + :param major: Major component of the version. + :param minor: Minor component of the version. Defaults to 0. + :param patch: Patch level component. Defaults to 0. + :param prerelease_type: What sort of prerelease version this is - + one of a(alpha), b(beta) or rc(release candidate). + :param prerelease: For prerelease versions, what number prerelease. + Defaults to 0. + :param dev_count: How many commits since the last release. + """ + self._major = major + self._minor = minor + self._patch = patch + self._prerelease_type = prerelease_type + self._prerelease = prerelease + if self._prerelease_type and not self._prerelease: + self._prerelease = 0 + self._dev_count = dev_count or 0 # Normalise 0 to None. + + def __eq__(self, other): + if not isinstance(other, SemanticVersion): + return False + return self.__dict__ == other.__dict__ + + def __hash__(self): + return sum(map(hash, self.__dict__.values())) + + def _sort_key(self): + """Return a key for sorting SemanticVersion's on.""" + # key things: + # - final is after rc's, so we make that a/b/rc/z + # - dev==None is after all other devs, so we use sys.maxsize there. + # - unqualified dev releases come before any pre-releases. + # So we do: + # (major, minor, patch) - gets the major grouping. + # (0|1) unqualified dev flag + # (a/b/rc/z) - release segment grouping + # pre-release level + # dev count, maxsize for releases. 
+ rc_lookup = {'a': 'a', 'b': 'b', 'rc': 'rc', None: 'z'} + if self._dev_count and not self._prerelease_type: + uq_dev = 0 + else: + uq_dev = 1 + return ( + self._major, self._minor, self._patch, + uq_dev, + rc_lookup[self._prerelease_type], self._prerelease, + self._dev_count or sys.maxsize) + + def __lt__(self, other): + """Compare self and other, another Semantic Version.""" + # NB(lifeless) this could perhaps be rewritten as + # lt (tuple_of_one, tuple_of_other) with a single check for + # the typeerror corner cases - that would likely be faster + # if this ever becomes performance sensitive. + if not isinstance(other, SemanticVersion): + raise TypeError("ordering to non-SemanticVersion is undefined") + return self._sort_key() < other._sort_key() + + def __le__(self, other): + return self == other or self < other + + def __ge__(self, other): + return not self < other + + def __gt__(self, other): + return not self <= other + + def __ne__(self, other): + return not self == other + + def __repr__(self): + return "pbr.version.SemanticVersion(%s)" % self.release_string() + + @classmethod + def from_pip_string(klass, version_string): + """Create a SemanticVersion from a pip version string. + + This method will parse a version like 1.3.0 into a SemanticVersion. + + This method is responsible for accepting any version string that any + older version of pbr ever created. + + Therefore: versions like 1.3.0a1 versions are handled, parsed into a + canonical form and then output - resulting in 1.3.0.0a1. + Pre pbr-semver dev versions like 0.10.1.3.g83bef74 will be parsed but + output as 0.10.1.dev3.g83bef74. + + :raises ValueError: Never tagged versions sdisted by old pbr result in + just the git hash, e.g. '1234567' which poses a substantial problem + since they collide with the semver versions when all the digits are + numerals. Such versions will result in a ValueError being thrown if + any non-numeric digits are present. 
They are an exception to the + general case of accepting anything we ever output, since they were + never intended and would permanently mess up versions on PyPI if + ever released - we're treating that as a critical bug that we ever + made them and have stopped doing that. + """ + + try: + return klass._from_pip_string_unsafe(version_string) + except IndexError: + raise ValueError("Invalid version %r" % version_string) + + @classmethod + def _from_pip_string_unsafe(klass, version_string): + # Versions need to start numerically, ignore if not + version_string = version_string.lstrip('vV') + if not version_string[:1].isdigit(): + raise ValueError("Invalid version %r" % version_string) + input_components = version_string.split('.') + # decimals first (keep pre-release and dev/hashes to the right) + components = [c for c in input_components if c.isdigit()] + digit_len = len(components) + if digit_len == 0: + raise ValueError("Invalid version %r" % version_string) + elif digit_len < 3: + if (digit_len < len(input_components) and + input_components[digit_len][0].isdigit()): + # Handle X.YaZ - Y is a digit not a leadin to pre-release. 
+ mixed_component = input_components[digit_len] + last_component = ''.join(itertools.takewhile( + lambda x: x.isdigit(), mixed_component)) + components.append(last_component) + input_components[digit_len:digit_len + 1] = [ + last_component, mixed_component[len(last_component):]] + digit_len += 1 + components.extend([0] * (3 - digit_len)) + components.extend(input_components[digit_len:]) + major = int(components[0]) + minor = int(components[1]) + dev_count = None + post_count = None + prerelease_type = None + prerelease = None + + def _parse_type(segment): + # Discard leading digits (the 0 in 0a1) + isdigit = operator.methodcaller('isdigit') + segment = ''.join(itertools.dropwhile(isdigit, segment)) + isalpha = operator.methodcaller('isalpha') + prerelease_type = ''.join(itertools.takewhile(isalpha, segment)) + prerelease = segment[len(prerelease_type)::] + return prerelease_type, int(prerelease) + if _is_int(components[2]): + patch = int(components[2]) + else: + # legacy version e.g. 1.2.0a1 (canonical is 1.2.0.0a1) + # or 1.2.dev4.g1234 or 1.2.b4 + patch = 0 + components[2:2] = [0] + remainder = components[3:] + remainder_starts_with_int = False + try: + if remainder and int(remainder[0]): + remainder_starts_with_int = True + except ValueError: + pass + if remainder_starts_with_int: + # old dev format - 0.1.2.3.g1234 + dev_count = int(remainder[0]) + else: + if remainder and (remainder[0][0] == '0' or + remainder[0][0] in ('a', 'b', 'r')): + # Current RC/beta layout + prerelease_type, prerelease = _parse_type(remainder[0]) + remainder = remainder[1:] + while remainder: + component = remainder[0] + if component.startswith('dev'): + dev_count = int(component[3:]) + elif component.startswith('post'): + dev_count = None + post_count = int(component[4:]) + else: + raise ValueError( + 'Unknown remainder %r in %r' + % (remainder, version_string)) + remainder = remainder[1:] + result = SemanticVersion( + major, minor, patch, prerelease_type=prerelease_type, + 
prerelease=prerelease, dev_count=dev_count) + if post_count: + if dev_count: + raise ValueError( + 'Cannot combine postN and devN - no mapping in %r' + % (version_string,)) + result = result.increment().to_dev(post_count) + return result + + def brief_string(self): + """Return the short version minus any alpha/beta tags.""" + return "%s.%s.%s" % (self._major, self._minor, self._patch) + + def debian_string(self): + """Return the version number to use when building a debian package. + + This translates the PEP440/semver precedence rules into Debian version + sorting operators. + """ + return self._long_version("~") + + def decrement(self): + """Return a decremented SemanticVersion. + + Decrementing versions doesn't make a lot of sense - this method only + exists to support rendering of pre-release versions strings into + serialisations (such as rpm) with no sort-before operator. + + The 9999 magic version component is from the spec on this - pbr-semver. + + :return: A new SemanticVersion object. + """ + if self._patch: + new_patch = self._patch - 1 + new_minor = self._minor + new_major = self._major + else: + new_patch = 9999 + if self._minor: + new_minor = self._minor - 1 + new_major = self._major + else: + new_minor = 9999 + if self._major: + new_major = self._major - 1 + else: + new_major = 0 + return SemanticVersion( + new_major, new_minor, new_patch) + + def increment(self, minor=False, major=False): + """Return an incremented SemanticVersion. + + The default behaviour is to perform a patch level increment. When + incrementing a prerelease version, the patch level is not changed + - the prerelease serial is changed (e.g. beta 0 -> beta 1). + + Incrementing non-pre-release versions will not introduce pre-release + versions - except when doing a patch incremental to a pre-release + version the new version will only consist of major/minor/patch. + + :param minor: Increment the minor version. + :param major: Increment the major version. 
+ :return: A new SemanticVersion object. + """ + if self._prerelease_type: + new_prerelease_type = self._prerelease_type + new_prerelease = self._prerelease + 1 + new_patch = self._patch + else: + new_prerelease_type = None + new_prerelease = None + new_patch = self._patch + 1 + if minor: + new_minor = self._minor + 1 + new_patch = 0 + new_prerelease_type = None + new_prerelease = None + else: + new_minor = self._minor + if major: + new_major = self._major + 1 + new_minor = 0 + new_patch = 0 + new_prerelease_type = None + new_prerelease = None + else: + new_major = self._major + return SemanticVersion( + new_major, new_minor, new_patch, + new_prerelease_type, new_prerelease) + + def _long_version(self, pre_separator, rc_marker=""): + """Construct a long string version of this semver. + + :param pre_separator: What separator to use between components + that sort before rather than after. If None, use . and lower the + version number of the component to preserve sorting. (Used for + rpm support) + """ + if ((self._prerelease_type or self._dev_count) + and pre_separator is None): + segments = [self.decrement().brief_string()] + pre_separator = "." + else: + segments = [self.brief_string()] + if self._prerelease_type: + segments.append( + "%s%s%s%s" % (pre_separator, rc_marker, self._prerelease_type, + self._prerelease)) + if self._dev_count: + if not self._prerelease_type: + segments.append(pre_separator) + else: + segments.append('.') + segments.append('dev') + segments.append(self._dev_count) + return "".join(str(s) for s in segments) + + def release_string(self): + """Return the full version of the package. + + This including suffixes indicating VCS status. + """ + return self._long_version(".", "0") + + def rpm_string(self): + """Return the version number to use when building an RPM package. + + This translates the PEP440/semver precedence rules into RPM version + sorting operators. 
Because RPM has no sort-before operator (such as the + ~ operator in dpkg), we show all prerelease versions as being versions + of the release before. + """ + return self._long_version(None) + + def to_dev(self, dev_count): + """Return a development version of this semver. + + :param dev_count: The number of commits since the last release. + """ + return SemanticVersion( + self._major, self._minor, self._patch, self._prerelease_type, + self._prerelease, dev_count=dev_count) + + def version_tuple(self): + """Present the version as a version_info tuple. + + For documentation on version_info tuples see the Python + documentation for sys.version_info. + + Since semver and PEP-440 represent overlapping but not subsets of + versions, we have to have some heuristic / mapping rules, and have + extended the releaselevel field to have alphadev, betadev and + candidatedev values. When they are present the dev count is used + to provide the serial. + - a/b/rc take precedence. + - if there is no pre-release version the dev version is used. + - serial is taken from the dev/a/b/c component. + - final non-dev versions never get serials. 
+ """ + segments = [self._major, self._minor, self._patch] + if self._prerelease_type: + type_map = {('a', False): 'alpha', + ('b', False): 'beta', + ('rc', False): 'candidate', + ('a', True): 'alphadev', + ('b', True): 'betadev', + ('rc', True): 'candidatedev', + } + segments.append( + type_map[(self._prerelease_type, bool(self._dev_count))]) + segments.append(self._dev_count or self._prerelease) + elif self._dev_count: + segments.append('dev') + segments.append(self._dev_count - 1) + else: + segments.append('final') + segments.append(0) + return tuple(segments) + + +class VersionInfo(object): + + def __init__(self, package): + """Object that understands versioning for a package + + :param package: name of the python package, such as glance, or + python-glanceclient + """ + self.package = package + self.version = None + self._cached_version = None + self._semantic = None + + def __str__(self): + """Make the VersionInfo object behave like a string.""" + return self.version_string() + + def __repr__(self): + """Include the name.""" + return "pbr.version.VersionInfo(%s:%s)" % ( + self.package, self.version_string()) + + def _get_version_from_pkg_resources(self): + """Obtain a version from pkg_resources or setup-time logic if missing. + + This will try to get the version of the package from the pkg_resources + record associated with the package, and if there is no such record + falls back to the logic sdist would use. + """ + # Lazy import because pkg_resources is costly to import so defer until + # we absolutely need it. + import pkg_resources + try: + requirement = pkg_resources.Requirement.parse(self.package) + provider = pkg_resources.get_provider(requirement) + result_string = provider.version + except pkg_resources.DistributionNotFound: + # The most likely cause for this is running tests in a tree + # produced from a tarball where the package itself has not been + # installed into anything. Revert to setup-time logic. 
+ from pbr import packaging + result_string = packaging.get_version(self.package) + return SemanticVersion.from_pip_string(result_string) + + def release_string(self): + """Return the full version of the package. + + This including suffixes indicating VCS status. + """ + return self.semantic_version().release_string() + + def semantic_version(self): + """Return the SemanticVersion object for this version.""" + if self._semantic is None: + self._semantic = self._get_version_from_pkg_resources() + return self._semantic + + def version_string(self): + """Return the short version minus any alpha/beta tags.""" + return self.semantic_version().brief_string() + + # Compatibility functions + canonical_version_string = version_string + version_string_with_vcs = release_string + + def cached_version_string(self, prefix=""): + """Return a cached version string. + + This will return a cached version string if one is already cached, + irrespective of prefix. If none is cached, one will be created with + prefix and then cached and returned. 
+ """ + if not self._cached_version: + self._cached_version = "%s%s" % (prefix, + self.version_string()) + return self._cached_version From 6a4fe613977fd05e4fd5175cb336742327f1ea59 Mon Sep 17 00:00:00 2001 From: Galit Date: Tue, 1 May 2018 11:54:42 +0300 Subject: [PATCH 12/22] removed comments --- lago/cmd.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/lago/cmd.py b/lago/cmd.py index 49eb297b..79e399f2 100755 --- a/lago/cmd.py +++ b/lago/cmd.py @@ -760,7 +760,6 @@ def do_collect(prefix, output, no_skip, **kwargs): def do_deploy(prefix, **kwargs): prefix.deploy() -###### @lago.plugins.cli.cli_plugin( help='Verify that the machine runninh Lago is well configured and configure if needed' ) @@ -768,7 +767,6 @@ def do_deploy(prefix, **kwargs): '--username', '-u', help='Which user needs to be configured', - #default=running_user, action='store', ) @@ -824,12 +822,10 @@ def do_setup( print("Please use 'sudo', you need adminstrator permissions for configuration") sys.exit(1) else: - # verify_lago.displayLagoStatus() fix_configuration(username,envs_dir,config_dict) config_dict = check_configuration(username,envs_dir) (verify_status,list_not_configure) = validate_status(config_dict) verify_lago.fixLagoConfiguration(config_dict,verify_status) - # verify_lago.displayLagoStatus() LOGGER.error("Problem to configure: %s", str(list_not_configure)) if verify_status: @@ -837,8 +833,6 @@ def do_setup( else: sys.exit(2) -###### - @lago.plugins.cli.cli_plugin(help="Dump configuration file") @lago.plugins.cli.cli_plugin_add_argument( '--verbose', From 73e46c881bf08314db132eca1119aacc92c7d011 Mon Sep 17 00:00:00 2001 From: Galit Date: Tue, 1 May 2018 11:59:03 +0300 Subject: [PATCH 13/22] removed comment from the import area --- lago/cmd.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lago/cmd.py b/lago/cmd.py index 79e399f2..b5482b98 100755 --- a/lago/cmd.py +++ b/lago/cmd.py @@ -36,7 +36,6 @@ from lago.config import config from lago import (log_utils, workdir as 
lago_workdir, utils, lago_ansible) from lago.utils import (in_prefix, with_logging, LagoUserException) -#import lago.verify_configuration as setup from lago.verify_configuration import (fix_configuration, check_configuration, check_user, check_directory,validate_status, VerifyLagoStatus) LOGGER = logging.getLogger('cli') From 67b89abc56c230e876fb67ebf9803595c53c4237 Mon Sep 17 00:00:00 2001 From: Galit Date: Tue, 1 May 2018 18:29:17 +0300 Subject: [PATCH 14/22] used getstatusoutput instead of getoutput and os.system --- lago/verify_configuration.py | 93 +++++++++++++++++++++++++----------- 1 file changed, 64 insertions(+), 29 deletions(-) diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py index 0c9136c0..708ca33c 100644 --- a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -132,14 +132,14 @@ def get_cpu_vendor(): """ Get the CPU vendor ie. intel/amd """ - Input = commands.getoutput("lscpu | awk '/Vendor ID/{print $3}'") + #Input = commands.getoutput("lscpu | awk '/Vendor ID/{print $3}'") + (exit_code,Input ) = exec_cmd("lscpu | awk '/Vendor ID/{print $3}'") if Input == 'GenuineIntel': vendor = "intel" elif vendor == 'AuthenticAMD': #print "amd" vendor = "amd" else: - #print "unrecognized CPU vendor: $vendor, only Intel/AMD are supported" vendor = "problem" return vendor @@ -147,7 +147,9 @@ def is_virtualization_enable(): """ Check if Virtualization enabled """ - res = commands.getoutput("cat /proc/cpuinfo | egrep 'vmx|svm'") + #res = commands.getoutput("cat /proc/cpuinfo | egrep 'vmx|svm'") + (exit_code,res) = exec_cmd("cat /proc/cpuinfo | egrep 'vmx|svm'") + if res == "": status = "N" else: @@ -158,7 +160,10 @@ def check_kvm_configure(vendor): """ Check if KVM configure """ - res = commands.getoutput("lsmod | grep kvm_"+vendor) + cmd = "lsmod | grep kvm_"+vendor + # res = commands.getoutput("lsmod | grep kvm_"+vendor) + (exit_code,res) = exec_cmd("lsmod | grep kvm_"+vendor) + if res == "": status = "N" else: @@ -171,7 
+176,8 @@ def check_nested(vendor): """ mod="kvm_"+vendor cmd = "cat /sys/module/"+mod+"/parameters/nested" - is_enabled= commands.getoutput(cmd) + # is_enabled= commands.getoutput(cmd) + (exit_code,is_enabled) = exec_cmd(cmd) if is_enabled == 'Y': return 'Y' else: @@ -182,9 +188,14 @@ def check_groups(username): Check the groups are confiugre correct for LAGO """ ## all groups username in - groups_username = commands.getoutput("groups " + username) + #groups_username = commands.getoutput("groups " + username) + cmd = "groups " + username + (exit_code,groups_username) = exec_cmd(cmd) + status_username = all(x in groups_username for x in ['qemu','libvirt','lago',username]) - groups_qemu = commands.getoutput("groups qemu") + #groups_qemu = commands.getoutput("groups qemu") + cmd = "groups qemu" + (exit_code,groups_qemu) = exec_cmd(cmd) status_qemu = all(x in groups_qemu for x in [username]) if ( status_username & status_qemu ): return 'Y' @@ -195,8 +206,10 @@ def change_groups(username): """ Update the groups according to LAGO permissions """ - os.system("usermod -a -G qemu,libvirt,lago " + username) - os.system("usermod -a -G " + username + " qemu" ) + #os.system("usermod -a -G qemu,libvirt,lago " + username) + exec_cmd("usermod -a -G qemu,libvirt,lago " + username) + #os.system("usermod -a -G " + username + " qemu" ) + exec_cmd("usermod -a -G " + username + " qemu") def check_home_dir_permmisions(): import stat @@ -212,7 +225,9 @@ def check_home_dir_permmisions(): def change_home_dir_permissions(): _USERNAME = os.getenv("SUDO_USER") or os.getenv("USER") _HOME = os.path.expanduser('~'+_USERNAME) - os.system("chmod g+x " + _HOME ) + #os.system("chmod g+x " + _HOME ) + exec_cmd("chmod g+x " + _HOME) + def remove_write_permissions(path): """Remove write permissions from this path, while keeping all other permissions intact. 
@@ -234,15 +249,18 @@ def check_permissions(envs_dirs,username): Check directory permissions """ status = True - uid = int(commands.getoutput("id -u " + username) ) - gid = int(commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") ) - + #uid = int(commands.getoutput("id -u " + username) ) + (exit_code,uid) = exec_cmd("id -u " + username) + #gid = int(commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") ) + (exit_code,gid) = exec_cmd("getent group " + username + " | awk -F: '{print $3}'") for dirpath, dirnames, filenames in os.walk(envs_dirs): + if ( os.stat(dirpath).st_uid != int(uid) ) & (os.stat(dirpath).st_gid != int(gid)): + status = False for dirname in dirnames: - if ( os.stat(os.path.join(dirpath, dirname)).st_uid != uid ) & (os.stat(os.path.join(dirpath, dirname)).st_gid != gid): + if ( os.stat(os.path.join(dirpath, dirname)).st_uid != int(uid) ) & (os.stat(os.path.join(dirpath, dirname)).st_gid != int(gid)): status = False for filename in filenames: - if ( os.stat(os.path.join(dirpath, filename)).st_uid != uid ) & (os.stat(os.path.join(dirpath, filename)).st_gid != gid): + if ( os.stat(os.path.join(dirpath, filename)).st_uid != int(uid) ) & (os.stat(os.path.join(dirpath, filename)).st_gid != int(gid)): status = False if ( status ): return 'Y' @@ -253,13 +271,16 @@ def change_permissions(envs_dirs,username): """ Change directory permissions """ - uid = int(commands.getoutput("id -u " + username) ) - gid = int(commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") ) + #uid = int(commands.getoutput("id -u " + username) ) + (exit_code,uid) = exec_cmd("id -u " + username) + #gid = int(commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") ) + (exit_code,gid) = exec_cmd("getent group " + username + " | awk -F: '{print $3}'") for dirpath, dirnames, filenames in os.walk(envs_dirs): + os.chown(dirpath, int(uid), int(gid)) for dirname in dirnames: - os.chown(os.path.join(dirpath, 
dirname), uid, gid) + os.chown(os.path.join(dirpath, dirname), int(uid), int(gid)) for filename in filenames: - os.chown(os.path.join(dirpath, filename), uid, gid) + os.chown(os.path.join(dirpath, filename), int(uid), int(gid)) def check_packages_installed(): """ @@ -271,7 +292,9 @@ def check_packages_installed(): pkg_list = ["mysql-community-server","epel-release", "centos-release-qemu-ev", "python-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm-ev"] else: pkg_list = ["python2-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm"] - rpm_output = commands.getoutput("rpm -qa ") + #rpm_output = commands.getoutput("rpm -qa ") + (exit_code,rpm_output) = exec_cmd("rpm -qa ") + for pkg in pkg_list: if pkg not in rpm_output: missing_pkg.append(pkg) @@ -283,10 +306,10 @@ def install_missing_packages(missing_pkg): Install missing packages """ for pkg in missing_pkg: - os.system("yum install -y " + pkg) + #os.system("yum install -y " + pkg) + exec_cmd("yum install -y " + pkg) def enable_nested(vendor): - print "Enabling nested virtualization..." 
filename = "/etc/modprobe.d/kvm-" + vendor + ".conf" file = open(filename,"a") file.write("options kvm-" + vendor + " nested=y" ) @@ -297,15 +320,15 @@ def reload_kvm(vendor): reload kvm """ mod = "kvm-" + vendor - print "Reloading kvm kernel module" - os.system("modprobe -r " + mod + " ; modprobe -r kvm ; modprobe kvm ; modprobe " + mod ) - + #os.system("modprobe -r " + mod + " ; modprobe -r kvm ; modprobe kvm ; modprobe " + mod ) + (exit_code,output) = exec_cmd("modprobe -r " + mod + " ; modprobe -r kvm ; modprobe kvm ; modprobe " + mod ) + def enable_service(service): """ enable service """ - os.system("systemctl enable " + service + "; systemctl restart " + service ) - + #os.system("systemctl enable " + service + "; systemctl restart " + service ) + exec_cmd("systemctl enable " + service + "; systemctl restart " + service ) def check_configure_ipv6_networking(): with open('/etc/sysctl.conf', 'r') as content_file: @@ -319,14 +342,17 @@ def configure_ipv6_networking(): file = open("/etc/sysctl.conf","a") file.write("net.ipv6.conf.all.accept_ra=2" ) file.close() - os.system("sysctl -p") + #os.system("sysctl -p") + print exec_cmd("sysctl -p") def check_user(username): """ Check if user exists in passwd """ msg="" - uid = commands.getoutput("id -u " + username) + #uid = commands.getoutput("id -u " + username) + (exit_code,uid) = exec_cmd("id -u " + username) + if "no such user" in uid: msg = "\'"+username+"\'"+ " username doesn't exists" return msg @@ -395,3 +421,12 @@ def fix_configuration(username,envs_dir,config_dict): reload_kvm(vendor) enable_service("libvirtd") + +def exec_cmd(cmd): + """ + Execute the requested command and return list with the cmd exit code and the output written to stdout/stderr. 
+ """ + (exit_code,output)= commands.getstatusoutput(cmd) + #print "Exit code:" + str(exit_code) + #print "Exit code:" + str(output) + return exit_code,output \ No newline at end of file From 4c45a4605e4fbaf059b26e07325b1a632aa298ca Mon Sep 17 00:00:00 2001 From: Galit Date: Tue, 1 May 2018 18:31:56 +0300 Subject: [PATCH 15/22] removed copmments --- lago/verify_configuration.py | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py index 708ca33c..7c96256e 100644 --- a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -132,12 +132,10 @@ def get_cpu_vendor(): """ Get the CPU vendor ie. intel/amd """ - #Input = commands.getoutput("lscpu | awk '/Vendor ID/{print $3}'") (exit_code,Input ) = exec_cmd("lscpu | awk '/Vendor ID/{print $3}'") if Input == 'GenuineIntel': vendor = "intel" elif vendor == 'AuthenticAMD': - #print "amd" vendor = "amd" else: vendor = "problem" @@ -147,7 +145,6 @@ def is_virtualization_enable(): """ Check if Virtualization enabled """ - #res = commands.getoutput("cat /proc/cpuinfo | egrep 'vmx|svm'") (exit_code,res) = exec_cmd("cat /proc/cpuinfo | egrep 'vmx|svm'") if res == "": @@ -161,7 +158,6 @@ def check_kvm_configure(vendor): Check if KVM configure """ cmd = "lsmod | grep kvm_"+vendor - # res = commands.getoutput("lsmod | grep kvm_"+vendor) (exit_code,res) = exec_cmd("lsmod | grep kvm_"+vendor) if res == "": @@ -176,7 +172,6 @@ def check_nested(vendor): """ mod="kvm_"+vendor cmd = "cat /sys/module/"+mod+"/parameters/nested" - # is_enabled= commands.getoutput(cmd) (exit_code,is_enabled) = exec_cmd(cmd) if is_enabled == 'Y': return 'Y' @@ -188,12 +183,9 @@ def check_groups(username): Check the groups are confiugre correct for LAGO """ ## all groups username in - #groups_username = commands.getoutput("groups " + username) cmd = "groups " + username (exit_code,groups_username) = exec_cmd(cmd) - status_username = all(x in groups_username for x in 
['qemu','libvirt','lago',username]) - #groups_qemu = commands.getoutput("groups qemu") cmd = "groups qemu" (exit_code,groups_qemu) = exec_cmd(cmd) status_qemu = all(x in groups_qemu for x in [username]) @@ -206,9 +198,7 @@ def change_groups(username): """ Update the groups according to LAGO permissions """ - #os.system("usermod -a -G qemu,libvirt,lago " + username) exec_cmd("usermod -a -G qemu,libvirt,lago " + username) - #os.system("usermod -a -G " + username + " qemu" ) exec_cmd("usermod -a -G " + username + " qemu") def check_home_dir_permmisions(): @@ -225,7 +215,6 @@ def check_home_dir_permmisions(): def change_home_dir_permissions(): _USERNAME = os.getenv("SUDO_USER") or os.getenv("USER") _HOME = os.path.expanduser('~'+_USERNAME) - #os.system("chmod g+x " + _HOME ) exec_cmd("chmod g+x " + _HOME) @@ -239,19 +228,15 @@ def remove_write_permissions(path): NO_GROUP_WRITING = ~stat.S_IWGRP NO_OTHER_WRITING = ~stat.S_IWOTH NO_WRITING = NO_USER_WRITING & NO_GROUP_WRITING & NO_OTHER_WRITING - current_permissions = stat.S_IMODE(os.lstat(path).st_mode) os.chmod(path, current_permissions & NO_WRITING) - def check_permissions(envs_dirs,username): """ Check directory permissions """ status = True - #uid = int(commands.getoutput("id -u " + username) ) (exit_code,uid) = exec_cmd("id -u " + username) - #gid = int(commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") ) (exit_code,gid) = exec_cmd("getent group " + username + " | awk -F: '{print $3}'") for dirpath, dirnames, filenames in os.walk(envs_dirs): if ( os.stat(dirpath).st_uid != int(uid) ) & (os.stat(dirpath).st_gid != int(gid)): @@ -271,9 +256,7 @@ def change_permissions(envs_dirs,username): """ Change directory permissions """ - #uid = int(commands.getoutput("id -u " + username) ) (exit_code,uid) = exec_cmd("id -u " + username) - #gid = int(commands.getoutput("getent group " + username + " | awk -F: '{print $3}'") ) (exit_code,gid) = exec_cmd("getent group " + username + " | awk -F: '{print 
$3}'") for dirpath, dirnames, filenames in os.walk(envs_dirs): os.chown(dirpath, int(uid), int(gid)) @@ -292,7 +275,6 @@ def check_packages_installed(): pkg_list = ["mysql-community-server","epel-release", "centos-release-qemu-ev", "python-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm-ev"] else: pkg_list = ["python2-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm"] - #rpm_output = commands.getoutput("rpm -qa ") (exit_code,rpm_output) = exec_cmd("rpm -qa ") for pkg in pkg_list: @@ -306,7 +288,6 @@ def install_missing_packages(missing_pkg): Install missing packages """ for pkg in missing_pkg: - #os.system("yum install -y " + pkg) exec_cmd("yum install -y " + pkg) def enable_nested(vendor): @@ -320,14 +301,12 @@ def reload_kvm(vendor): reload kvm """ mod = "kvm-" + vendor - #os.system("modprobe -r " + mod + " ; modprobe -r kvm ; modprobe kvm ; modprobe " + mod ) (exit_code,output) = exec_cmd("modprobe -r " + mod + " ; modprobe -r kvm ; modprobe kvm ; modprobe " + mod ) def enable_service(service): """ enable service """ - #os.system("systemctl enable " + service + "; systemctl restart " + service ) exec_cmd("systemctl enable " + service + "; systemctl restart " + service ) def check_configure_ipv6_networking(): @@ -342,7 +321,6 @@ def configure_ipv6_networking(): file = open("/etc/sysctl.conf","a") file.write("net.ipv6.conf.all.accept_ra=2" ) file.close() - #os.system("sysctl -p") print exec_cmd("sysctl -p") def check_user(username): @@ -350,7 +328,6 @@ def check_user(username): Check if user exists in passwd """ msg="" - #uid = commands.getoutput("id -u " + username) (exit_code,uid) = exec_cmd("id -u " + username) if "no such user" in uid: @@ -381,7 +358,6 @@ def check_configuration(username,envs_dir): (config_dict['install_pkg'],missing_pkg) = check_packages_installed() config_dict['home_permissions'] = 
check_home_dir_permmisions() config_dict['ipv6_networking'] = check_configure_ipv6_networking() - #return (groups,nested,virtualization,lago_env_dir,kvm_configure,install_pkg,home_permissions,ipv6_networking) return config_dict def fix_configuration(username,envs_dir,config_dict): From 38fad218acba5255e9f65cceb899fda1a6947029 Mon Sep 17 00:00:00 2001 From: Galit Date: Tue, 1 May 2018 18:46:16 +0300 Subject: [PATCH 16/22] fix the updated nested file --- lago/verify_configuration.py | 30 +++++++++--------------------- 1 file changed, 9 insertions(+), 21 deletions(-) diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py index 7c96256e..c2bcce37 100644 --- a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -18,20 +18,6 @@ # Refer to the README and COPYING files for full details of the license # -# permissions -# group -# and configure -# ask the user to run with sudo - -#groups qemu,libvirt - USERNAME -#groups USERNAME - qemu - -#/var/lib/lago -# owner USERNAME:USERNAME -# systemctl restart libvirtd - - - import os import commands import argparse @@ -96,7 +82,6 @@ def fixLagoConfiguration(self,config_dict,verify_status): self.ipv6_networking = config_dict['ipv6_networking'] VerifyLagoStatus.verificationStatus = verify_status - def return_status(self,status): """ Display OK or Not-OK @@ -217,7 +202,6 @@ def change_home_dir_permissions(): _HOME = os.path.expanduser('~'+_USERNAME) exec_cmd("chmod g+x " + _HOME) - def remove_write_permissions(path): """Remove write permissions from this path, while keeping all other permissions intact. 
@@ -292,9 +276,13 @@ def install_missing_packages(missing_pkg): def enable_nested(vendor): filename = "/etc/modprobe.d/kvm-" + vendor + ".conf" - file = open(filename,"a") - file.write("options kvm-" + vendor + " nested=y" ) - file.close() + line_to_write="options kvm-" + vendor + " nested=y" + with open(filename, 'r') as content_file: + content = content_file.read() + if "line_to_write" not in content: + file = open(filename,"a") + file.write("options kvm-" + vendor + " nested=y" ) + file.close() def reload_kvm(vendor): """ @@ -379,8 +367,8 @@ def fix_configuration(username,envs_dir,config_dict): if (config_dict['install_pkg'] == 'N'): print "Trying to fix missing packages... " - # (install_pkg,missing_pkg) = check_packages_installed() - # install_missing_packages(missing_pkg) + (install_pkg,missing_pkg) = check_packages_installed() + install_missing_packages(missing_pkg) if (config_dict['home_permissions'] == 'N'): print "Trying to fix home permissions... " From 7ed280534b748bd6e04996ff189606c35c85970f Mon Sep 17 00:00:00 2001 From: Galit Date: Tue, 1 May 2018 19:09:52 +0300 Subject: [PATCH 17/22] remove extra printing, and fix output if pass after configuring not to print anything --- lago/cmd.py | 4 ++-- lago/verify_configuration.py | 18 +++++++++--------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/lago/cmd.py b/lago/cmd.py index b5482b98..9d0ad5e1 100755 --- a/lago/cmd.py +++ b/lago/cmd.py @@ -825,11 +825,11 @@ def do_setup( config_dict = check_configuration(username,envs_dir) (verify_status,list_not_configure) = validate_status(config_dict) verify_lago.fixLagoConfiguration(config_dict,verify_status) - LOGGER.error("Problem to configure: %s", str(list_not_configure)) - + if verify_status: sys.exit(0) else: + LOGGER.error("Problem to configure: %s", str(list_not_configure)) sys.exit(2) @lago.plugins.cli.cli_plugin(help="Dump configuration file") diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py index 
c2bcce37..3e5d3e03 100644 --- a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -256,7 +256,7 @@ def check_packages_installed(): missing_pkg = [] status = "Y" if platform.linux_distribution()[0] == "CentOS Linux": - pkg_list = ["mysql-community-server","epel-release", "centos-release-qemu-ev", "python-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm-ev"] + pkg_list = ["epel-release", "centos-release-qemu-ev", "python-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm-ev"] else: pkg_list = ["python2-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm"] (exit_code,rpm_output) = exec_cmd("rpm -qa ") @@ -272,8 +272,8 @@ def install_missing_packages(missing_pkg): Install missing packages """ for pkg in missing_pkg: - exec_cmd("yum install -y " + pkg) - + (exit_code,res)=exec_cmd("yum install -y " + pkg) + def enable_nested(vendor): filename = "/etc/modprobe.d/kvm-" + vendor + ".conf" line_to_write="options kvm-" + vendor + " nested=y" @@ -358,29 +358,29 @@ def fix_configuration(username,envs_dir,config_dict): - kvm virtualization """ if (config_dict['lago_env_dir'] == 'N'): - print "Trying to fix env_dir permissions... " + #print "Trying to fix env_dir permissions... " change_permissions(envs_dir,username) if (config_dict['groups'] == 'N'): - print "Trying to fix group permissions... " + #print "Trying to fix group permissions... " change_groups(username) if (config_dict['install_pkg'] == 'N'): - print "Trying to fix missing packages... " + #print "Trying to fix missing packages... " (install_pkg,missing_pkg) = check_packages_installed() install_missing_packages(missing_pkg) if (config_dict['home_permissions'] == 'N'): - print "Trying to fix home permissions... " + #print "Trying to fix home permissions... 
" change_home_dir_permissions() if (config_dict['ipv6_networking'] == 'N'): - print "Trying to fix ipv6 configuration... " + #print "Trying to fix ipv6 configuration... " configure_ipv6_networking() vendor = get_cpu_vendor() if (config_dict['nested'] == 'N'): - print "Trying to enable nested ... " + #print "Trying to enable nested ... " enable_nested(vendor) reload_kvm(vendor) From e202d70fce1b267d41a5442ce10cac679f637ddf Mon Sep 17 00:00:00 2001 From: Galit Date: Wed, 2 May 2018 06:18:42 +0300 Subject: [PATCH 18/22] change the message to print a link to documentation, if an auto fix can't be done --- lago/cmd.py | 3 ++- lago/verify_configuration.py | 34 ++++++++++++++++++---------------- 2 files changed, 20 insertions(+), 17 deletions(-) diff --git a/lago/cmd.py b/lago/cmd.py index 9d0ad5e1..a0963256 100755 --- a/lago/cmd.py +++ b/lago/cmd.py @@ -811,7 +811,7 @@ def do_setup( verify_lago = VerifyLagoStatus(username,envs_dir,config_dict,verify_status) if (verify): - verify_lago.displayLagoStatus() + verify_lago.displayLagoStatus(True) if verify_status: sys.exit(0) else: @@ -830,6 +830,7 @@ def do_setup( sys.exit(0) else: LOGGER.error("Problem to configure: %s", str(list_not_configure)) + verify_lago.displayLagoStatus(False) sys.exit(2) @lago.plugins.cli.cli_plugin(help="Dump configuration file") diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py index 3e5d3e03..98a7492a 100644 --- a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -43,24 +43,26 @@ def __init__(self,username,envs_dir,config_dict,verify_status): self.ipv6_networking = config_dict['ipv6_networking'] VerifyLagoStatus.verificationStatus = verify_status - def displayLagoStatus(self): + def displayLagoStatus(self,display_list): """ Display Lago configuration status (OK/Not-OK) Verify Lago configuration """ - print "Configuration Status:" - print "=====================" - print "Username used by Lago: " + self.username - print "Environment directory used by Lago: " + 
self.envs_dir - print "Nested: " + self.return_status(self.nested) - print "Virtualization: " + self.return_status(self.virtualization) - print "Groups: " + self.return_status(self.groups) - print "Lago Environment Directory " + self.envs_dir + ": " + self.return_status(self.lago_env_dir) - print "Kvm Configure: " + self.return_status(self.kvm_configure) - print "All packages installed: " + self.return_status(self.install_pkg) - print "Home Directory permissions: " + self.return_status(self.home_permissions) - print "IPV6 configure: " + self.return_status(self.ipv6_networking) - - print "Status: " + str(VerifyLagoStatus.verificationStatus) + if (display_list): + print "Configuration Status:" + print "=====================" + print "Username used by Lago: " + self.username + print "Environment directory used by Lago: " + self.envs_dir + print "Nested: " + self.return_status(self.nested) + print "Virtualization: " + self.return_status(self.virtualization) + print "Groups: " + self.return_status(self.groups) + print "Lago Environment Directory " + self.envs_dir + ": " + self.return_status(self.lago_env_dir) + print "Kvm Configure: " + self.return_status(self.kvm_configure) + print "All packages installed: " + self.return_status(self.install_pkg) + print "Home Directory permissions: " + self.return_status(self.home_permissions) + print "IPV6 configure: " + self.return_status(self.ipv6_networking) + + print "Status: " + str(VerifyLagoStatus.verificationStatus) + if (VerifyLagoStatus.verificationStatus == False): print "Please read configuration setup:" print " http://lago.readthedocs.io/en/latest/Installation.html#troubleshooting" @@ -256,7 +258,7 @@ def check_packages_installed(): missing_pkg = [] status = "Y" if platform.linux_distribution()[0] == "CentOS Linux": - pkg_list = ["epel-release", "centos-release-qemu-ev", "python-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm-ev"] + pkg_list = 
["mysqll","epel-release", "centos-release-qemu-ev", "python-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm-ev"] else: pkg_list = ["python2-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm"] (exit_code,rpm_output) = exec_cmd("rpm -qa ") From 9b05fbbd6927514e215abd42f8a17c57ef0dc294 Mon Sep 17 00:00:00 2001 From: Galit Date: Wed, 2 May 2018 06:31:12 +0300 Subject: [PATCH 19/22] added logger to commands --- lago/verify_configuration.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py index 98a7492a..f47ff83d 100644 --- a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -24,6 +24,11 @@ import sys import getpass import platform +import logging + +from .utils import LagoUserException + +LOGGER = logging.getLogger(__name__) class VerifyLagoStatus(object): """ @@ -360,29 +365,23 @@ def fix_configuration(username,envs_dir,config_dict): - kvm virtualization """ if (config_dict['lago_env_dir'] == 'N'): - #print "Trying to fix env_dir permissions... " change_permissions(envs_dir,username) if (config_dict['groups'] == 'N'): - #print "Trying to fix group permissions... " change_groups(username) if (config_dict['install_pkg'] == 'N'): - #print "Trying to fix missing packages... " (install_pkg,missing_pkg) = check_packages_installed() install_missing_packages(missing_pkg) if (config_dict['home_permissions'] == 'N'): - #print "Trying to fix home permissions... " change_home_dir_permissions() if (config_dict['ipv6_networking'] == 'N'): - #print "Trying to fix ipv6 configuration... " configure_ipv6_networking() vendor = get_cpu_vendor() if (config_dict['nested'] == 'N'): - #print "Trying to enable nested ... 
" enable_nested(vendor) reload_kvm(vendor) @@ -393,6 +392,10 @@ def exec_cmd(cmd): Execute the requested command and return list with the cmd exit code and the output written to stdout/stderr. """ (exit_code,output)= commands.getstatusoutput(cmd) + if (exit_code == 0): + LOGGER.debug("Running command '%s' succeeded", cmd) + else: + LOGGER.debug("Running command '%s' failed", cmd) #print "Exit code:" + str(exit_code) #print "Exit code:" + str(output) return exit_code,output \ No newline at end of file From e9732c2c119a9721e46f8e261a45aedd58697b01 Mon Sep 17 00:00:00 2001 From: Galit Date: Wed, 2 May 2018 07:44:36 +0300 Subject: [PATCH 20/22] fixed the string to be a parameter --- lago/verify_configuration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py index f47ff83d..c68dd524 100644 --- a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -286,7 +286,7 @@ def enable_nested(vendor): line_to_write="options kvm-" + vendor + " nested=y" with open(filename, 'r') as content_file: content = content_file.read() - if "line_to_write" not in content: + if line_to_write not in content: file = open(filename,"a") file.write("options kvm-" + vendor + " nested=y" ) file.close() From d73752a2d3e24e3cfbb6d70a1fd0c1147bd3eaa5 Mon Sep 17 00:00:00 2001 From: Galit Date: Wed, 2 May 2018 08:19:47 +0300 Subject: [PATCH 21/22] removed test pkg --- lago/verify_configuration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py index c68dd524..466f9fb0 100644 --- a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -263,7 +263,7 @@ def check_packages_installed(): missing_pkg = [] status = "Y" if platform.linux_distribution()[0] == "CentOS Linux": - pkg_list = ["mysqll","epel-release", "centos-release-qemu-ev", "python-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", 
"libffi-devel", "openssl-devel", "qemu-kvm-ev"] + pkg_list = ["epel-release", "centos-release-qemu-ev", "python-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm-ev"] else: pkg_list = ["python2-devel", "libvirt", "libvirt-devel" , "libguestfs-tools", "libguestfs-devel", "gcc", "libffi-devel", "openssl-devel", "qemu-kvm"] (exit_code,rpm_output) = exec_cmd("rpm -qa ") From 2e86849d6bd01298745aa49d43865d82a9cb90f3 Mon Sep 17 00:00:00 2001 From: Galit Date: Wed, 2 May 2018 08:25:57 +0300 Subject: [PATCH 22/22] remove spaces --- lago/verify_configuration.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/lago/verify_configuration.py b/lago/verify_configuration.py index 466f9fb0..330e87e9 100644 --- a/lago/verify_configuration.py +++ b/lago/verify_configuration.py @@ -65,7 +65,6 @@ def displayLagoStatus(self,display_list): print "All packages installed: " + self.return_status(self.install_pkg) print "Home Directory permissions: " + self.return_status(self.home_permissions) print "IPV6 configure: " + self.return_status(self.ipv6_networking) - print "Status: " + str(VerifyLagoStatus.verificationStatus) if (VerifyLagoStatus.verificationStatus == False): @@ -107,7 +106,6 @@ def validate_status(list_status): if 'N' in list_status.itervalues(): status = False list_not_configure = [k for k,v in list_status.iteritems() if v == 'N'] - return status,list_not_configure def check_virtualization():