# (archive entries with no content shown in the dump: python_essentials/__init__.py, python_essentials/mount_utils.py)

################################################################################
# python_essentials/osm_postgis_transform.py
################################################################################
#!/usr/bin/python
# -*- coding: utf-8 -*-

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

# internal implementation notes:
# - @TODO: handle i18n for pexpect

# python-provided dependencies
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
import subprocess as sp
import os
import time
import argparse
import sys
import shutil

# project internal dependencies
import python_essentials
import python_essentials.lib
import python_essentials.lib.pm_utils as pm_utils
import python_essentials.lib.check_os as check_os
import python_essentials.lib.postgis_utils as postgis_utils
import python_essentials.lib.os_utils as os_utils

# external dependencies
try:
    import pexpect
    import plac
except ImportError as ex:
    logger.error("import of one of the modules %s failed. Did you run the osm_postgis_transform_prequisites.py script?"
        % ["pexpect", "plac"])

pg_version = (9, 2)  # @TODO: read from config file (but keep a programmatic default version (maybe share in a module))
pg_version_string = str.join(".", [str(i) for i in pg_version])
postgis_version = (2, 0)
postgis_version_string = str.join(".", [str(i) for i in postgis_version])
initdb = "/usr/lib/postgresql/%s/bin/initdb" % pg_version_string
postgres = "/usr/lib/postgresql/%s/bin/postgres" % pg_version_string
psql = "/usr/lib/postgresql/%s/bin/psql" % pg_version_string
createdb = "/usr/lib/postgresql/%s/bin/createdb" % pg_version_string
osm2pgsql_number_processes = int(sp.check_output(["grep", "-c", "^processor", "/proc/cpuinfo"]).strip())
db_socket_dir = "/tmp"
start_db_default = False
db_host_default = "localhost"
db_port_default = 5204
db_user_default = "postgis"
db_password_default = "postgis"
db_name_default = "postgis"
osm2pgsql_default = "osm2pgsql"
#config = python_essentials.create_config_parser(os.path.join(os.path.realpath(__file__), ".."))
data_dir_default = os.path.join(os.environ["HOME"], "osm_postgis_db-9.2")  #config.get('pathes', 'osm_postgis_dir_path', )
cache_size_default = 1000
# the time the script (main thread) waits for the postgres server to be available and accepting connections (in seconds)
postgres_server_start_timeout = 5

def a_list(arg):
    return arg.split(",")

# fails by default because osm_files mustn't be empty
# @args db_user when start_db is True used as superuser name, otherwise the user to connect as to the database denoted by the db_* parameters of this function
@plac.annotations(
    osm_files=("a comma (`,`) separated list of OSM files to be passed to osm2pgsql (gunzipped files are accepted if osm2pgsql accepts them (the version installed by osm_postgis_transform_prequisites does))", "positional", None, a_list),
    skip_start_db=("Specify this flag in order to feed the data to an already running postgres process which the script will attempt to connect to with the parameters specified by the `db-host`, `db-port`, `db-user`, `db-password` and `db-name` arguments.", "flag"),
    data_dir=("The directory which contains or will contain the data of the PostGIS database (see the documentation of the `-D` option in `man initdb` for further details). The directory will be created if it doesn't exist. If a file is passed as argument, the script will fail argument validation. The script will fail if the directory is an invalid PostGIS data directory (e.g. one which allows a partial start of a `postgres` process but contains invalid permissions or misses files). As soon as a non-empty directory is passed as argument, it is expected to be a valid PostGIS data directory! If the script fails due to an unexpected error, YOU have to take care of cleaning that directory of anything that wasn't in it before the script was invoked!", "option"),
    db_host=("The host where the nested database process should run (has to be specified if the default value isn't reachable) or the host where to reach the already running postgres process (see --skip-start-db for details)", "option"),
    db_port=("The port where the nested database process will be listening (has to be specified if the port denoted by the default value is occupied) or the port where to reach the already running postgres process (see --skip-start-db for details)", "option"),
    db_user=("name of the user to use for authentication at the database (will be created if the database doesn't exist) (see --skip-start-db for details)", "option"),
    db_password=("password for the user specified with the `--db-user` argument to use for authentication at the database (will be set up if the database doesn't exist) (see --skip-start-db for details)", "option"),
    db_name=("name of the database to connect to or to be created (see --skip-start-db for details)", "option"),
    cache_size=("size of the osm2pgsql cache (see the `--cache` option in `man osm2pgsql`)", "option"),
    osm2pgsql=("optional path to an osm2pgsql binary", "option"),
)
def osm_postgis_transform(osm_files, skip_start_db, data_dir=data_dir_default, db_host=db_host_default, db_port=db_port_default, db_user=db_user_default, db_password=db_password_default, db_name=db_name_default, cache_size=cache_size_default, osm2pgsql=osm2pgsql_default):
    # the text for the help transformed by plac:
    """
    This script sets up a PostGIS database with data from an OSM (.osm) file. It is essentially a wrapper around `osm2pgsql`. By default it will either spawn a database process based on the data directory specified with the `--data-dir` argument (if the data directory is non-empty) or create a database data directory, spawn a database process based on that newly created data directory and feed data to it. If the nested database process can't be connected to with the default values of the database connection parameters, they have to be overwritten, otherwise the script will fail with the error message of the `postgres` process. The start of a nested database process can be skipped if the `--skip-start-db` command line flag is set. In this case the database connection parameters will be used to connect to an external, already running `postgres` process where data will be fed to.

    WARNING: The script has not yet been tested completely to hide database credentials (including the password) from output and/or other logging backends (files, syslog, etc.). It is currently recommended to specify a separate database and local host for this script only and to not care about it at all (as OSM data is as far from a secret as it could be).
    """
    if osm_files is None:
        raise ValueError("osm_files mustn't be None")
    if not isinstance(osm_files, list):
        raise ValueError("osm_files has to be a list")
    if len(osm_files) == 0:
        raise ValueError("osm_files mustn't be empty")
    if pg_version == (9, 2):
        if postgis_version > (2, 0):
            raise ValueError("postgis > %s is not compatible with postgresql %s" % (postgis_version_string, pg_version_string))
    if data_dir is None:
        raise ValueError("data_dir mustn't be None")
    if os.path.exists(data_dir) and not os.path.isdir(data_dir):
        raise ValueError("data_dir '%s' exists, but isn't a directory" % (data_dir,))
    # always check, even after install_prequisites
    # @TODO: checking the binary name is not sufficient; necessary to evaluate the absolute path with respect to $PATH
    if os_utils.which(osm2pgsql) is None:
        raise RuntimeError("osm2pgsql not found, make sure you have invoked osm_postgis_transform_prequisites.py")
    # parsing
    # the postgres binary refuses to run when the process uid and the effective uid are not identical
    postgres_proc = None
    try:
        if not skip_start_db:
            # the database process is either started by postgis_utils.bootstrap_datadir or with pexpect.spawn if the data_dir isn't empty (indicating a start of the database based on an existing data directory)
            if not os.path.exists(data_dir) or len(os.listdir(data_dir)) == 0:
                logger.info("creating PostGIS data directory in data_dir '%s'" % (data_dir,))
                if not os.path.exists(data_dir):
                    logger.info("creating non-existing data_dir '%s'" % (data_dir,))
                    os.makedirs(data_dir)
                postgis_utils.bootstrap_datadir(data_dir, db_user, password=db_password, initdb=initdb)
                postgis_utils.bootstrap_database(data_dir, db_port, db_host, db_user, db_name, password=db_password, initdb=initdb, postgres=postgres, createdb=createdb, psql=psql, socket_dir=db_socket_dir)
            if postgres_proc is None:
                logger.info("spawning database process based on existing data directory '%s'" % (data_dir,))
                postgres_proc = pexpect.spawn(str.join(" ", [postgres, "-D", data_dir, "-p", str(db_port), "-h", db_host, "-k", db_socket_dir]))
                postgres_proc.logfile = sys.stdout
                logger.info("sleeping %s s to ensure the postgres server started" % postgres_server_start_timeout)
                time.sleep(postgres_server_start_timeout)  # not nice (should poll the connection until success instead)
        logger.debug("using osm2pgsql binary %s" % osm2pgsql)
        osm2pgsql_proc = pexpect.spawn(str.join(" ", [osm2pgsql, "--create", "--database", db_name, "--cache", str(cache_size), "--number-processes", str(osm2pgsql_number_processes), "--slim", "--port", str(db_port), "--host", db_host, "--username", db_user, "--latlong", "--password", "--keep-coastlines", "--extra-attributes", "--hstore-all"] + osm_files))
        osm2pgsql_proc.logfile = sys.stdout
        osm2pgsql_proc.expect(['Password:', "Passwort:"])
        osm2pgsql_proc.sendline(db_password)
        osm2pgsql_proc.timeout = 100000000
        osm2pgsql_proc.expect(pexpect.EOF)
    except Exception as ex:
        logger.error(ex)
    finally:
        if not postgres_proc is None:
            postgres_proc.terminate()  # there's no check for subprocess.Popen whether it is alive; subprocess.Popen.terminate can be invoked without risk on a terminated process

# internal implementation notes:
# - it would be nicer to validate the data directory rather than simply expect
#   it to be valid if it is non-empty

if __name__ == "__main__":
    plac.call(osm_postgis_transform)
################################################################################
# python_essentials/osm_postgis_transform_prequisites.py
################################################################################
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (GPL v3 license header identical to the one in osm_postgis_transform.py.)

# python-provided dependencies
import sys
import os
import subprocess as sp
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)

# project internal dependencies
import python_essentials
import python_essentials.lib
import python_essentials.lib.pm_utils as pm_utils
import python_essentials.lib.check_os as check_os
import python_essentials.lib.postgis_utils as postgis_utils
import python_essentials.lib.os_utils as os_utils

# external dependencies
import plac

postgis_src_dir_name = "postgis-2.1.1"
postgis_url_default = "http://download.osgeo.org/postgis/source/postgis-2.1.1.tar.gz"
postgis_src_archive_name = "postgis-2.1.1.tar.gz"
postgis_src_archive_md5 = "4af86a39e2e9dbf10fe894e03c2c7027"
postgis_jdbc_name = "postgis-jdbc-2.1.0SVN.jar"

@plac.annotations(
    skip_database_installation=("whether to skip the installation of postgresql and postgis related prerequisites", "flag"),
    skip_apt_update=("whether the (possibly time consuming) invocation of apt-get update ought to be skipped (if you have reason to be sure that your apt sources are quite up-to-date, e.g. if you invoked apt-get 5 minutes ago)", "flag"),
    postgis_url=("The URL where the postgis tarball ought to be retrieved", "option"),
)
def install_prequisites(skip_database_installation, skip_apt_update, postgis_url=postgis_url_default,):
    if check_os.check_ubuntu() or check_os.check_debian():
        if skip_database_installation:
            pm_utils.install_packages(["osm2pgsql"], package_manager="apt-get", skip_apt_update=skip_apt_update, assume_yes=False)
        else:
            release_tuple = check_os.findout_release_ubuntu_tuple()
            install_postgresql(skip_apt_update=skip_apt_update)
    else:
        if skip_database_installation:
            raise RuntimeError("implement simple installation of only the prerequisite osm2pgsql")
        else:
            install_postgresql(skip_apt_update=skip_apt_update)

def install_postgresql(skip_apt_update, pg_version=(9, 2),):
    if check_os.check_ubuntu() or check_os.check_debian() or check_os.check_linuxmint():
        # only add the apt source for ubuntu and debian and fail in linuxmint if
        # the release is < 17 where the packages are available
        if check_os.check_ubuntu() or check_os.check_debian():
            if check_os.check_ubuntu():
                release_tuple = check_os.findout_release_ubuntu_tuple()
                if release_tuple > (12, 4) and release_tuple < (13, 10):
                    release = "precise"  # the repository provides for precise, saucy and trusty
                else:
                    release = check_os.findout_release_ubuntu()
            elif check_os.check_debian():
                release = check_os.findout_release_debian()
        elif check_os.check_linuxmint():
            release = check_os.findout_release_linuxmint()
            if release < 17:
                raise RuntimeError("linuxmint releases < 17 aren't supported")
        else:
            raise RuntimeError("operating system not supported")
        apt_url = "http://apt.postgresql.org/pub/repos/apt/"
        distribution = "%s-pgdg" % release
        component = "main"
        if not pm_utils.check_apt_source_line_added(uri=apt_url, component=component, distribution=distribution, the_type="deb", augeas_root="/",):
            postgresql_sources_file_path = "/etc/apt/sources.list.d/postgresql.list"
            logger.info("adding postgresql apt source file '%s'" % (postgresql_sources_file_path,))
            postgresql_sources_file = open(postgresql_sources_file_path, "w")
            postgresql_sources_file.write("deb %s %s %s" % (apt_url, distribution, component,))
            postgresql_sources_file.flush()
            postgresql_sources_file.close()
            os.system("wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -")
            pm_utils.invalidate_apt()
        pg_version_string = str.join(".", [str(x) for x in pg_version])
        try:
            pm_utils.install_packages([
                "postgresql-%s" % pg_version_string,
                "postgresql-%s-postgis-2.1" % pg_version_string,
                "postgresql-%s-postgis-2.1-scripts" % pg_version_string,
                "postgresql-contrib-%s" % pg_version_string,
                "postgresql-client-common",  # version independent, no package per version
            ], package_manager="apt-get", skip_apt_update=skip_apt_update)
        except sp.CalledProcessError as ex:
            logger.info("postgresql installation failed (which MIGHT be caused by breakage of the apt package in Ubuntu 13.10)")
            #pm_utils.remove_packages(["postgresql", "postgresql-common"], package_manager="apt-get", skip_apt_update=skip_apt_update)
            #postgresql_deb_path = os.path.join(tmp_dir, postgresql_deb_name)
            #if not check_file(postgresql_deb_path, postgresql_deb_md5):
            #    do_wget(postgresql_deb_url, postgresql_deb_path)
            #sp.check_call([dpkg, "-i", postgresql_deb_path])
        psql = "/opt/postgres/%s/bin/psql" % pg_version_string
        initdb = "/opt/postgres/%s/bin/initdb" % pg_version_string
        createdb = "/opt/postgres/%s/bin/createdb" % pg_version_string
        postgres = "/opt/postgres/%s/bin/postgres" % pg_version_string
        # osm2pgsql
        pm_utils.install_packages(["osm2pgsql"], package_manager="apt-get", skip_apt_update=skip_apt_update)
    elif check_os.check_opensuse():
        if pg_version == (9, 2):
            sp.check_call(["zypper", "install", "postgresql", "postgresql-contrib", "postgresql-devel", "postgresql-server"])
            psql = "/usr/lib/postgresql92/bin/psql"
            initdb = "/usr/lib/postgresql92/bin/initdb"
            createdb = "/usr/lib/postgresql92/bin/createdb"
            postgres = "/usr/lib/postgresql92/bin/postgres"
        else:
            # better to let the script fail here than to get some less comprehensible error message later
            raise RuntimeError("postgresql version %s not supported" % str.join(".", [str(x) for x in pg_version]))
    else:
        # better to let the script fail here than to get some less comprehensible error message later
        raise RuntimeError("operating system not supported!")

if __name__ == "__main__":
    plac.call(install_prequisites)
# (archive entry with no content shown in the dump: python_essentials/lib/__init__.py)

################################################################################
# python_essentials/lib/file_line_utils.py
################################################################################
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (GPL v3 license header identical to the one in osm_postgis_transform.py.)

import re
import os
import check_os

# searches for lines which match old_line_re, replaces every occurrence with new_line and writes the changes back to the file.
# If no lines match, nothing happens effectively.
def file_replace_line(file_path, old_line_re, new_line):
    if file_path == None:
        raise ValueError("file_path mustn't be None")
    if not os.path.exists(file_path):
        create_file_wrapper(file_path)
    file_obj = open(file_path, "r")
    file_lines = file_obj.readlines()
    file_obj.close()
    new_file_lines = []
    for file_line in file_lines:
        if re.match(old_line_re, file_line) != None:
            new_file_lines.append(new_line)
        else:
            new_file_lines.append(file_line)
    file_obj = open(file_path, "w")
    file_obj.writelines(new_file_lines)
    file_obj.close()

# retrieves the entry in the column-th column of the line which has probably been retrieved from a file with lines in the form of a table separated by one or more characters matching whitespace
def retrieve_column_from_line(line, column, whitespace="\\s"):
    result = re.findall("[^" + whitespace + "]+", line)  # findall finds non-overlapping matches
    if len(result) <= column:
        raise ValueError("the requested column doesn't match the number of columns in the specified line")
    return result[column]

def retrieve_column_values(output, column_count, comment_symbol="#"):
    output_lines0 = filter_output_lines(output, comment_symbol=comment_symbol)
    ret_value = []
    for output_line in output_lines0:
        column_value = retrieve_column_from_line(output_line, column_count)
        ret_value.append(column_value)
    return ret_value

# reads file_ and returns the return value of filter_output_lines invoked with the read file content and the specified comment_symbol
# @args comment_symbol can be None in order to include all lines, must not be the empty string '' (ValueError will be raised)
def file_lines(file_, comment_symbol="#"):
    if file_ == None:
        raise ValueError("file_ mustn't be None")
    if comment_symbol == "":
        raise ValueError("comment_symbol mustn't be the empty string ''")
    file_obj = open(file_, "r")
    file_content = file_obj.read()
    file_obj.close()
    file_lines = file_content.split("\n")
    ret_value = filter_output_lines(file_lines, comment_symbol)
    return ret_value

# removes all empty lines and lines which start with comment_symbol (after eventual whitespace) from lines, which is supposed to be the split content of a file or the split output of a command
# @args comment_symbol can be None in order to include all lines, must not be the empty string '' (ValueError will be raised)
def filter_output_lines(lines, comment_symbol="#"):
    if comment_symbol == "":
        raise ValueError("comment_symbol mustn't be the empty string ''")
    if not isinstance(lines, list):
        raise ValueError("lines %s isn't a list" % (lines,))
    ret_value = []
    for i in lines:
        i = i.strip()
        if comment_symbol is None:
            if i != "":
                ret_value.append(i)
        else:
            if not re.match("[\\s]*" + comment_symbol + ".*", i) and re.match("[\\s]+", i) == None and i != "":
                if not comment_symbol in i:
                    ret_value.append(i)
                else:
                    ret_value.append(i[:i.find(comment_symbol)])
    return ret_value

# finds all lines in file_ which match pattern.
# @args comment_symbol can be None in order to include all lines, must not be the empty string '' (ValueError will be raised)
# @return a list with all lines which match pattern or an empty list if none matches pattern, never None
# @see file_lines_match for a fail-fast behavior which just indicates whether a matching line has been found using a boolean
def file_lines_matches(file_, pattern, comment_symbol="#"):
    if comment_symbol == "":
        raise ValueError("comment_symbol mustn't be the empty string ''")
    retvalue = []
    filelines = file_lines(file_, comment_symbol)  # comment lines are already skipped here
    return output_lines_matches(filelines, pattern, comment_symbol=comment_symbol)

def output_lines_matches(lines, pattern, comment_symbol="#"):
    retvalue = []
    lines = filter_output_lines(lines, comment_symbol=comment_symbol)  # remove comment lines
    for line in lines:
        if re.match(pattern, line) != None:
            retvalue.append(line)
    return retvalue

# checks whether at least one (non-commented) line matches pattern and returns True if this is the case. The function does the same as file_lines_matches with fail-fast behavior.
# @args comment_symbol can be None in order to include all lines, must not be the empty string '' (ValueError will be raised)
# @return True if at least one line matches pattern, False otherwise
def file_lines_match(file_, pattern, comment_symbol="#"):
    if comment_symbol == "":
        raise ValueError("comment_symbol mustn't be the empty string ''")
    file_lines0 = file_lines(file_, comment_symbol=comment_symbol)  # comment lines are already skipped here
    return output_lines_match(file_lines0, pattern, comment_symbol=comment_symbol)

# @args comment_symbol can be None in order to include all lines, must not be the empty string '' (ValueError will be raised)
def output_lines_match(lines, pattern, comment_symbol="#"):
    if comment_symbol == "":
        raise ValueError("comment_symbol mustn't be the empty string ''")
    lines = filter_output_lines(lines, comment_symbol=comment_symbol)
    for line in lines:
        if re.match(pattern, line):
            return True
    return False

# @args line the line to be commented out (can be a regular expression or a literal) (leading and trailing whitespace in lines in the file will be ignored)
# @args comment_symbol can be None in order to include all lines, must not be the empty string '' (ValueError will be raised)
def comment_out(file_path, line, comment_symbol):
    if comment_symbol == "":
        raise ValueError("comment_symbol mustn't be the empty string ''")
    new_lines = []
    file_lines0 = file_lines(file_path, comment_symbol=None)
    for file_line in file_lines0:
        if not re.match("[\\s]*%s[\\s]*" % line, file_line):
            new_lines.append(file_line)
        else:
            new_lines.append("%s %s" % (comment_symbol, line))
    file_obj = open(file_path, "rw+")
    for new_line in new_lines:
        file_obj.write("%s\n" % new_line)
    file_obj.flush()
    file_obj.close()

# @args line the line to be commented in (can be a regular expression or a literal)
def comment_in(file_path, line, comment_symbol):
    new_lines = []
    file_lines0 = file_lines(file_path, comment_symbol=None)
    for file_line in file_lines0:
        if not re.match("[\\s]*%s[\\s]*%s" % (comment_symbol, line), line):
            new_lines.append(file_line)
        else:
            new_lines.append(re.search(line, file_line).group(0))
    file_obj = open(file_path, "w")
    for new_line in new_lines:
        file_obj.write("%s\n" % new_line)
    file_obj.flush()
    file_obj.close()

# creates the file at path if it doesn't exist
def create_file_wrapper(path):
    if not check_os.check_python3():
        file_obj = open(path, "w")  # opening for writing or appending creates a non-existing file
        file_obj.close()
    else:
        file_obj = open(path, "x")
        file_obj.close()
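# Usage sketch (not part of the original file; the fstab path and the
# replacement line are placeholders): reading the non-comment lines of a
# config file, filtering them and rewriting one line in place:
#
#   import file_line_utils
#   lines = file_line_utils.file_lines("/etc/fstab", comment_symbol="#")
#   swap_lines = file_line_utils.output_lines_matches(lines, ".*[\\s]swap[\\s].*")
#   file_line_utils.file_replace_line("/etc/fstab",
#       "[\\s]*UUID=.* /boot .*", "UUID=0000-0000 /boot ext2 defaults 0 2\n")
#
# file_replace_line writes the file back unchanged if no line matches the
# regular expression.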
open(path, "x") file_obj.close() PKH"EܽB((#python_essentials/lib/file_utils.py#!/usr/bin/python # -*- coding: utf-8 -*- # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . # # Dieses Programm ist Freie Software: Sie können es unter den Bedingungen # der GNU General Public License, wie von der Free Software Foundation, # Version 3 der Lizenz oder (nach Ihrer Wahl) jeder neueren # veröffentlichten Version, weiterverbreiten und/oder modifizieren. # # Dieses Programm wird in der Hoffnung, dass es nützlich sein wird, aber # OHNE JEDE GEWÄHRLEISTUNG, bereitgestellt; sogar ohne die implizite # Gewährleistung der MARKTFÄHIGKEIT oder EIGNUNG FÜR EINEN BESTIMMTEN ZWECK. # Siehe die GNU General Public License für weitere Details. # # Sie sollten eine Kopie der GNU General Public License zusammen mit diesem # Programm erhalten haben. Wenn nicht, siehe . # # The retrieve_hash function code was authored by stack exchange user # omnifarious (http://stackoverflow.com/users/167958/omnifarious) on # stackoverflow.com import os import shutil import subprocess as sp import logging import re import hashlib logger = logging.getLogger("file_utils") logger.setLevel(logging.DEBUG) ch = logging.StreamHandler() ch.setLevel(logging.DEBUG) logger.addHandler(ch) # choose backup postfix from a sequence of numbers BACKUP_TYPE_SEQUENCE = 1 # choose the SHA1 sum of the file to be backed up and skip existing backups based on the file name BACKUP_TYPE_SHA1 = 2 BACKUP_TYPE_NONE = 3 # should be a digit in order to make it parsable from command line (when the setup is enforced with a positional parameter) backup_type_default= BACKUP_TYPE_SEQUENCE backup_types = [BACKUP_TYPE_SEQUENCE, BACKUP_TYPE_SHA1, BACKUP_TYPE_NONE] ################################################################################ # link tools # ################################################################################ # If link_name doesn't exist, creates link to point_to named link_name, otherwise checks # 1.) if existing link_name is a link and points already to point_to # 2.) or a link pointing to another location than point_to # and skips if 1. is the case and overwrites link_name if 2. is the case. Creates a backup of overwritten files or links if backup is True. # @args backup creates a backup in the form filename.bk where N is the minimal number denoting a filesname which doesn't exist. Backup is not created if the content of point_to and link_name are identical. If link_name is a link, the link target is backed up (because backing up the link doesn't make sense). # @return False if the link could not be created due to existance of the link_name (and force being False). 
def checked_link(point_to, link_name, force=False, backup_type=backup_type_default): if not backup_type in backup_types: raise ValueError("backup_type '%s' isn't one of %s" % (backup_type,backup_types)) target_parent = os.path.abspath(os.path.join(link_name, '..')) if not os.path.lexists(target_parent): os.makedirs(target_parent) if not os.path.lexists(link_name): os.symlink(point_to,link_name) return True else: if not force: return False if os.path.islink(link_name) and os.path.realpath(link_name) == point_to: return True if os.path.exists(link_name): if backup_type != BACKUP_TYPE_NONE: backup_file(link_name, backup_type=backup_type) else: if not os.path.isdir(link_name) or os.path.islink(link_name): os.remove(link_name) else: shutil.rmtree(link_name) os.symlink(point_to, link_name) return True # implementation notes: # - skipping backup if file content are identical is not useful because if was initially intended to avoid endless identical backups of links which now handled smarter by skipping link creation when link exists and points to the source already ################################################################################ # Write tools # ################################################################################ def write_file(file0, what): if os.path.isdir(file0): raise ValueError("file %s is a directory" % file0) if lazy_newline and not what.endswith("\n"): what = "%s\n" % what file_obj = open(file0, 'w') file_obj.write(what) file_obj.flush() file_obj.close() # appends what to file with path file0. Creates file if it doesn't exist. Newline character is appended if lazy_newline is True and what doesn't end with a newline yet. # raises ValueError if file0 is a directory def append_file(file0, what, lazy_newline=True): if os.path.isdir(file0): raise ValueError("file %s is a directory" % file0) if lazy_newline and not what.endswith("\n"): what = "%s\n" % what file_obj = open(file0, 'a') file_obj.write(what) file_obj.flush() file_obj.close() def check_dir(dir_path): if not os.path.exists(dir_path): return False if not os.path.isdir(dir_path): raise ValueError("dir_path has to point to a directory") if len(os.listdir(dir_path)) == 0: logger.debug("%s is empty" % dir_path) return False return True def create_dir(dir_path, allow_content): if os.path.exists(dir_path): if os.path.isdir(dir_path): if len(os.listdir(dir_path)) > 0 and not allow_content: raise RuntimeError("%s exists and has content, a directory is supposed to be created. Remove or move the content of the directory" % dir_path) else: # file raise RuntimeError("%s exists, a directory is supposed to be created. 
Remove or move the file" % dir_path) else: os.makedirs(dir_path) def check_file(file_path, md5sum): if not os.path.exists(file_path): return False if not os.path.isfile(file_path): raise ValueError("file_path has to point to a file") retrieved_md5sum = retrieve_hash(file_path) if not retrieved_md5sum == md5sum: logger.debug("%s has md5 sum %s instead of %s" % (file_path, retrieved_md5sum, md5sum)) return False return True def retrieve_hash(file_path, hasher="md5", blocksize=65536): afile = open(file_path, "r") hasher_instance = hashlib.new(hasher) buf = afile.read(blocksize) while len(buf) > 0: hasher_instance.update(buf) buf = afile.read(blocksize) afile.close() ret_value = hasher_instance.hexdigest() return ret_value # @return the XOR of all file content, name and directory name hashes in order # to track any changes (even one simple rename of a file or directory) def retrieve_hash_recursive(dir_path, hasher="md5", blocksize=65536): logger.debug("generating recursive hash for '%s'" % (dir_path,)) if not os.path.isdir(dir_path): raise ValueError("dir_path '%s' isn't a directory" % (dir_path,)) ret_value = 0 for dirpath, dirnames, filenames in os.walk(dir_path): for filename in filenames: current_file_path = os.path.join(dirpath, filename) hasher_instance = hashlib.new(hasher) hasher_instance.update(current_file_path) ret_value = ret_value ^ int(hasher_instance.hexdigest(), base=16) ret_value = ret_value ^ int(retrieve_hash(current_file_path, hasher=hasher, blocksize=65536), base=16) for dirname in dirnames: current_dir_path = os.path.join(dirpath, dirname) hasher_instance = hashlib.new(hasher) hasher_instance.update(current_dir_path) ret_value = ret_value ^ int(hasher_instance.hexdigest(), base=16) return str(ret_value) # moves the file to a backup location (i.e. file_path no longer exists after # the function run) # @args file_path a path of an existing file or directory def backup_file(file_path, backup_type=backup_type_default, follow_links=False): if not os.path.exists(file_path): raise ValueError("file_path '%s' doesn't exist, backup_file only accepts an existing file or directory as argument" % (file_path,)) if backup_type == BACKUP_TYPE_SHA1: if os.path.isdir(file_path): file_sha1 = retrieve_hash_recursive(file_path, hasher="sha1") else: file_sha1 = retrieve_hash(file_path, hasher="sha1") file_backup_path = "%s.bk-%s" % (file_path, file_sha1) if not os.path.exists(file_backup_path): os.rename(os.path.realpath(file_path), file_backup_path) elif backup_type == BACKUP_TYPE_SEQUENCE: renamepostfix = ".bk" renamepostfixcount = 0 backup_file = file_path if follow_links: backup_file = os.path.realpath(file_path) if os.path.lexists(backup_file): renamepostfix = ".bk%d" % renamepostfixcount renamepostfixcount += 1 while os.path.lexists(backup_file+renamepostfix): renamepostfix = ".bk%d" % renamepostfixcount renamepostfixcount += 1 target_backup_path = backup_file+renamepostfix logger.debug("backing up %s to %s" % (backup_file, target_backup_path)) os.rename(backup_file, target_backup_path) else: raise ValueError("backup_type '%s' isn't supported" % (backup_type,)) PKS HFϒXtv!v!$python_essentials/lib/mount_utils.py#!/usr/bin/python # coding: utf-8 # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. 
################################################################################
# python_essentials/lib/mount_utils.py
################################################################################
#!/usr/bin/python
# coding: utf-8
# (GPL v3 license header identical to the one in osm_postgis_transform.py.)

import argparse
import subprocess as sp
import os
import sys
import re
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
import file_line_utils

# binaries
mount_default = "mount"
bash = "dash"
ifconfig = "ifconfig"
losetup = "losetup"
partprobe = "partprobe"
btrfs = "btrfs"
umount = "umount"

IMAGE_MODE_PT = "partition-table"
IMAGE_MODE_FS = "file-system"
image_modes = [IMAGE_MODE_PT, IMAGE_MODE_FS]
MOUNT_MODE_NFS = 1
MOUNT_MODE_CIFS = 2
mount_mode_default = MOUNT_MODE_CIFS

def mount_dsm_sparse_file(shared_folder_name, image_mount_target, network_mount_target, image_file_name, remote_host, username, uid=1000, gid=1000, mount_mode=mount_mode_default, credentials_file=None, mount=mount_default):
    """a wrapper around `mount_sparse_file` and different remote mount methods (NFS, cifs, etc.) (sparse file support is horrible for all of them...). It has been written to deal with Synology DSM 5.0 (path specifications, etc.). If `credentials_file` is given, it is passed to the `mount` command via the `credentials` option of `mount.cifs`; if it is `None`, the `username` option with its value is passed to the `mount` command, which will then request the password at a prompt. `uid` and `gid` are values for options of `mount.cifs` (which default to the Ubuntu defaults for the first user)."""
    if mount_mode == MOUNT_MODE_NFS:
        lazy_mount(source="%s:/volume1/%s" % (remote_host, shared_folder_name), target=network_mount_target, fs_type="nfs", options_str="nfsvers=4", mount=mount)  # handles a non-existing target
        # omitting nfsvers=4 causes 'mount.nfs: requested NFS version or transport protocol is not supported' (not clear to which protocol this nonsensical error message refers)
    elif mount_mode == MOUNT_MODE_CIFS:
        if credentials_file is None:
            options_str = "username=%s,rw,uid=%d,gid=%d" % (username, uid, gid,)
        else:
            if not os.path.exists(credentials_file):
                raise ValueError("credentials_file '%s' doesn't exist" % (credentials_file,))
            options_str = "credentials=%s,rw,uid=%d,gid=%d" % (credentials_file, uid, gid,)
        lazy_mount(source="//%s/%s" % (remote_host, shared_folder_name), target=network_mount_target, fs_type="cifs", options_str=options_str, mount=mount)  # handles a non-existing target
    else:
        raise ValueError("mount_mode '%s' not supported" % (mount_mode,))
    mount_sparse_file(
        image_file=os.path.join(network_mount_target, image_file_name),
        image_mount_target=image_mount_target,
        image_mode=IMAGE_MODE_FS,
        mount=mount
    )

def mount_sparse_file(image_file, image_mount_target, image_mode=IMAGE_MODE_FS, mount=mount_default):
    """Handles mounting `image_file` at `image_mount_target` according to `image_mode`, which determines whether the image contains a partition table or a plain filesystem."""
    image_file_loop_dev = losetup_wrapper(image_file)
    if image_mode == IMAGE_MODE_PT:
        sp.check_call([partprobe, image_file_loop_dev])
        lazy_mount("%sp1" % image_file_loop_dev, image_mount_target, "btrfs", mount=mount)
        sp.check_call([btrfs, "device", "scan", "%sp1" % image_file_loop_dev])  # scan fails if an image with a partition table is mounted at the loop device -> scan the partitions
    elif image_mode == IMAGE_MODE_FS:
        lazy_mount(image_file_loop_dev, image_mount_target, "btrfs", mount=mount)
        sp.check_call([btrfs, "device", "scan", image_file_loop_dev])
        # do this always as it doesn't fail if no btrfs image has been mounted
        # and doesn't take a lot of time -> no need to add a parameter to
        # distinguish between btrfs and others (would be more elegant though)
    else:
        raise ValueError("image_mode has to be one of %s, but is %s" % (str(image_modes), image_mode))

def unmount_sparse_file(mount_target):
    """Unmounts the sparse file which has been mounted under `mount_target` and removes the association of that sparse file with its loop device. The loop device is determined automatically based on `losetup`."""
    mount_source = get_mount_source(mount_target)
    if mount_source is None:
        raise ValueError("mount_target '%s' isn't using a loop device" % (mount_target,))
    logger.info("mount_target '%s' was using loop device '%s'" % (mount_target, mount_source))
    sp.check_call([umount, mount_target])
    sp.check_call([losetup, "-d", mount_source])

def get_mount_source(mount_target):
    """Determines the directory or filesystem which is mounted under `mount_target` and returns it, or `None` if no directory or filesystem is mounted under `mount_target`."""
    for mount_source, mount_target0 in [tuple(re.split("[\\s]+", x)[0:2]) for x in file_line_utils.file_lines("/proc/mounts", comment_symbol="#")]:
        if mount_target0 == mount_target:
            return mount_source
    return None

def losetup_wrapper(file):
    """A wrapper around finding the next free loop device with `losetup` and associating `file` with it in one function call. Returns the found loop device `file` has been associated to."""
    try:
        loop_dev = sp.check_output([losetup, "-f"]).decode("utf-8").strip()
    except sp.CalledProcessError as ex:
        raise RuntimeError("no free loop device")
    sp.check_call([losetup, loop_dev, file])
    return loop_dev

def check_mounted(source, target, mount=mount_default):
    """Checks whether `source` is mounted under `target`; returns `True` if and only if that's the case and `False` otherwise."""
    mount_lines = sp.check_output([mount]).decode("utf-8").strip().split("\n")  # open("/proc/mounts", "r").readlines() is not compatible with FreeBSD
    for mount_line in mount_lines:
        mount_line_split = mount_line.split(" ")
        target0 = mount_line_split[2]  # mount output has the form "source on target type fstype (options)"
        if target0 == target:
            # don't check equality of source with the (1st column of) mount output because multiple usage of a mount target isn't possible and therefore the check should already succeed if the mount target is used by a (possibly other) mount source
            return True
    return False

def lazy_mount(source, target, fs_type, options_str=None, mount=mount_default):
    """Checks if `source` is already mounted under `target` and skips (if it is) or mounts `source` under `target` otherwise as type `fs_type`. Due to the fact that the type can be omitted for certain invocations of `mount` (e.g. `mount --bind`), this function allows `fs_type` to be `None`, which means no type will be specified. Uses `mount` as binary for the mount command."""
    if check_mounted(source, target, mount=mount):
        return
    if not os.path.lexists(target):
        if os.path.isfile(source):
            os.mknod(target, mode=0o0755)
        else:
            os.makedirs(target)
    cmds = [mount]
    if fs_type != None and fs_type != "":
        cmds += ["-t", fs_type,]
    if not options_str is None and options_str != "":
        cmds += ["-o", options_str]
    cmds += [source, target]
    sp.check_call(cmds)
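# Usage sketch (not part of the original file; host, share and file names are
# placeholders): mounting a btrfs sparse image that is stored on a DSM CIFS
# share and unmounting it again:
#
#   import mount_utils
#   mount_utils.mount_dsm_sparse_file(shared_folder_name="backup",
#       image_mount_target="/mnt/backup-image",
#       network_mount_target="/mnt/dsm-backup",
#       image_file_name="backup.img", remote_host="192.168.1.10",
#       username="admin", mount_mode=mount_utils.MOUNT_MODE_CIFS)
#   ...
#   mount_utils.unmount_sparse_file("/mnt/backup-image")
#
# unmount_sparse_file also detaches the loop device that mount_sparse_file
# allocated via losetup.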
################################################################################
# python_essentials/lib/mount_utils_prerequisites.py
################################################################################
#!/usr/bin/python
# coding: utf-8
# (GPL v3 license header identical to the one in osm_postgis_transform.py.)

import pm_utils

skip_apt_update_default = False

def mount_prerequisites(skip_apt_update=skip_apt_update_default):
    """Checks whether the packages necessary for mounting have been installed and installs them if necessary using `pm_utils.install_packages`. Returns `True` if packages were installed and `False` otherwise."""
    installed = False
    if not pm_utils.dpkg_check_package_installed("nfs-common"):
        pm_utils.install_packages(["nfs-common"], skip_apt_update=skip_apt_update)
        installed = True
    if not pm_utils.dpkg_check_package_installed("cifs-utils"):
        pm_utils.install_packages(["cifs-utils"], skip_apt_update=skip_apt_update)
        installed = True
    return installed

################################################################################
# python_essentials/lib/os_utils.py
################################################################################
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (GPL v3 license header identical to the one in osm_postgis_transform.py.)

# manages functions involving all sorts of runtime environments (not only
# operating systems, like the name suggests, but also script language
# interpreters)

import os
import sys
import check_os
import subprocess as sp

def which(pgm):
    """replacement for python3's shutil.which"""
    if os.path.exists(pgm) and os.access(pgm, os.X_OK):
        return pgm
    path = os.getenv('PATH')
    for p in path.split(os.path.pathsep):
        p = os.path.join(p, pgm)
        if os.path.exists(p) and os.access(p, os.X_OK):
            return p

def hostname():
    if check_os.check_linux():
        return sp.check_output(["hostname"]).strip().decode("utf-8")
    else:
        raise RuntimeError("operating system not supported")

CHECK_JAVA_NOT_SET = 1
CHECK_JAVA_INVALID = 2

def check_java_valid(java_home=os.getenv("JAVA_HOME")):
    """checks that the `JAVA_HOME` environment variable is set, non-empty and points to a valid Java JDK
    @return `None` if the `JAVA_HOME` variable points to a valid Java JDK, `CHECK_JAVA_NOT_SET` if `JAVA_HOME` isn't set or is empty, or `CHECK_JAVA_INVALID` if `JAVA_HOME` doesn't point to a valid Java JDK"""
    if java_home is None or java_home == "":
        return CHECK_JAVA_NOT_SET
    if not os.path.exists(java_home):
        return CHECK_JAVA_INVALID
    java_binary = os.path.join(java_home, "bin/java")
    if not os.path.exists(java_binary):
        return CHECK_JAVA_INVALID
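# Usage sketch (not part of the original file): checking for a binary and for a
# usable JDK before starting a build:
#
#   import os_utils
#   if os_utils.which("osm2pgsql") is None:
#       raise RuntimeError("osm2pgsql is not on $PATH")
#   if os_utils.check_java_valid() is not None:
#       raise RuntimeError("JAVA_HOME is unset or doesn't point to a JDK")
#
# check_java_valid returns None on success, so `is not None` catches both the
# CHECK_JAVA_NOT_SET and the CHECK_JAVA_INVALID case.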
################################################################################
# python_essentials/lib/postgis_utils.py
################################################################################
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (GPL v3 license header identical to the one in osm_postgis_transform.py.)

################################################################################
# Authentication
################################################################################
# As the script provides a host parameter it is necessary to set up
# authentication for hosts (local connections are set up to be trusted by
# default) (this is handled with pexpect because this is the way to interact
# with subprocesses querying passwords)

import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
import subprocess as sp
import os
import sys
import time
import pexpect
import file_utils

# different possibilities of installing the postgis and hstore extensions on a database (extension support is dropped for postgis above 2.x, but some package maintainers include the scripts nevertheless) -> let the caller choose
EXTENSION_INSTALL_EXTENSION = 1  # use the extension command of postgres
EXTENSION_INSTALL_SQL_FILE = 2   # use the SQL scripts shipped with the packages
EXTENSION_INSTALLS = [EXTENSION_INSTALL_EXTENSION, EXTENSION_INSTALL_SQL_FILE]
extension_install_default = EXTENSION_INSTALLS[0]
pwfile_path = "./pwfile"  # it's not wise to write to a file in the system's temporary file directory which is readable for everybody
authmethod = "md5"
postgres_server_start_timeout = 5
postgres_server_stop_timeout = postgres_server_start_timeout

# runs an appropriate initdb routine and initializes md5 login for db_user. User db_user is created (automatically when initdb is invoked with --username).
# @args extension_install one of EXTENSION_INSTALLS
# @raise ValueError if extension_install is not one of EXTENSION_INSTALLS
def bootstrap_datadir(datadir_path, db_user, password="somepw", initdb="initdb"):
    pwfile = open(pwfile_path, "w")
    pwfile.write(password)
    pwfile.flush()
    pwfile.close()
    sp.check_call([initdb, "-D", datadir_path, "--username=%s" % db_user, "--pwfile=%s" % pwfile_path, "--auth=%s" % authmethod])
    os.remove(pwfile_path)

def __pe_wrapper__(cmds, password):
    psql_proc = pexpect.spawn(str.join(" ", cmds))
    psql_proc.logfile = sys.stdout
    psql_proc.expect(["Password", "Passwort", "postgis"])
    psql_proc.sendline(password)
    psql_proc.wait()

def bootstrap_database(datadir_path, db_port, db_host, db_user, db_name, password="somepw", initdb="initdb", postgres="postgres", createdb="createdb", psql="psql", socket_dir="/tmp", extension_install=extension_install_default):
    if not extension_install in EXTENSION_INSTALLS:
        raise ValueError("extension_install has to be one of %s" % str(EXTENSION_INSTALLS))
    # set up hba (could be done in bootstrap_datadir, but this would require
    # additional arguments)
    pg_hba_conf_file_path = os.path.join(datadir_path, "pg_hba.conf")
    file_utils.append_file(pg_hba_conf_file_path, "\nhost all %s 0.0.0.0 0.0.0.0 %s\n" % (db_user, authmethod))
    postgres_process = sp.Popen([postgres, "-D", datadir_path, "-p", str(db_port), "-h", db_host, "-k", socket_dir])
    try:
        logger.info("sleeping %s s to ensure the postgres server started" % postgres_server_start_timeout)
        time.sleep(postgres_server_start_timeout)  # not nice (should poll the connection until success instead)
        __pe_wrapper__([createdb, "-p", str(db_port), "-h", db_host, "--username=%s" % db_user, db_name], password)
        __pe_wrapper__([psql, "-c", "\"grant all on database %s to %s;\"" % (db_name, db_user), "-p", str(db_port), "-h", db_host, "--username=%s" % db_user], password)
        if extension_install == EXTENSION_INSTALL_EXTENSION:
            __pe_wrapper__([psql, "-d", db_name, "-c", "\"create extension postgis; create extension postgis_topology;\"", "-p", str(db_port), "-h", db_host, "--username=%s" % db_user], password)  # hstore handled below
        elif extension_install == EXTENSION_INSTALL_SQL_FILE:
            # ON_ERROR_STOP=1 causes the script to fail after the first failing command, which is very useful for debugging as proceeding doesn't make sense in a lot of cases
            __pe_wrapper__([psql, "-d", db_name, "-f", "/usr/share/postgresql/%s/contrib/postgis-%s/postgis.sql" % (pg_version, postgis_version_string), "-p", str(db_port), "-h", db_host, "--username=%s" % db_user, "-v", "ON_ERROR_STOP=1"], password)
            __pe_wrapper__([psql, "-d", db_name, "-f", "/usr/share/postgresql/%s/contrib/postgis-%s/topology.sql" % (pg_version, postgis_version_string), "-p", str(db_port), "-h", db_host, "--username=%s" % db_user, "-v", "ON_ERROR_STOP=1"], password)
            __pe_wrapper__([psql, "-d", db_name, "-f", "/usr/share/postgresql/%s/contrib/postgis-%s/spatial_ref_sys.sql" % (pg_version, postgis_version_string), "-p", str(db_port), "-h", db_host, "--username=%s" % db_user, "-v", "ON_ERROR_STOP=1"], password)
        __pe_wrapper__([psql, "-d", db_name, "-c", "\"create extension hstore;\"", "-p", str(db_port), "-h", db_host, "--username=%s" % db_user, "-v", "ON_ERROR_STOP=1"], password)  # no sql file for hstore found, so install it from the postgresql-contrib-x.x deb package (find another way (check the wiki for existing solutions) if trouble occurs)
        __pe_wrapper__([psql, "-c", "\"ALTER USER %s WITH PASSWORD '%s';\"" % (db_user, password), "-p", str(db_port), "-h", db_host, "--username=%s" % db_user], password)
    finally:
        postgres_process.terminate()
        logger.info("sleeping %s s to ensure the postgres server stopped" % postgres_server_stop_timeout)
        time.sleep(postgres_server_stop_timeout)
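# Usage sketch (not part of the original file; paths and credentials are
# placeholders): creating a fresh PostGIS cluster and database in two steps,
# the same way osm_postgis_transform.py does it:
#
#   import postgis_utils
#   postgis_utils.bootstrap_datadir("/tmp/osm_postgis_db", "postgis",
#       password="postgis", initdb="/usr/lib/postgresql/9.2/bin/initdb")
#   postgis_utils.bootstrap_database("/tmp/osm_postgis_db", 5204, "localhost",
#       "postgis", "postgis", password="postgis",
#       postgres="/usr/lib/postgresql/9.2/bin/postgres",
#       createdb="/usr/lib/postgresql/9.2/bin/createdb",
#       psql="/usr/lib/postgresql/9.2/bin/psql", socket_dir="/tmp")
#
# bootstrap_database starts a temporary postgres process on the given port,
# creates the database with the postgis, postgis_topology and hstore
# extensions and stops the process again.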
"--username=%s" % db_user], password) finally: postgres_process.terminate() logger.info("sleeping %s s to ensure postgres server stopped" % postgres_server_stop_timeout) time.sleep(postgres_server_stop_timeout) PKH"E$~ ~ *python_essentials/lib/python_essentials.py#!/usr/bin/python # -*- coding: utf-8 -*- # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . # # Dieses Programm ist Freie Software: Sie können es unter den Bedingungen # der GNU General Public License, wie von der Free Software Foundation, # Version 3 der Lizenz oder (nach Ihrer Wahl) jeder neueren # veröffentlichten Version, weiterverbreiten und/oder modifizieren. # # Dieses Programm wird in der Hoffnung, dass es nützlich sein wird, aber # OHNE JEDE GEWÄHRLEISTUNG, bereitgestellt; sogar ohne die implizite # Gewährleistung der MARKTFÄHIGKEIT oder EIGNUNG FÜR EINEN BESTIMMTEN ZWECK. # Siehe die GNU General Public License für weitere Details. # # Sie sollten eine Kopie der GNU General Public License zusammen mit diesem # Programm erhalten haben. Wenn nicht, siehe . # This file contains code which is used in different python-essentials scripts # external dependencies import os import ConfigParser import logging logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) ch = logging.StreamHandler() ch.setLevel(logging.DEBUG) logger.addHandler(ch) config_file_name_default = "python-essentials.cfg" config_file_pathes_default = [os.path.join(os.environ["HOME"], ".%s" % (config_file_name_default)), os.path.join("/etc", config_file_name_default), ] # creates an instance of ConfigParser.ConfigParser which either has a file loaded or not. Retrieving values from it with ConfigParser.get should always be backed up by a default value. # @args base_dir the directory against which the configuration file which is sibling of the invoked script def create_config_parser(config_file_name=config_file_name_default, config_file_pathes=config_file_pathes_default): chosen_config_file_path = None for config_file_path in config_file_pathes: if os.path.exists(config_file_path): logger.info("using '%s' as configuration file" % (config_file_path)) chosen_config_file_path = config_file_path break config = ConfigParser.ConfigParser() if chosen_config_file_path is None: logger.info("no configuration file found, using default values") else: # can't read None as argument passed to ConfigParser.read config.read(chosen_config_file_path) return config PKA ZE`vr..)python_essentials/lib/user_group_utils.py#!/usr/bin/python # -*- coding: utf-8 -*- # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
python_essentials/lib/python_essentials.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# This file contains code which is used in different python-essentials scripts

# external dependencies
import os
import ConfigParser
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)

config_file_name_default = "python-essentials.cfg"
config_file_pathes_default = [os.path.join(os.environ["HOME"], ".%s" % (config_file_name_default)),
    os.path.join("/etc", config_file_name_default),
]

# creates an instance of ConfigParser.ConfigParser which either has a file loaded or not.
# Retrieving values from it with ConfigParser.get should always be backed up by a default value.
# @args config_file_name the name of the configuration file (the module-level default paths are
#     built from it)
# @args config_file_pathes the candidate configuration file paths, checked in order; the first
#     existing one is loaded
def create_config_parser(config_file_name=config_file_name_default, config_file_pathes=config_file_pathes_default):
    chosen_config_file_path = None
    for config_file_path in config_file_pathes:
        if os.path.exists(config_file_path):
            logger.info("using '%s' as configuration file" % (config_file_path))
            chosen_config_file_path = config_file_path
            break
    config = ConfigParser.ConfigParser()
    if chosen_config_file_path is None:
        logger.info("no configuration file found, using default values")
    else:
        # can't pass None as argument to ConfigParser.read
        config.read(chosen_config_file_path)
    return config

python_essentials/lib/user_group_utils.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import file_line_utils
import subprocess
import os

##############################
# user and group tools
##############################

# @return the username associated with the specified uid (as listed in /etc/passwd) or None
#     if there's no such uid
# could be done with `id -u -n <uid>`, but that is buggy in openSUSE due to missing services
# (as are so many other commands)
def username_by_id(uid):
    if not isinstance(uid, int):
        raise ValueError("uid has to be an int")
    passwd_lines = file_line_utils.file_lines("/etc/passwd", "#")
    for passwd_line in passwd_lines:
        passwd_line_content = passwd_line.split(":")
        if int(passwd_line_content[2]) == uid:
            return passwd_line_content[0]
    return None

# @return the group name associated with the specified gid (as listed in /etc/group) or None
#     if there's no such gid
def groupname_by_id(gid):
    if not isinstance(gid, int):
        raise ValueError("gid has to be an int")
    group_lines = file_line_utils.file_lines("/etc/group", "#")
    for group_line in group_lines:
        group_line_content = group_line.split(":")
        if int(group_line_content[2]) == gid:
            return group_line_content[0]
    return None

# @return the uid of username or -1 if the user doesn't exist
def id_by_username(username):
    if not check_user_exists(username):
        return -1
    ret_value = subprocess.check_output(["id", "-u", username])
    return int(ret_value)

# @return the gid of groupname or -1 if the group doesn't exist
# implementation notes:
# - `id -g username` is simply wrong (it returns the user's primary group id, not the gid of an
#   arbitrary group)
def id_by_groupname(groupname):
    if not check_group_exists(groupname):
        return -1
    group_lines = file_line_utils.file_lines("/etc/group", comment_symbol="#")
    for group_line in group_lines:
        group_line_content = group_line.split(":")
        if group_line_content[0] == groupname:
            return int(group_line_content[2])
    return None

# doesn't handle lines which start with whitespace in /etc/passwd correctly
# @return True if username exists (in /etc/passwd)
def check_user_exists(username):
    passwd_lines = file_line_utils.file_lines("/etc/passwd", comment_symbol="#")
    for passwd_line in passwd_lines:
        if passwd_line.startswith(username):
            return True
    return False

# doesn't handle lines which start with whitespace in /etc/group correctly
# @return True if groupname exists (in /etc/group)
def check_group_exists(groupname):
    group_lines = file_line_utils.file_lines("/etc/group", comment_symbol="#")
    for group_line in group_lines:
        if group_line.startswith(groupname):
            return True
    return False
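# Hypothetical usage sketch of the lookup helpers above (the account name "postgres" is only an
# example); on Unix the standard-library pwd and grp modules provide equivalent lookups and can
# serve as a cross-check for the /etc/passwd and /etc/group parsing:
#
# import pwd
# import grp
# import user_group_utils
#
# uid = user_group_utils.id_by_username("postgres")      # -1 if the user doesn't exist
# if uid != -1:
#     assert user_group_utils.username_by_id(uid) == pwd.getpwuid(uid).pw_name
# gid = user_group_utils.id_by_groupname("postgres")     # -1 if the group doesn't exist
# if gid != -1:
#     assert user_group_utils.groupname_by_id(gid) == grp.getgrgid(gid).gr_name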
# to be passed to the preexec_fn argument of the relevant subprocess.* functions, e.g.
# return_code = sp.check_call(["git", "rev-parse"], stderr=sp.PIPE, cwd=base_dir,
#     preexec_fn=user_group_utils.demote_uid(user_group_utils.id_by_username(build_user)))
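# A sketch (not part of this module) of running a subprocess with dropped privileges via the
# demotion helpers defined directly below; "builder" is a hypothetical account name. Note that
# preexec_fn runs in the child between fork and exec, so os.setgid/os.setuid only affect the
# child process, and the gid is dropped before the uid:
#
# build_user = "builder"
# subprocess.check_call(["id"],
#     preexec_fn=demote_uid_gid(id_by_username(build_user), id_by_groupname(build_user)))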
def demote_uid(uid): return demote_uid_gid(uid,uid) def demote_uid_gid(uid, gid): def ret_value(): os.setgid(gid) os.setuid(uid) return ret_value PKGbSS!python_essentials/lib/check_os.py#!/usr/bin/python # -*- coding: utf-8 -*- # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . # # Dieses Programm ist Freie Software: Sie können es unter den Bedingungen # der GNU General Public License, wie von der Free Software Foundation, # Version 3 der Lizenz oder (nach Ihrer Wahl) jeder neueren # veröffentlichten Version, weiterverbreiten und/oder modifizieren. # # Dieses Programm wird in der Hoffnung, dass es nützlich sein wird, aber # OHNE JEDE GEWÄHRLEISTUNG, bereitgestellt; sogar ohne die implizite # Gewährleistung der MARKTFÄHIGKEIT oder EIGNUNG FÜR EINEN BESTIMMTEN ZWECK. # Siehe die GNU General Public License für weitere Details. # # Sie sollten eine Kopie der GNU General Public License zusammen mit diesem # Programm erhalten haben. Wenn nicht, siehe . import subprocess as sp import re import os import sys import platform # constants ARCHITECTURE_X86_64 = "x86_64" ARCHITECTURE_I386 = "i386" ARCHITECTURE_I686 = "i686" ARCHITECTURE_ARMV7L = "armv7l" INSTRUCTION_SET_AMD64 = "amd64" INSTRUCTION_SET_I386 = "i386" ARCHITECTURE_INSTRUCTION_SET_DICT = { ARCHITECTURE_X86_64: INSTRUCTION_SET_AMD64, ARCHITECTURE_I386: INSTRUCTION_SET_I386, ARCHITECTURE_I686: INSTRUCTION_SET_I386, } # binaries lsb_release = "lsb_release" def check_linux(): if not check_python3(): ret_value = sys.platform == "linux2" return ret_value else: ret_value = sys.platform == "linux" return ret_value def check_opensuse(): try: lsb_release_id_short = sp.check_output([lsb_release, "-d", "-s"]) return "openSUSE" in lsb_release_id_short except Exception: return False def check_ubuntu(): try: lsb_release_id_short = sp.check_output([lsb_release, "-d", "-s"]).strip().decode("utf-8") ret_value = "Ubuntu" in lsb_release_id_short return ret_value except Exception: return False def check_debian(): try: lsb_release_id_short = sp.check_output([lsb_release, "-d", "-s"]).strip().decode("utf-8") ret_value = "Debian" in lsb_release_id_short return ret_value except Exception: return False def check_linuxmint(): try: lsb_release_id_short = sp.check_output([lsb_release, "-d", "-s"]).strip().decode("utf-8") ret_value = "Linux Mint" in lsb_release_id_short return ret_value except Exception: return False def check_freebsd(): return platform.system() == "FreeBSD" def check_root(): uid = sp.check_output(["id","-u"]).strip() ret_value = int(uid) == 0 return ret_value # @return True if the python version is >= 3.0, otherwise False def check_python3(): ret_value = sys.version_info >= (3,0) return ret_value def findout_architecture(): architecture = sp.check_output(["uname","-m"]).strip() return architecture def findout_instruction_set(): architecture = findout_architecture() instruction_set = ARCHITECTURE_INSTRUCTION_SET_DICT[architecture] return instruction_set #lsb_release only works with python2.x def 
findout_release_ubuntu(): while not os.path.isfile("/usr/bin/python2.6") and not os.path.isfile("/usr/bin/python2.7"): print("Neither python2.6 nor python 2.7 could be found in /usr/bin/. It's necessary to determine your distribution release") confirm("proceed","Install python 2.6 or 2.7 and make it available at /usr/bin/python2.6 or /usr/lib/python2.7") release= sp.check_output([lsb_release,"-cs"]).strip().decode("utf-8") return release def findout_release_debian(): return findout_release_ubuntu() # useful is a feature is available for any version up from a certain (the tuple contains ints because strings are less comparable) def findout_release_ubuntu_tuple(): while not os.path.isfile("/usr/bin/python2.6") and not os.path.isfile("/usr/bin/python2.7"): print("Neither python2.6 nor python 2.7 could be found in /usr/bin/. It's necessary to determine your distribution release") confirm("proceed","Install python 2.6 or 2.7 and make it available at /usr/bin/python2.6 or /usr/lib/python2.7") release_number = sp.check_output([lsb_release, "-rs"]).strip().decode("utf-8") release_tuple = tuple([int(x) for x in release_number.split(".")]) return release_tuple # linuxmint release identifiers are natural numbers def findout_release_linuxmint(): ret_value = sp.check_output([lsb_release, "-r", "-s"]).decode("utf-8").strip() return int(ret_value) PKVG<\CC!python_essentials/lib/pm_utils.py#!/usr/bin/python # -*- coding: utf-8 -*- # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . # # Dieses Programm ist Freie Software: Sie können es unter den Bedingungen # der GNU General Public License, wie von der Free Software Foundation, # Version 3 der Lizenz oder (nach Ihrer Wahl) jeder neueren # veröffentlichten Version, weiterverbreiten und/oder modifizieren. # # Dieses Programm wird in der Hoffnung, dass es nützlich sein wird, aber # OHNE JEDE GEWÄHRLEISTUNG, bereitgestellt; sogar ohne die implizite # Gewährleistung der MARKTFÄHIGKEIT oder EIGNUNG FÜR EINEN BESTIMMTEN ZWECK. # Siehe die GNU General Public License für weitere Details. # # Sie sollten eine Kopie der GNU General Public License zusammen mit diesem # Programm erhalten haben. Wenn nicht, siehe . # pm_utils definitely shouldn't use the sudo command as it makes it portable between root and non-root users import logging logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) ch = logging.StreamHandler() ch.setLevel(logging.DEBUG) logger.addHandler(ch) import subprocess as sp import os import sys import tempfile import re import augeas # project internal dependencies (`import python_essentials; import python_essentials.lib; import python_essentials.lib.check_os as check_os` fails at `import python_essentials.lib`; it's not necessary to import like that because package structure enforces that the imported scripts are siblings and can always be imported) import check_os import file_line_utils # indicates whether apt is up to date, i.e. 
whether `apt-get update` has been invoked already aptuptodate = False # indicates that apt sources are invalid, e.g. after packages sources have been changed @TODO: explain why in comment here apt_invalid = False dpkglock = "/var/lib/dpkg/lock" apt_get= "apt-get" add_apt_repository = "add-apt-repository" assume_yes_default = False skip_apt_update_default = False install_recommends_default=True install_suggests_default = False APT_OUTPUT_CONSOLE=1 APT_OUTPUT_TMP_FILE=2 APT_OUTPUT=APT_OUTPUT_CONSOLE PACKAGE_MANAGER_APT_GET="apt-get" ############################## # dpkg tools ############################## # @return True if package_name is installed, False otherwise def dpkg_check_package_installed(package_name): # old implementation relying on dpkg return codes (reported https://bugs.launchpad.net/ubuntu/+source/dpkg/+bug/1380326 for clearification of them) (more elegant, but in Ubuntu 14.10-beta1 return code is 0 for both installed and uninstalled packages; working dpkg of version 1.17.13ubuntu1 in Ubuntu 14.10 is the same like in Ubuntu 14.10-beta1 -> assume unexplainable anomaly, abstract implementation and make it possible to choose between returncode and output based implementation (not too elegant, but intuitive)) def __dpkg_check_package_installed_returncode__(): return_code = sp.call(["dpkg", "-s", package_name], stdout=sp.PIPE, stderr=sp.PIPE) return return_code == 0 def __dpkg_check_package_installed_output__(): dpkg_output = sp.check_output(["dpkg", "-s", package_name]) ret_value = "Package: %s\nStatus: install ok installed" % (package_name,) in dpkg_output return ret_value ret_value = __dpkg_check_package_installed_returncode__() return ret_value ############################## # apt-get tools ############################## # a wrapper around apt-get dist-upgrade to use the internal aptuptodate flag def upgrade(package_manager=PACKAGE_MANAGER_APT_GET , assume_yes=assume_yes_default, skip_apt_update=skip_apt_update_default, install_recommends=install_recommends_default, install_suggests=install_suggests_default): if package_manager == PACKAGE_MANAGER_APT_GET: aptupdate(skip_apt_update) command_list = [apt_get, "dist-upgrade"] options_command_list = __generate_apt_options_command_list__(assume_yes=assume_yes, install_recommends=install_recommends, install_suggests=install_suggests) sp.check_call(command_list+options_command_list) else: raise RuntimeError("package_manager %s not yet supported" % (package_manager,)) def install_apt_get_build_dep(packages, package_manager="apt-get", assume_yes=assume_yes_default, skip_apt_update=skip_apt_update_default): if packages == None or not type(packages) == type([]): raise Exception("packages has to be not None and a list") if len(packages) == 0: return 0 aptupdate(skip_apt_update) for package in packages: apt_get_output = sp.check_output([apt_get, "--dry-run", "build-dep", package]).strip() apt_get_output_lines = apt_get_output.split("\n") build_dep_packages = [] for apt_get_output_line in apt_get_output_lines: if apt_get_output_line.startswith(" "): build_dep_packages += re.split("[\\s]+", apt_get_output_line) build_dep_packages = [x for x in build_dep_packages if x != ""] install_packages(build_dep_packages, package_manager, assume_yes, skip_apt_update=skip_apt_update) # only checks for the specified packages, no for their recommends or suggests # @return True if all packages in packages are installed via package_manager, False otherwise def check_packages_installed(packages, package_manager="apt-get", 
skip_apt_update=skip_apt_update_default): package_managers = ["apt-get"] if package_manager == "apt-get": for package in packages: package_installed = dpkg_check_package_installed(package) if not package_installed: return False return True else: raise Exception("package_manager has to be one of "+str(package_managers)) # internal implementation notes: # - python-apt bindings have been dropped due to slow speed (dpkg queries are much faster) # quiet flag doesn't make sense because update can't be performed quietly obviously (maybe consider to switch to apt-api) # return apt-get return code or 0 if packages are already installed or packages is empty def install_packages(packages, package_manager="apt-get", assume_yes=assume_yes_default, skip_apt_update=skip_apt_update_default, install_recommends=install_recommends_default, install_suggests=install_suggests_default, preexec_fn=None): if check_packages_installed(packages, package_manager, skip_apt_update=skip_apt_update): return 0 return __package_manager_action__(packages, package_manager, ["install"], assume_yes, skip_apt_update=skip_apt_update, install_recommends=install_recommends, install_suggests=install_suggests, preexec_fn=preexec_fn) # doesn't check whether packages are installed # @return apt-get return code or 0 if packages is empty def reinstall_packages(packages, package_manager="apt-get", assume_yes=assume_yes_default, skip_apt_update=skip_apt_update_default, stdout=None, preexec_fn=None): return __package_manager_action__(packages, package_manager, ["--reinstall", "install"], assume_yes, skip_apt_update=skip_apt_update,stdout=None, preexec_fn=preexec_fn) # @return apt-get return code oder 0 if none in packages is installed or packages is empty def remove_packages(packages, package_manager="apt-get", assume_yes=assume_yes_default, skip_apt_update=skip_apt_update_default, preexec_fn=None): return __package_manager_action__(packages, package_manager, ["remove"], assume_yes, skip_apt_update=skip_apt_update, preexec_fn=preexec_fn) def __generate_apt_options_command_list__(assume_yes=assume_yes_default, install_recommends=install_recommends_default, install_suggests=install_suggests_default): # apt-get installs recommended packages by default, therefore the # option to deactivate it is negative (--no-install-recommends), while # the option to install suggests is option, there the option is positive command_list = [] if not install_recommends: command_list.append("--no-install-recommends") if install_suggests: command_list.append("--install-suggests") if assume_yes: command_list.append("--assume-yes") return command_list def __package_manager_action__(packages, package_manager, package_manager_action, assume_yes, skip_apt_update=skip_apt_update_default, stdout=None, install_recommends=install_recommends_default, install_suggests=install_suggests_default, preexec_fn=None): """quiet flag doesn't make sense because update can't be performed quietly obviously (maybe consider to switch to apt-api) @args packages a list of command to be inserted after the package manager command and default options and before the package list @args preexec_fn a function to be passed to the `preexec_fn` argument of `subprocess.Popen` """ if not "" == str(type(packages)) and str(type(packages)) != "": raise ValueError("packages isn't a list") if len(packages) == 0: return 0 if package_manager == "apt-get": aptupdate(skip_apt_update) command_list = [apt_get] options_command_list = __generate_apt_options_command_list__(assume_yes=assume_yes, 
install_recommends=install_recommends, install_suggests=install_suggests) sp.check_call(command_list+options_command_list+package_manager_action+packages, preexec_fn=preexec_fn) elif package_manager == "yast2": sp.check_call(["/sbin/yast2", "--"+package_manager_action]+packages, preexec_fn=preexec_fn) # yast2 doesn't accept string concatenation of packages with blank, but the passed list (it's acutually better style...) elif package_manager == "zypper": sp.check_call(["zypper", package_manager_action]+packages, preexec_fn=preexec_fn) elif package_manager == "equo": sp.check_call(["equo", package_manager_action]+packages, preexec_fn=preexec_fn) else: raise ValueError(str(package_manager)+" is not a supported package manager") # implementation notes: # - don't return catched exceptions, but just throw them (that's what exceptions are for) # - not a good idea to redirect output of subcommand other than update to file because interaction (e.g. choosing default display manager, comparing config file versions) is useful and necessary and suppressed when redirecting to file # - checking availability of apt lock programmatically causes incompatibility with invokation without root privileges -> removed # updates apt using apt-get update command. Throws exceptions as specified by sp.check_call if the command fails def invalidate_apt(): logger.debug("invalidating apt status (update forced at next package manager action)") global apt_invalid apt_invalid = True global aptuptodate aptuptodate = False # updates apt using sp.check_call, i.e. caller has to take care to handle exceptions which happen during execution def aptupdate(skip=skip_apt_update_default, force=False): global aptuptodate if (not aptuptodate and not skip) or force or apt_invalid: print("updating apt sources") apt_stdout = None if APT_OUTPUT == APT_OUTPUT_TMP_FILE: apt_get_update_log_file_tuple = tempfile.mkstemp("libinstall_apt_get_update.log") logger.info ("logging output of apt-get update to %s" % apt_get_update_log_file_tuple[1]) apt_stdout = apt_get_update_log_file_tuple[0] sp.check_call([apt_get, "--quiet", "update"], stdout=apt_stdout) aptuptodate = True ################################################################################ # apt source entries # ################################################################################ valid_source_line_types = ["deb", "deb-src"] def __validate_apt_source_function_params_augeas_root__(augeas_root, sources_dir_path, sources_file_path): if augeas_root is None: raise ValueError("augeas_root mustn't be None") if not os.path.exists(augeas_root): raise ValueError("augeas_root '%s' doesn't exist" % (augeas_root,)) if not os.path.isdir(augeas_root): raise ValueError("augeas_root '%s' isn't a directory, but has to be" % (augeas_root)) if not os.path.exists(sources_dir_path): raise ValueError("sources_dir_path '%s' doesn't exist" % (sources_dir_path,)) if not os.path.exists(sources_file_path): raise ValueError("sources_file_path '%s' doesn't exist" % (sources_file_path,)) def __validate_apt_source_function_params_type__(the_type): if not the_type in valid_source_line_types: raise ValueError("the_type '%s' isn't a valid source line type (has to be one of %s)" % (the_type, valid_source_line_types)) # Avoids the weakness of add-apt-repository command to add commented duplicates of lines which are already present by not adding those at all. # @args uri the URI of the apt line # @args component the component to be served (e.g. main) # @args distribution the distribution of the entry (e.g. 
trusty for an Ubuntu 14.04 system) # @args the_type the type of the entry (usually deb or deb-src) def check_apt_source_line_added(uri, component, distribution, the_type, augeas_root="/",): sources_dir_path= os.path.join(augeas_root, "etc/apt/sources.list.d") sources_file_path= os.path.join(augeas_root, "etc/apt/sources.list") __validate_apt_source_function_params_augeas_root__(augeas_root, sources_dir_path, sources_file_path) __validate_apt_source_function_params_type__(the_type) a = augeas.Augeas(root=augeas_root) # workaround missing loop label feature with inner function def __search__(): for sources_dir_file in [os.path.join(sources_dir_path, x) for x in os.listdir(sources_dir_path)]: commented_in_lines = a.match("/files/%s/*" % (os.path.relpath(sources_dir_file,augeas_root),)) # match doesn't return lines with comments; it doesn't matter whether ppa_sources_d_file starts with / for the match statement for commented_in_line in commented_in_lines: if a.get("%s/uri" % (commented_in_line,)) == uri and a.get("%s/component" % (commented_in_line,)) == component and a.get("%s/distribution" % (commented_in_line,)) == distribution and a.get("%s/type" % (commented_in_line,)) == the_type: return True return False match_found = __search__() if match_found: return True if not match_found: # only search in source.list if not found because there's no need to do # any validation in this this function commented_in_lines = a.match("/files/%s/*" % (os.path.relpath(sources_file_path,augeas_root),)) # match doesn't return lines with comments; it doesn't matter whether ppa_sources_d_file starts with / for the match statement match_found = False for commented_in_line in commented_in_lines: if a.get("%s/uri" % (commented_in_line,)) == uri and a.get("%s/component" % (commented_in_line,)) == component and a.get("%s/distribution" % (commented_in_line,)) == distribution and a.get("%s/type" % (commented_in_line,)) == the_type: a.close() return True a.close() return False # adds an entry based on uri, component, # distribution and the_type to etc/apt/sources.list # relatively to augeas_root. # # Consider using add-apt-repository or manually adding an entry in a separate # file into /etc/apt/sources.d/. # # There's no validation of parameters except augeas_root, i.e. this # might mess up your sources.list file quite easily. def add_apt_source_line(uri, component, distribution, the_type, augeas_root="/",): sources_dir_path= os.path.join(augeas_root, "etc/apt/sources.list.d") sources_file_path= os.path.join(augeas_root, "etc/apt/sources.list") __validate_apt_source_function_params_augeas_root__(augeas_root, sources_dir_path, sources_file_path) __validate_apt_source_function_params_type__(the_type) a = augeas.Augeas(root=augeas_root) a.set("/files/etc/apt/sources.list/01/distribution", distribution) # checkout http://augeas.net/tour.html if you find the 01 label for adding entries confusing (it simply is...) a.set("/files/etc/apt/sources.list/01/type", the_type) a.set("/files/etc/apt/sources.list/01/uri", uri) a.set("/files/etc/apt/sources.list/01/component", component) a.save() a.close() PKVG+2python_essentials/lib/python_essentials_globals.py#!/usr/bin/python # -*- coding: utf-8 -*- # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. 
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# The file provides programmatic default values as python constants. It isn't
# necessary to read the values from a config file which is maintained in the
# source tree, but this way we have a default config file without any further
# effort.
import os
import check_os

if check_os.check_python3():
    import configparser
    config = configparser.ConfigParser()
else:
    import ConfigParser
    config = ConfigParser.ConfigParser()

defaults_config_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
    "python-essentials.cfg")
config.read(defaults_config_file_path)

osm_postgis_dir_path = os.path.join(os.environ["HOME"], "osm_postgis_db-9.2") # config.get("pathes", "osm_postgis_dir_path")
osm_postgis_version = (9,2) # config.get("versions", "osm_postgis_version")

python_essentials-0.0.0.dist-info/METADATA
Metadata-Version: 2.0
Name: python-essentials
Version: 0.0.0
Summary: Basic python scripts and a wrapper around osm2pgsql which incorporates database creation and associated modularized library functions
Home-page: https://github.com/krichter722/python-essentials
Author: Karl-Philipp Richter
Author-email: krichter722@aol.de
License: GPLv3
Keywords: OSM postgresql osm2pgsql
Requires-Dist: plac (>=0.9.1)
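# Tying the defaults in python_essentials_globals together with create_config_parser from
# python_essentials/lib/python_essentials.py: a minimal sketch. The helper name get_with_default
# and the fallback logic are illustrative assumptions, not part of this package's API; the
# section/option names are the ones shown in the commented-out lookups above. Sibling-style
# imports are used here, matching the lib modules; when installed as a package the modules live
# under python_essentials.lib instead.
import ConfigParser

import python_essentials_globals
from python_essentials import create_config_parser

def get_with_default(config, section, option, default):
    # back every ConfigParser.get up with a programmatic default, as recommended above
    try:
        return config.get(section, option)
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        return default

config = create_config_parser()
osm_postgis_dir_path = get_with_default(config, "pathes", "osm_postgis_dir_path",
    python_essentials_globals.osm_postgis_dir_path)
osm_postgis_version = get_with_default(config, "versions", "osm_postgis_version",
    python_essentials_globals.osm_postgis_version)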