PKƉ&HME$pgpm/settings.pyfrom pgpm.lib.utils import config from pgpm.lib import version __version__ = version.__version__ PGPM_SCHEMA_NAME = '_pgpm' PGPM_SCHEMA_SUBCLASS = 'basic' PGPM_VERSION = config.Version(__version__, config.VersionTypes.python) MIGRATIONS_FOLDER_NAME = 'lib/db_scripts/migrations' CONFIG_FILE_NAME = 'config.json' LOGGING_FORMATTER = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' PKɅrHÌ|| pgpm/app.py#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Deployment script that will deploy Postgres schemas to a given DB Copyright (c) Affinitas GmbH Usage: pgpm deploy ( | set ([--except] [...]) [-u | --user ]) [-m | --mode ] [-o | --owner ] [--usage ...] [-f ...] [--add-config ] [--debug-mode] [--vcs-ref ] [--vcs-link ] [--issue-ref ] [--issue-link ] [--compare-table-scripts-as-int] [--log-file ] [--global-config ] [--auto-commit] [--send-email] pgpm execute ( | set ([--except] [...]) --query [-u | --user ]) [--until-zero] [--log-file ] [--debug-mode] [--global-config ] pgpm remove --pkg-name [--old-rev ] [--log-file ] pgpm install ( | set ([--except] [...]) [-u | --user ]) [--upgrade] [--debug-mode] [--usage ...] [--log-file ] [--global-config ] pgpm uninstall ( | set [-u | --user ]) [--log-file ] [--global-config ] [--debug-mode] pgpm list set ([--except] [...]) [--log-file ] [--global-config ] pgpm -h | --help pgpm -v | --version Arguments: Connection string to postgres database. Can be in any format psycopg2 would understand it Name of an environment to be used to get connection strings from global-config file Name of a product. E.g. ed_live Unique name that identifies DB within the set Options: -h --help Show this screen. -v --version Show version. -f ..., --file ... Use it if you want to deploy only specific files (functions, types, etc). In that case these files if exist will be overridden. Should be followed by the list of names of files to deploy. -o , --owner Role to which schema owner and all objects inside will be changed. User connecting to DB needs to be a superuser. If omitted, user running the script will be the owner of schema and all objects inside If --mode flag is *overwrite* or --file flag is used then this is ignored as no new schema is created During installation a mandatory parameter to install on behalf of the specified user -u , --user User name that connects to DB. Make sense only when used with `set` option. If omitted than os user name is used. --usage ... Roles to which different usage privileges will be applied. If omitted, default behaviour of DB applies In case used with install command the following will be applied: GRANT SELECT, INSERT, UPDATE, DELETE on all current tables and by default in the future GRANT EXECUTE on all current functions and by default in the future GRANT USAGE, SELECT on all current sequences and by default in the future In case used with deploy command the following will be applied: GRANT USAGE on the schema GRANT EXECUTE on all current functions -m , --mode Deployment mode. Can be: * safe. Add constraints to deployment. Will not deploy schema if it already exists in the DB Will not deploy table script if it was deployed before (checking by filename) * moderate. If schema exists, will try to rename it by adding suffix "_" and deploy new schema with old name * unsafe. Allows cascade deleting of schema if it exists and adding new one Allows redeploying of deploy script even if was deployed before * overwrite. Will run scripts overwriting existing ones. User have to make sure that overwriting is possible. E.g. if type exists, rewriting should be preceded with dropping it first manually [default: safe] --add-config Provides path to additional config file. Attributes of this file overwrite config.json --debug-mode Debug level logging enabled if command is present. Otherwise Info level --upgrade Update pgpm to a newer version --vcs-ref Adds vcs reference to deployments log table --vcs-link Adds link to repository to deployments log table --issue-ref Adds issue reference to deployments log table --issue-link Adds link to issue tracking system to deployments log table --pkg-name Package name to be removed --old-rev If omitted all old revisions are deleted together with current revision. If specified just the specified revision is deleted --compare-table-scripts-as-int Flag says that when table scripts are running they should be ordered but first their names are to be converted to int. By default scripts are ordered by string comparison --log-file Log into a specified file. If not specified, logs are ignored --global-config path to a global-config file. If global gonfig exists also in ~/.pgpmconfig file then two dicts are merged (file formats are JSON). --send-email Send mail about deployment. Works only if email block exists in global config """ import logging import os import smtplib from pprint import pprint import pgpm.lib.install import pgpm.lib.deploy import pgpm.lib.execute import pgpm.lib.utils.config import pgpm.lib.utils.db import pgpm.lib.utils.vcs import sys import colorama import getpass import pgpm.utils.config import pgpm.utils.issue_trackers from docopt import docopt from pgpm import settings # getting logging logger = logging.getLogger(__name__) def main(): arguments = docopt(__doc__, version=settings.__version__) colorama.init() # setting logging formatter = logging.Formatter(settings.LOGGING_FORMATTER) if arguments['--debug-mode']: logger_level = logging.DEBUG else: logger_level = logging.INFO logger.setLevel(logger_level) if arguments['--log-file']: handler = logging.FileHandler(os.path.abspath(os.path.expanduser(arguments['--log-file']))) handler.setLevel(logger_level) handler.setFormatter(formatter) logger.addHandler(handler) else: handler = logging.StreamHandler() handler.setLevel(logger_level) handler.setFormatter(formatter) logger.addHandler(handler) # check if connecting user is set otherwise take os user connection_user = getpass.getuser() if arguments['--user']: connection_user = arguments['--user'][0] owner_role = None if arguments['--owner']: owner_role = arguments['--owner'][0] usage_roles = None if arguments['--usage']: usage_roles = arguments['--usage'] sys.stdout.write('\033[2J\033[0;0H') if arguments['install']: if arguments['--global-config']: extra_config_file = arguments['--global-config'] else: extra_config_file = None global_config = pgpm.utils.config.GlobalConfiguration('~/.pgpmconfig', extra_config_file) connections_list = global_config.get_list_connections(arguments[''], arguments[''], arguments[''], arguments['--except']) if arguments['set']: if len(connections_list) > 0: for connection_dict in connections_list: connection_string = 'host=' + connection_dict['host'] + ' port=' + str(connection_dict['port']) + \ ' dbname=' + connection_dict['dbname'] + ' user=' + connection_user _install_schema(connection_string, arguments['--usage'], arguments['--upgrade']) else: _emit_no_set_found(arguments[''], arguments['']) else: _install_schema(arguments[''], arguments['--usage'], arguments['--upgrade']) elif arguments['uninstall']: if arguments['--global-config']: extra_config_file = arguments['--global-config'] else: extra_config_file = None global_config = pgpm.utils.config.GlobalConfiguration('~/.pgpmconfig', extra_config_file) connections_list = global_config.get_list_connections(arguments[''], arguments[''], arguments[''], arguments['--except']) if arguments['set']: if len(connections_list) > 0: for connection_dict in connections_list: connection_string = 'host=' + connection_dict['host'] + ' port=' + str(connection_dict['port']) + \ ' dbname=' + connection_dict['dbname'] + ' user=' + connection_user _uninstall_schema(connection_string) else: _emit_no_set_found(arguments[''], arguments['']) else: _uninstall_schema(arguments['']) elif arguments['execute']: if arguments['--global-config']: extra_config_file = arguments['--global-config'] else: extra_config_file = None global_config = pgpm.utils.config.GlobalConfiguration('~/.pgpmconfig', extra_config_file) connections_list = global_config.get_list_connections(arguments[''], arguments[''], arguments[''], arguments['--except']) if arguments['set']: if len(connections_list) > 0: for connection_dict in connections_list: connection_string = 'host=' + connection_dict['host'] + ' port=' + str(connection_dict['port']) + \ ' dbname=' + connection_dict['dbname'] + ' user=' + connection_user _execute(connection_string, arguments['--query'], arguments['--until-zero']) else: _emit_no_set_found(arguments[''], arguments['']) else: _execute(arguments[''], arguments['--query'], arguments['--until-zero']) elif arguments['deploy']: deploy_result = {} if arguments['--global-config']: extra_config_file = arguments['--global-config'] else: extra_config_file = None global_config = pgpm.utils.config.GlobalConfiguration('~/.pgpmconfig', extra_config_file) connections_list = global_config.get_list_connections(arguments[''], arguments[''], arguments[''], arguments['--except']) config_dict = {} if owner_role: config_dict['owner_role'] = owner_role if usage_roles: config_dict['usage_roles'] = usage_roles config_object = pgpm.lib.utils.config.SchemaConfiguration( os.path.abspath(settings.CONFIG_FILE_NAME), config_dict, os.path.abspath('.')) if arguments['set']: if len(connections_list) > 0: target_names_list = [] for connection_dict in connections_list: connection_string = 'host=' + connection_dict['host'] + ' port=' + str(connection_dict['port']) + \ ' dbname=' + connection_dict['dbname'] + ' user=' + connection_user deploy_result = _deploy_schema(connection_string, mode=arguments['--mode'][0], files_deployment=arguments['--file'], vcs_ref=arguments['--vcs-ref'], vcs_link=arguments['--vcs-link'], issue_ref=arguments['--issue-ref'], issue_link=arguments['--issue-link'], compare_table_scripts_as_int=arguments['--compare-table-scripts-as-int'], auto_commit=arguments['--auto-commit'], config_object=config_object) if 'unique_name' in connection_dict and connection_dict['unique_name']: target_names_list.append(connection_dict['unique_name']) else: target_names_list.append(connection_dict['dbname']) if deploy_result['deployed_files_count'] > 0: target_str = 'environment: ' + connections_list[0]['environment'] + ', product: ' + \ connections_list[0]['product'] + ', DBs: ' + ', '.join(target_names_list) if arguments['--issue-ref'] and ('issue-tracker' in global_config.global_config_dict): _comment_issue_tracker(arguments, global_config, target_str, config_object, deploy_result) if arguments['--send-email'] and ('email' in global_config.global_config_dict): _send_mail(arguments, global_config, target_str, config_object, deploy_result) else: _emit_no_set_found(arguments[''], arguments['']) else: deploy_result = _deploy_schema(arguments[''], mode=arguments['--mode'][0], files_deployment=arguments['--file'], vcs_ref=arguments['--vcs-ref'], vcs_link=arguments['--vcs-link'], issue_ref=arguments['--issue-ref'], issue_link=arguments['--issue-link'], compare_table_scripts_as_int=arguments['--compare-table-scripts-as-int'], auto_commit=arguments['--auto-commit'], config_object=config_object) if deploy_result['deployed_files_count'] > 0: conn_parsed = pgpm.lib.utils.db.parse_connection_string_psycopg2(arguments['']) target_str = 'host: ' + conn_parsed['host'] + ', DB: ' + conn_parsed['dbname'] if arguments['--issue-ref'] and ('issue-tracker' in global_config.global_config_dict): _comment_issue_tracker(arguments, global_config, target_str, config_object, deploy_result) if arguments['--send-email'] and ('email' in global_config.global_config_dict): _send_mail(arguments, global_config, target_str, config_object, deploy_result) elif arguments['list']: if arguments['set']: if arguments['--global-config']: extra_config_file = arguments['--global-config'] else: extra_config_file = None global_config = pgpm.utils.config.GlobalConfiguration('~/.pgpmconfig', extra_config_file) connections_list = global_config.get_list_connections(arguments[''], arguments['']) if len(connections_list) > 0: pprint(global_config.get_list_connections(arguments[''], arguments[''], arguments[''], arguments['--except'])) else: _emit_no_set_found(arguments[''], arguments['']) else: print(arguments) def _install_schema(connection_string, user, upgrade): logger.info('Installing... {0}'.format(connection_string)) sys.stdout.write(colorama.Fore.YELLOW + 'Installing...' + colorama.Fore.RESET + ' | ' + connection_string) sys.stdout.flush() installation_manager = pgpm.lib.install.InstallationManager(connection_string, '_pgpm', 'basic', logger) try: installation_manager.install_pgpm_to_db(user, upgrade) except: print('\n') print('Something went wrong, check the logs. Aborting') print(sys.exc_info()[0]) print(sys.exc_info()[1]) print(sys.exc_info()[2]) raise sys.stdout.write('\033[2K\r' + colorama.Fore.GREEN + 'Installed' + colorama.Fore.RESET + ' | ' + connection_string) sys.stdout.write('\n') logger.info('Successfully installed {0}'.format(connection_string)) return 0 def _uninstall_schema(connection_string): logger.info('Uninstalling pgpm... {0}'.format(connection_string)) sys.stdout.write(colorama.Fore.YELLOW + 'Uninstalling pgpm...' + colorama.Fore.RESET + ' | ' + connection_string) sys.stdout.flush() installation_manager = pgpm.lib.install.InstallationManager(connection_string, '_pgpm', 'basic', logger) try: installation_manager.uninstall_pgpm_from_db() except: print('\n') print('Something went wrong, check the logs. Aborting') print(sys.exc_info()[0]) print(sys.exc_info()[1]) print(sys.exc_info()[2]) raise sys.stdout.write('\033[2K\r' + colorama.Fore.GREEN + 'Uninstalled pgpm' + colorama.Fore.RESET + ' | ' + connection_string) sys.stdout.write('\n') logger.info('Successfully uninstalled pgpm from {0}'.format(connection_string)) return 0 def _deploy_schema(connection_string, mode, files_deployment, vcs_ref, vcs_link, issue_ref, issue_link, compare_table_scripts_as_int, auto_commit, config_object): deploy_result = {} deploying = 'Deploying...' deployed_files = 'Deployed {0} files out of {1}' logger.info('Deploying... {0}'.format(connection_string)) sys.stdout.write(colorama.Fore.YELLOW + deploying + colorama.Fore.RESET + ' | ' + connection_string) sys.stdout.flush() deployment_manager = pgpm.lib.deploy.DeploymentManager( connection_string=connection_string, source_code_path=os.path.abspath('.'), config_object=config_object, pgpm_schema_name='_pgpm', logger=logger) try: deploy_result = deployment_manager.deploy_schema_to_db( mode=mode, files_deployment=files_deployment, vcs_ref=vcs_ref, vcs_link=vcs_link, issue_ref=issue_ref, issue_link=issue_link, compare_table_scripts_as_int=compare_table_scripts_as_int, auto_commit=auto_commit) except: print('\n') print('Something went wrong, check the logs. Aborting') print(sys.exc_info()[0]) print(sys.exc_info()[1]) print(sys.exc_info()[2]) raise if deploy_result['code'] == deployment_manager.DEPLOYMENT_OUTPUT_CODE_OK \ and deploy_result['deployed_files_count'] == deploy_result['requested_files_count']: sys.stdout.write('\033[2K\r' + colorama.Fore.GREEN + deployed_files.format(deploy_result['deployed_files_count'], deploy_result['requested_files_count']) + colorama.Fore.RESET + ' | ' + connection_string) sys.stdout.write('\n') logger.info('Successfully deployed {0}'.format(connection_string)) elif deploy_result['code'] == deployment_manager.DEPLOYMENT_OUTPUT_CODE_NOT_ALL_DEPLOYED: sys.stdout.write('\033[2K\r' + colorama.Fore.YELLOW + deployed_files.format(deploy_result['deployed_files_count'], deploy_result['requested_files_count']) + colorama.Fore.RESET + ' | ' + connection_string) sys.stdout.write('\n') logger.warning('Not all files were deployed {0}'.format(connection_string)) return deploy_result def _execute(connection_string, query, until_zero=False): calling = 'Executing query {0}...'.format(query) called = 'Executed query {0} '.format(query) logger.info('Deploying... {0}'.format(connection_string)) sys.stdout.write(colorama.Fore.YELLOW + calling + colorama.Fore.RESET + ' | ' + connection_string) sys.stdout.flush() query_manager = pgpm.lib.execute.QueryExecutionManager( connection_string=connection_string, logger=logger) try: query_manager.execute(query, until_zero=until_zero) except: print('\n') print('Something went wrong, check the logs. Aborting') print(sys.exc_info()[0]) print(sys.exc_info()[1]) print(sys.exc_info()[2]) raise sys.stdout.write('\033[2K\r' + colorama.Fore.GREEN + called + colorama.Fore.RESET + ' | ' + connection_string) sys.stdout.write('\n') logger.info('Successfully deployed {0}'.format(connection_string)) return 0 def _emit_no_set_found(environment_name, product_name): """ writes to std out and logs if no connection string is found for deployment :param environment_name: :param product_name: :return: """ sys.stdout.write(colorama.Fore.YELLOW + 'No connections found in global config file ' 'in environment: {0} for product: {1}' .format(environment_name, product_name) + colorama.Fore.RESET) sys.stdout.write('\n') logger.warning('No connections found in environment: {0} for product: {1}' .format(environment_name, product_name)) def _send_mail(arguments, global_config, target_string, config_object, deploy_result): if global_config.global_config_dict['email']['type'] == "SMTP": logger.info('Sending an email about deployment') issue_ref_subject_text = '' issue_ref_body_text = '' if arguments['--issue-ref']: issue_ref_subject_text = "({0})".format(arguments['--issue-ref']) if ("issue-tracker" in global_config.global_config_dict) and \ ("issue-path-template" in global_config.global_config_dict["issue-tracker"]): issue_ref_body_text = \ "({0})".format(global_config.global_config_dict["issue-tracker"]["issue-path-template"] .format(issue_ref=arguments['--issue-ref'])) else: issue_ref_body_text = "({0})".format(arguments['--issue-ref']) _schema_row = '' if config_object.scope == config_object.SCHEMA_SCOPE: _schema_row += 'Schema name' if config_object.subclass == config_object.BASIC_SUBCLASS: _schema_row += config_object.name elif config_object.subclass == config_object.VERSIONED_SUBCLASS: _schema_row += config_object.name + '_' + config_object.version.to_string() _schema_row += '' _files_row = '' if arguments['--file']: _files_row += 'Files deployed' if deploy_result['function_scripts_deployed']: _files_row += ',
'.join(deploy_result['function_scripts_deployed']) if deploy_result['type_scripts_deployed']: _files_row += ',
'.join(deploy_result['type_scripts_deployed']) if deploy_result['view_scripts_deployed']: _files_row += ',
'.join(deploy_result['view_scripts_deployed']) if deploy_result['trigger_scripts_deployed']: _files_row += ',
'.join(deploy_result['trigger_scripts_deployed']) if deploy_result['table_scripts_deployed']: _files_row += ',
'.join(deploy_result['table_scripts_deployed']) _files_row += '' else: _files_row += 'Files deployedall' _git_commit_row = '' _git_repo_row = '' if pgpm.lib.utils.vcs.is_git_directory(os.path.abspath('.')): _git_repo_row += 'GIT repo' _git_repo_row += pgpm.lib.utils.vcs.get_git_remote(os.path.abspath('.')) _git_repo_row += '' if not arguments['--vcs-ref']: _git_commit_row += 'GIT commit' _git_commit_row += pgpm.lib.utils.vcs.get_git_revision_hash(os.path.abspath('.')) _git_commit_row += '' pkg_desc_text = """ {1} {2} {3} {4}
Package Name {0}
""".format(config_object.name, _schema_row, _files_row, _git_repo_row, _git_commit_row) mail_message = "From: {0}\r\nTo: {1}\r\nMIME-Version: 1.0\r\nContent-type: text/html\r\nSubject: {2}\r\n{3}"\ .format(global_config.global_config_dict['email']['from'], global_config.global_config_dict['email']['to'], global_config.global_config_dict['email']['subject'] .format(issue_ref=issue_ref_subject_text, target_subject=target_string), global_config.global_config_dict['email']['body'] .format(issue_ref=issue_ref_body_text, target=target_string, package_description=pkg_desc_text)) smtp = smtplib.SMTP(global_config.global_config_dict['email']['host'], global_config.global_config_dict['email']['port']) # smtp.set_debuglevel(1) if global_config.global_config_dict['email']['TLS'] == True: smtp.starttls() if 'credentials' in global_config.global_config_dict['email']: try: smtp.login(global_config.global_config_dict['email']['credentials']['username'], global_config.global_config_dict['email']['credentials']['password']) except smtplib.SMTPAuthenticationError: logger.warning('SMTP authentication failed though required in the global config. ' 'Will try to send email without authentication') smtp.sendmail(global_config.global_config_dict['email']['from'], global_config.global_config_dict['email']['to'], mail_message) smtp.quit() def _comment_issue_tracker(arguments, global_config, target_string, config_object, deploy_result): if global_config.global_config_dict['issue-tracker']['type'] == "JIRA": logger.info('Leaving a comment to JIRA issue {0} about deployment'.format(arguments['--issue-ref'])) jira = pgpm.utils.issue_trackers.Jira(global_config.global_config_dict['issue-tracker']['url'], logger) _schema_row = '' if config_object.scope == config_object.SCHEMA_SCOPE: _schema_row += '\n||Schema name|' if config_object.subclass == config_object.BASIC_SUBCLASS: _schema_row += config_object.name elif config_object.subclass == config_object.VERSIONED_SUBCLASS: _schema_row += config_object.name + '_' + config_object.version.to_string() _schema_row += '|' _files_row = '' if arguments['--file']: _files_row += '\n||Files deployed|' if deploy_result['function_scripts_deployed']: _files_row += ',\n'.join(deploy_result['function_scripts_deployed']) if deploy_result['type_scripts_deployed']: _files_row += ',\n'.join(deploy_result['type_scripts_deployed']) if deploy_result['view_scripts_deployed']: _files_row += ',\n'.join(deploy_result['view_scripts_deployed']) if deploy_result['trigger_scripts_deployed']: _files_row += ',\n'.join(deploy_result['trigger_scripts_deployed']) if deploy_result['table_scripts_deployed']: _files_row += ',\n'.join(deploy_result['table_scripts_deployed']) _files_row += '|' else: _files_row += '\n||Files deployed|all|' _git_commit_row = '' _git_repo_row = '' if pgpm.lib.utils.vcs.is_git_directory(os.path.abspath('.')): _git_repo_row += '\n||GIT repo|' _git_repo_row += pgpm.lib.utils.vcs.get_git_remote(os.path.abspath('.')) _git_repo_row += '|' if not arguments['--vcs-ref']: _git_commit_row += '\n||GIT commit|' _git_commit_row += pgpm.lib.utils.vcs.get_git_revision_hash(os.path.abspath('.')) _git_commit_row += '|' comment_body = global_config.global_config_dict['issue-tracker']['comment-body']\ .format(pkg_name=config_object.name, target=target_string, schema=_schema_row, files=_files_row, git_repo=_git_repo_row, git_commit=_git_commit_row) jira.call_jira_rest("/issue/" + arguments['--issue-ref'] + "/comment", global_config.global_config_dict['issue-tracker']['username'], global_config.global_config_dict['issue-tracker']['password'], "POST", {"body": comment_body}) logger.info('Jira comment done') if __name__ == '__main__': main() PKƉ&Hpgpm/__init__.pyPKnuH1pgpm/lib/version.py__version__ = '0.1.58' PKɅrH**pgpm/lib/install.pyimport logging import pkgutil import distutils.version import sys import pkg_resources import psycopg2 import re import pgpm.lib.abstract_deploy import pgpm.lib.utils import pgpm.lib.utils.db import pgpm.lib.utils.misc import pgpm.lib.version import pgpm.lib.utils.config class InstallationManager(pgpm.lib.abstract_deploy.AbstractDeploymentManager): """ Class that will manage pgpm installation """ def __init__(self, connection_string, pgpm_schema_name='_pgpm', pgpm_schema_subclass='basic', logger=None): """ initialises the manager and connects to the DB :param connection_string: connection string consumable by DBAPI 2.0 :param logger: logger object """ super(InstallationManager, self).__init__(connection_string, pgpm_schema_name, logger) self._main_module_name = 'pgpm' self._pgpm_schema_subclass = pgpm_schema_subclass def install_pgpm_to_db(self, user_roles, upgrade=False): """ Installs package manager """ if self._conn.closed: self._conn = psycopg2.connect(self._connection_string, connection_factory=pgpm.lib.utils.db.MegaConnection) cur = self._conn.cursor() # get pgpm functions scripts_dict = pgpm.lib.utils.misc.collect_scripts_from_sources('lib/db_scripts/functions', False, '.', True, self._logger) # get current user cur.execute(pgpm.lib.utils.db.SqlScriptsHelper.current_user_sql) current_user = cur.fetchone()[0] # check if current user is a super user cur.execute(pgpm.lib.utils.db.SqlScriptsHelper.is_superuser_sql) is_cur_superuser = cur.fetchone()[0] if not is_cur_superuser: self._logger.debug('User {0} is not a superuser. It is recommended that you connect as superuser ' 'when installing pgpm as some operation might need superuser rights' .format(current_user)) # Create schema if it doesn't exist if pgpm.lib.utils.db.SqlScriptsHelper.schema_exists(cur, self._pgpm_schema_name): # Executing pgpm functions if len(scripts_dict) > 0: self._logger.info('Running functions definitions scripts') self._logger.debug(scripts_dict) pgpm.lib.utils.db.SqlScriptsHelper.set_search_path(cur, self._pgpm_schema_name) for key, value in scripts_dict.items(): if value: cur.execute(value) self._logger.debug('Functions loaded to schema {0}'.format(self._pgpm_schema_name)) else: self._logger.debug('No function scripts to deploy') self._conn.commit() # check installed version of _pgpm schema. pgpm_v_db_tuple = pgpm.lib.utils.db.SqlScriptsHelper.get_pgpm_db_version(cur, self._pgpm_schema_name) pgpm_v_db = distutils.version.StrictVersion(".".join(pgpm_v_db_tuple)) pgpm_v_script = distutils.version.StrictVersion(pgpm.lib.version.__version__) if pgpm_v_script > pgpm_v_db: if upgrade: self._migrate_pgpm_version(cur, pgpm_v_db, pgpm_v_script, True) else: self._migrate_pgpm_version(cur, pgpm_v_db, pgpm_v_script, False) elif pgpm_v_script < pgpm_v_db: self._logger.error('Deployment script\'s version is lower than the version of {0} schema ' 'installed in DB. Update pgpm script first.'.format(self._pgpm_schema_name)) self._conn.close() sys.exit(1) else: self._logger.error('Can\'t install pgpm as schema {0} already exists'.format(self._pgpm_schema_name)) self._conn.close() sys.exit(1) else: # Prepare and execute preamble deployment_script_preamble = pkgutil.get_data(self._main_module_name, 'lib/db_scripts/deploy_prepare_config.sql') self._logger.info('Executing a preamble to install statement') cur.execute(deployment_script_preamble) # Python 3.x doesn't have format for byte strings so we have to convert install_script = pkgutil.get_data(self._main_module_name, 'lib/db_scripts/install.tmpl.sql').decode('utf-8') self._logger.info('Installing package manager') cur.execute(install_script.format(schema_name=self._pgpm_schema_name)) migration_files_list = sorted(pkg_resources.resource_listdir(self._main_module_name, 'lib/db_scripts/migrations/'), key=lambda filename: distutils.version.StrictVersion(filename.split('-')[0])) # Executing pgpm functions if len(scripts_dict) > 0: self._logger.info('Running functions definitions scripts') self._logger.debug(scripts_dict) pgpm.lib.utils.db.SqlScriptsHelper.set_search_path(cur, self._pgpm_schema_name) for key, value in scripts_dict.items(): cur.execute(value) self._logger.debug('Functions loaded to schema {0}'.format(self._pgpm_schema_name)) else: self._logger.debug('No function scripts to deploy') # Executing migration scripts after as they contain triggers that trigger functions that were created on top for file_info in migration_files_list: # Python 3.x doesn't have format for byte strings so we have to convert migration_script = pkg_resources.resource_string(self._main_module_name, 'lib/db_scripts/migrations/{0}'.format(file_info))\ .decode('utf-8').format(schema_name=self._pgpm_schema_name) self._logger.debug('Running version upgrade script {0}'.format(file_info)) self._logger.debug(migration_script) cur.execute(migration_script) cur.callproc('_add_migration_info', ['0.0.7', pgpm.lib.version.__version__]) self._conn.commit() # check if users of pgpm are specified if not user_roles: self._logger.debug('No user was specified to have permisions on _pgpm schema. ' 'This means only user that installed _pgpm will be able to deploy. ' 'We recommend adding more users.') else: # set default privilages to users pgpm.lib.utils.db.SqlScriptsHelper.grant_default_usage_install_privileges( cur, self._pgpm_schema_name, ', '.join(user_roles)) pgpm.lib.utils.db.SqlScriptsHelper.grant_usage_install_privileges( cur, self._pgpm_schema_name, ', '.join(user_roles)) pgpm.lib.utils.db.SqlScriptsHelper.set_search_path(cur, self._pgpm_schema_name) cur.callproc('_upsert_package_info', [self._pgpm_schema_name, self._pgpm_schema_subclass, self._pgpm_version.major, self._pgpm_version.minor, self._pgpm_version.patch, self._pgpm_version.pre, self._pgpm_version.metadata, 'Package manager for Postgres', 'MIT']) # Commit transaction self._conn.commit() self._conn.close() return 0 def uninstall_pgpm_from_db(self): """ Removes pgpm from db and all related metadata (_pgpm schema). Install packages are left as they are :return: 0 if successful and error otherwise """ drop_schema_cascade_script = 'DROP SCHEMA {schema_name} CASCADE;' if self._conn.closed: self._conn = psycopg2.connect(self._connection_string, connection_factory=pgpm.lib.utils.db.MegaConnection) cur = self._conn.cursor() # get current user cur.execute(pgpm.lib.utils.db.SqlScriptsHelper.current_user_sql) current_user = cur.fetchone()[0] # check if current user is a super user cur.execute(pgpm.lib.utils.db.SqlScriptsHelper.is_superuser_sql) is_cur_superuser = cur.fetchone()[0] if not is_cur_superuser: self._logger.debug('User {0} is not a superuser. Only superuser can remove pgpm' .format(current_user)) sys.exit(1) self._logger.debug('Removing pgpm from DB by dropping schema {0}'.format(self._pgpm_schema_name)) cur.execute(drop_schema_cascade_script.format(schema_name=self._pgpm_schema_name)) # Commit transaction self._conn.commit() self._conn.close() return 0 def _migrate_pgpm_version(self, cur, version_pgpm_db, version_pgpm_script, migrate_or_leave): """ Enact migration script from one version of pgpm to another (newer) :param cur: :param migrate_or_leave: True if migrating, False if exiting :return: """ migrations_file_re = r'^(.*)-(.*).tmpl.sql$' migration_files_list = sorted(pkg_resources.resource_listdir(self._main_module_name, 'lib/db_scripts/migrations/'), key=lambda filename: distutils.version.StrictVersion(filename.split('-')[0])) for file_info in migration_files_list: versions_list = re.compile(migrations_file_re, flags=re.IGNORECASE).findall(file_info) version_a = distutils.version.StrictVersion(versions_list[0][0]) version_b = distutils.version.StrictVersion(versions_list[0][1]) if version_pgpm_script >= version_a and version_b > version_pgpm_db: # Python 3.x doesn't have format for byte strings so we have to convert migration_script = pkg_resources.resource_string(self._main_module_name, 'lib/db_scripts/migrations/{0}'.format(file_info))\ .decode('utf-8').format(schema_name=self._pgpm_schema_name) if migrate_or_leave: self._logger.debug('Running version upgrade script {0}'.format(file_info)) self._logger.debug(migration_script) cur.execute(migration_script) self._conn.commit() pgpm.lib.utils.db.SqlScriptsHelper.set_search_path(cur, self._pgpm_schema_name) cur.callproc('_add_migration_info', [versions_list[0][0], versions_list[0][1]]) self._conn.commit() self._logger.debug('Successfully finished running version upgrade script {0}'.format(file_info)) if not migrate_or_leave: self._logger.error('{0} schema version is outdated. Please run pgpm install --upgrade first.' .format(self._pgpm_schema_name)) self._conn.close() sys.exit(1) PKɅrHvvpgpm/lib/deploy.pyimport json import logging import pkgutil import distutils.version import sys import collections import os import psycopg2 import re import sqlparse import pgpm.lib.abstract_deploy import pgpm.lib.utils import pgpm.lib.utils.db import pgpm.lib.utils.misc import pgpm.lib.version import pgpm.lib.utils.config import pgpm.lib.utils.vcs class DeploymentManager(pgpm.lib.abstract_deploy.AbstractDeploymentManager): """ Class that will manage db code deployments """ def __init__(self, connection_string, source_code_path=None, config_path=None, config_dict=None, config_object=None, pgpm_schema_name='_pgpm', logger=None): """ initialises the manager and connects to the DB :param connection_string: connection string consumable by DBAPI 2.0 :param source_code_path: path to where package is :param config_path: string or array to where config/configs are :param config_dict: dictionary with config :param config_object: SchemaConfiguration object :param logger: logger object """ super(DeploymentManager, self).__init__(connection_string, pgpm_schema_name, logger) if source_code_path: self._source_code_path = source_code_path elif config_path: self._source_code_path = os.path.dirname(config_path) if config_object: self._config = config_object else: self._config = pgpm.lib.utils.config.SchemaConfiguration(config_path, config_dict, self._source_code_path) self._logger.debug('Loading project configuration...') def deploy_schema_to_db(self, mode='safe', files_deployment=None, vcs_ref=None, vcs_link=None, issue_ref=None, issue_link=None, compare_table_scripts_as_int=False, config_path=None, config_dict=None, config_object=None, source_code_path=None, auto_commit=False): """ Deploys schema :param files_deployment: if specific script to be deployed, only find them :param mode: :param vcs_ref: :param vcs_link: :param issue_ref: :param issue_link: :param compare_table_scripts_as_int: :param config_path: :param config_dict: :param config_object: :param source_code_path: :param auto_commit: :return: dictionary of the following format: { code: 0 if all fine, otherwise something else, message: message on the output function_scripts_requested: list of function files requested for deployment function_scripts_deployed: list of function files deployed type_scripts_requested: list of type files requested for deployment type_scripts_deployed: list of type files deployed view_scripts_requested: list of view files requested for deployment view_scripts_deployed: list of view files deployed trigger_scripts_requested: list of trigger files requested for deployment trigger_scripts_deployed: list of trigger files deployed table_scripts_requested: list of table files requested for deployment table_scripts_deployed: list of table files deployed requested_files_count: count of requested files to deploy deployed_files_count: count of deployed files } :rtype: dict """ return_value = {} if files_deployment: return_value['function_scripts_requested'] = files_deployment return_value['type_scripts_requested'] = [] return_value['view_scripts_requested'] = [] return_value['trigger_scripts_requested'] = [] return_value['table_scripts_requested'] = [] if auto_commit: if mode == 'safe' and files_deployment: self._logger.debug("Auto commit mode is on. Be careful.") else: self._logger.error("Auto commit deployment can only be done with file " "deployments and in safe mode for security reasons") raise ValueError("Auto commit deployment can only be done with file " "deployments and in safe mode for security reasons") # set source code path if exists self._source_code_path = self._source_code_path or source_code_path # set configuration if either of config_path, config_dict, config_object are set. # Otherwise use configuration from class initialisation if config_object: self._config = config_object elif config_path or config_dict: self._config = pgpm.lib.utils.config.SchemaConfiguration(config_path, config_dict, self._source_code_path) # Check if in git repo if not vcs_ref: if pgpm.lib.utils.vcs.is_git_directory(self._source_code_path): vcs_ref = pgpm.lib.utils.vcs.get_git_revision_hash(self._source_code_path) self._logger.debug('commit reference to be deployed is {0}'.format(vcs_ref)) else: self._logger.debug('Folder is not a known vcs repository') self._logger.debug('Configuration of package {0} of version {1} loaded successfully.' .format(self._config.name, self._config.version.raw)) # TODO: change to to_string once discussed # .format(self._config.name, self._config.version.to_string())) # Get scripts type_scripts_dict = self._get_scripts(self._config.types_path, files_deployment, "types", self._source_code_path) if not files_deployment: return_value['type_scripts_requested'] = [key for key in type_scripts_dict] function_scripts_dict = self._get_scripts(self._config.functions_path, files_deployment, "functions", self._source_code_path) if not files_deployment: return_value['function_scripts_requested'] = [key for key in function_scripts_dict] view_scripts_dict = self._get_scripts(self._config.views_path, files_deployment, "views", self._source_code_path) if not files_deployment: return_value['view_scripts_requested'] = [key for key in view_scripts_dict] trigger_scripts_dict = self._get_scripts(self._config.triggers_path, files_deployment, "triggers", self._source_code_path) if not files_deployment: return_value['trigger_scripts_requested'] = [key for key in trigger_scripts_dict] # before with table scripts only file name was an identifier. Now whole relative path the file # (relative to config.json) # table_scripts_dict_denormalised = self._get_scripts(self._config.tables_path, files_deployment, # "tables", self._source_code_path) # table_scripts_dict = {os.path.split(k)[1]: v for k, v in table_scripts_dict_denormalised.items()} table_scripts_dict = self._get_scripts(self._config.tables_path, files_deployment, "tables", self._source_code_path) if not files_deployment: return_value['table_scripts_requested'] = [key for key in table_scripts_dict] if self._conn.closed: self._conn = psycopg2.connect(self._connection_string, connection_factory=pgpm.lib.utils.db.MegaConnection) cur = self._conn.cursor() # be cautious, dangerous thing if auto_commit: self._conn.autocommit = True # Check if DB is pgpm enabled if not pgpm.lib.utils.db.SqlScriptsHelper.schema_exists(cur, self._pgpm_schema_name): self._logger.error('Can\'t deploy schemas to DB where pgpm was not installed. ' 'First install pgpm by running pgpm install') self._conn.close() sys.exit(1) # check installed version of _pgpm schema. pgpm_v_db_tuple = pgpm.lib.utils.db.SqlScriptsHelper.get_pgpm_db_version(cur, self._pgpm_schema_name) pgpm_v_db = distutils.version.StrictVersion(".".join(pgpm_v_db_tuple)) pgpm_v_script = distutils.version.StrictVersion(pgpm.lib.version.__version__) if pgpm_v_script > pgpm_v_db: self._logger.error('{0} schema version is outdated. Please run pgpm install --upgrade first.' .format(self._pgpm_schema_name)) self._conn.close() sys.exit(1) elif pgpm_v_script < pgpm_v_db: self._logger.error('Deployment script\'s version is lower than the version of {0} schema ' 'installed in DB. Update pgpm script first.'.format(self._pgpm_schema_name)) self._conn.close() sys.exit(1) # Resolve dependencies list_of_deps_ids = [] if self._config.dependencies: _is_deps_resolved, list_of_deps_ids, _list_of_unresolved_deps = \ self._resolve_dependencies(cur, self._config.dependencies) if not _is_deps_resolved: self._logger.error('There are unresolved dependencies. Deploy the following package(s) and try again:') for unresolved_pkg in _list_of_unresolved_deps: self._logger.error('{0}'.format(unresolved_pkg)) self._conn.close() sys.exit(1) # Prepare and execute preamble _deployment_script_preamble = pkgutil.get_data('pgpm', 'lib/db_scripts/deploy_prepare_config.sql') self._logger.debug('Executing a preamble to deployment statement') cur.execute(_deployment_script_preamble) # Get schema name from project configuration schema_name = '' if self._config.scope == pgpm.lib.utils.config.SchemaConfiguration.SCHEMA_SCOPE: if self._config.subclass == 'versioned': schema_name = '{0}_{1}'.format(self._config.name, self._config.version.raw) self._logger.debug('Schema {0} will be updated'.format(schema_name)) elif self._config.subclass == 'basic': schema_name = '{0}'.format(self._config.name) if not files_deployment: self._logger.debug('Schema {0} will be created/replaced'.format(schema_name)) else: self._logger.debug('Schema {0} will be updated'.format(schema_name)) # Create schema or update it if exists (if not in production mode) and set search path if files_deployment: # if specific scripts to be deployed if self._config.scope == pgpm.lib.utils.config.SchemaConfiguration.SCHEMA_SCOPE: if not pgpm.lib.utils.db.SqlScriptsHelper.schema_exists(cur, schema_name): self._logger.error('Can\'t deploy scripts to schema {0}. Schema doesn\'t exist in database' .format(schema_name)) self._conn.close() sys.exit(1) else: pgpm.lib.utils.db.SqlScriptsHelper.set_search_path(cur, schema_name) self._logger.debug('Search_path was changed to schema {0}'.format(schema_name)) else: if self._config.scope == pgpm.lib.utils.config.SchemaConfiguration.SCHEMA_SCOPE: if not pgpm.lib.utils.db.SqlScriptsHelper.schema_exists(cur, schema_name): pgpm.lib.utils.db.SqlScriptsHelper.create_db_schema(cur, schema_name) elif mode == 'safe': self._logger.error('Schema already exists. It won\'t be overriden in safe mode. ' 'Rerun your script with "-m moderate", "-m overwrite" or "-m unsafe" flags') self._conn.close() sys.exit(1) elif mode == 'moderate': old_schema_exists = True old_schema_rev = 0 while old_schema_exists: old_schema_exists = pgpm.lib.utils.db.SqlScriptsHelper.schema_exists( cur, schema_name + '_' + str(old_schema_rev)) if old_schema_exists: old_schema_rev += 1 old_schema_name = schema_name + '_' + str(old_schema_rev) self._logger.debug('Schema already exists. It will be renamed to {0} in moderate mode. Renaming...' .format(old_schema_name)) _rename_schema_script = "ALTER SCHEMA {0} RENAME TO {1};\n".format(schema_name, old_schema_name) cur.execute(_rename_schema_script) # Add metadata to pgpm schema pgpm.lib.utils.db.SqlScriptsHelper.set_search_path(cur, self._pgpm_schema_name) cur.callproc('_set_revision_package'.format(self._pgpm_schema_name), [self._config.name, self._config.subclass, old_schema_rev, self._config.version.major, self._config.version.minor, self._config.version.patch, self._config.version.pre]) self._logger.debug('Schema {0} was renamed to {1}. Meta info was added to {2} schema' .format(schema_name, old_schema_name, self._pgpm_schema_name)) pgpm.lib.utils.db.SqlScriptsHelper.create_db_schema(cur, schema_name) elif mode == 'unsafe': _drop_schema_script = "DROP SCHEMA {0} CASCADE;\n".format(schema_name) cur.execute(_drop_schema_script) self._logger.debug('Dropping old schema {0}'.format(schema_name)) pgpm.lib.utils.db.SqlScriptsHelper.create_db_schema(cur, schema_name) if self._config.scope == pgpm.lib.utils.config.SchemaConfiguration.SCHEMA_SCOPE: pgpm.lib.utils.db.SqlScriptsHelper.set_search_path(cur, schema_name) # Reordering and executing types return_value['type_scripts_deployed'] = [] if len(type_scripts_dict) > 0: types_script = '\n'.join([''.join(value) for key, value in type_scripts_dict.items()]) type_drop_scripts, type_ordered_scripts, type_unordered_scripts = self._reorder_types(types_script) if type_drop_scripts: for statement in type_drop_scripts: if statement: cur.execute(statement) if type_ordered_scripts: for statement in type_ordered_scripts: if statement: cur.execute(statement) if type_unordered_scripts: for statement in type_unordered_scripts: if statement: cur.execute(statement) self._logger.debug('Types loaded to schema {0}'.format(schema_name)) return_value['type_scripts_deployed'] = [key for key in type_scripts_dict] else: self._logger.debug('No type scripts to deploy') # Executing Table DDL scripts executed_table_scripts = [] return_value['table_scripts_deployed'] = [] if len(table_scripts_dict) > 0: if compare_table_scripts_as_int: sorted_table_scripts_dict = collections.OrderedDict(sorted(table_scripts_dict.items(), key=lambda t: int(t[0].rsplit('.', 1)[0]))) else: sorted_table_scripts_dict = collections.OrderedDict(sorted(table_scripts_dict.items(), key=lambda t: t[0].rsplit('.', 1)[0])) self._logger.debug('Running Table DDL scripts') for key, value in sorted_table_scripts_dict.items(): pgpm.lib.utils.db.SqlScriptsHelper.set_search_path(cur, self._pgpm_schema_name) cur.callproc('_is_table_ddl_executed'.format(self._pgpm_schema_name), [ key, self._config.name, self._config.subclass, self._config.version.major, self._config.version.minor, self._config.version.patch, self._config.version.pre ]) is_table_executed = cur.fetchone()[0] if self._config.scope == pgpm.lib.utils.config.SchemaConfiguration.SCHEMA_SCOPE: pgpm.lib.utils.db.SqlScriptsHelper.set_search_path(cur, schema_name) elif self._config.scope == pgpm.lib.utils.config.SchemaConfiguration.DATABASE_SCOPE: cur.execute("SET search_path TO DEFAULT ;") if (not is_table_executed) or (mode == 'unsafe'): # if auto commit mode than every statement is called separately. # this is done this way as auto commit is normally used when non transaction statements are called # then this is needed to avoid "cannot be executed from a function or multi-command string" errors if auto_commit: for statement in sqlparse.split(value): if statement: cur.execute(statement) else: cur.execute(value) self._logger.debug(value) self._logger.debug('{0} executed for schema {1}'.format(key, schema_name)) executed_table_scripts.append(key) return_value['table_scripts_deployed'].append(key) else: self._logger.debug('{0} is not executed for schema {1} as it has already been executed before. ' .format(key, schema_name)) else: self._logger.debug('No Table DDL scripts to execute') # Executing functions return_value['function_scripts_deployed'] = [] if len(function_scripts_dict) > 0: self._logger.debug('Running functions definitions scripts') for key, value in function_scripts_dict.items(): # if auto commit mode than every statement is called separately. # this is done this way as auto commit is normally used when non transaction statements are called # then this is needed to avoid "cannot be executed from a function or multi-command string" errors if auto_commit: for statement in sqlparse.split(value): if statement: cur.execute(statement) else: cur.execute(value) return_value['function_scripts_deployed'].append(key) self._logger.debug('Functions loaded to schema {0}'.format(schema_name)) else: self._logger.debug('No function scripts to deploy') # Executing views return_value['view_scripts_deployed'] = [] if len(view_scripts_dict) > 0: self._logger.debug('Running views definitions scripts') for key, value in view_scripts_dict.items(): # if auto commit mode than every statement is called separately. # this is done this way as auto commit is normally used when non transaction statements are called # then this is needed to avoid "cannot be executed from a function or multi-command string" errors if auto_commit: for statement in sqlparse.split(value): if statement: cur.execute(statement) else: cur.execute(value) return_value['view_scripts_deployed'].append(key) self._logger.debug('Views loaded to schema {0}'.format(schema_name)) else: self._logger.debug('No view scripts to deploy') # Executing triggers return_value['trigger_scripts_deployed'] = [] if len(trigger_scripts_dict) > 0: self._logger.debug('Running trigger definitions scripts') for key, value in trigger_scripts_dict.items(): # if auto commit mode than every statement is called separately. # this is done this way as auto commit is normally used when non transaction statements are called # then this is needed to avoid "cannot be executed from a function or multi-command string" errors if auto_commit: for statement in sqlparse.split(value): if statement: cur.execute(statement) else: cur.execute(value) return_value['trigger_scripts_deployed'].append(key) self._logger.debug('Triggers loaded to schema {0}'.format(schema_name)) else: self._logger.debug('No trigger scripts to deploy') # alter schema privileges if needed if (not files_deployment) and mode != 'overwrite' \ and self._config.scope == pgpm.lib.utils.config.SchemaConfiguration.SCHEMA_SCOPE: if self._config.usage_roles: pgpm.lib.utils.db.SqlScriptsHelper.grant_usage_privileges( cur, schema_name, ', '.join(self._config.usage_roles)) self._logger.debug('User(s) {0} was (were) granted usage permissions on schema {1}.' .format(", ".join(self._config.usage_roles), schema_name)) if self._config.owner_role: pgpm.lib.utils.db.SqlScriptsHelper.set_search_path(cur, self._pgpm_schema_name) cur.callproc('_alter_schema_owner', [schema_name, self._config.owner_role]) self._logger.debug('Ownership of schema {0} and all its objects was changed and granted to user {1}.' .format(schema_name, self._config.owner_role)) # Add metadata to pgpm schema pgpm.lib.utils.db.SqlScriptsHelper.set_search_path(cur, self._pgpm_schema_name) cur.callproc('_upsert_package_info'.format(self._pgpm_schema_name), [self._config.name, self._config.subclass, self._config.version.major, self._config.version.minor, self._config.version.patch, self._config.version.pre, self._config.version.metadata, self._config.description, self._config.license, list_of_deps_ids, vcs_ref, vcs_link, issue_ref, issue_link]) self._logger.debug('Meta info about deployment was added to schema {0}' .format(self._pgpm_schema_name)) pgpm_package_id = cur.fetchone()[0] if len(table_scripts_dict) > 0: for key in executed_table_scripts: cur.callproc('_log_table_evolution'.format(self._pgpm_schema_name), [key, pgpm_package_id]) # Commit transaction self._conn.commit() self._conn.close() deployed_files_count = len(return_value['function_scripts_deployed']) + \ len(return_value['type_scripts_deployed']) + \ len(return_value['view_scripts_deployed']) + \ len(return_value['trigger_scripts_deployed']) + \ len(return_value['table_scripts_deployed']) requested_files_count = len(return_value['function_scripts_requested']) + \ len(return_value['type_scripts_requested']) + \ len(return_value['view_scripts_requested']) + \ len(return_value['trigger_scripts_requested']) + \ len(return_value['table_scripts_requested']) return_value['deployed_files_count'] = deployed_files_count return_value['requested_files_count'] = requested_files_count if deployed_files_count == requested_files_count: return_value['code'] = self.DEPLOYMENT_OUTPUT_CODE_OK return_value['message'] = 'OK' else: return_value['code'] = self.DEPLOYMENT_OUTPUT_CODE_NOT_ALL_DEPLOYED return_value['message'] = 'Not all requested files were deployed' return return_value def _get_scripts(self, scripts_path_rel, files_deployment, script_type, project_path): """ Gets scripts from specified folders """ scripts_dict = {} if scripts_path_rel: self._logger.debug('Getting scripts with {0} definitions'.format(script_type)) scripts_dict = pgpm.lib.utils.misc.collect_scripts_from_sources(scripts_path_rel, files_deployment, project_path, False, self._logger) if len(scripts_dict) == 0: self._logger.debug('No {0} definitions were found in {1} folder'.format(script_type, scripts_path_rel)) else: self._logger.debug('No {0} folder was specified'.format(script_type)) return scripts_dict def _resolve_dependencies(self, cur, dependencies): """ Function checks if dependant packages are installed in DB """ list_of_deps_ids = [] _list_of_deps_unresolved = [] _is_deps_resolved = True for k, v in dependencies.items(): pgpm.lib.utils.db.SqlScriptsHelper.set_search_path(cur, self._pgpm_schema_name) cur.execute("SELECT _find_schema('{0}', '{1}')" .format(k, v)) pgpm_v_ext = tuple(cur.fetchone()[0][1:-1].split(',')) try: list_of_deps_ids.append(int(pgpm_v_ext[0])) except: pass if not pgpm_v_ext[0]: _is_deps_resolved = False _list_of_deps_unresolved.append("{0}: {1}".format(k, v)) return _is_deps_resolved, list_of_deps_ids, _list_of_deps_unresolved def _reorder_types(self, types_script): """ Takes type scripts and reorders them to avoid Type doesn't exist exception """ self._logger.debug('Running types definitions scripts') self._logger.debug('Reordering types definitions scripts to avoid "type does not exist" exceptions') _type_statements = sqlparse.split(types_script) # TODO: move up to classes _type_statements_dict = {} # dictionary that store statements with type and order. type_unordered_scripts = [] # scripts to execute without order type_drop_scripts = [] # drop scripts to execute first for _type_statement in _type_statements: _type_statement_parsed = sqlparse.parse(_type_statement) if len(_type_statement_parsed) > 0: # can be empty parsed object so need to check # we need only type declarations to be ordered if _type_statement_parsed[0].get_type() == 'CREATE': _type_body_r = r'\bcreate\s+\b(?:type|domain)\s+\b(\w+\.\w+|\w+)\b' _type_name = re.compile(_type_body_r, flags=re.IGNORECASE).findall(_type_statement)[0] _type_statements_dict[str(_type_name)] = \ {'script': _type_statement, 'deps': []} elif _type_statement_parsed[0].get_type() == 'DROP': type_drop_scripts.append(_type_statement) else: type_unordered_scripts.append(_type_statement) # now let's add dependant types to dictionary with types # _type_statements_list = [] # list of statements to be ordered for _type_key in _type_statements_dict.keys(): for _type_key_sub, _type_value in _type_statements_dict.items(): if _type_key != _type_key_sub: if pgpm.lib.utils.misc.find_whole_word(_type_key)(_type_value['script']): _type_value['deps'].append(_type_key) # now let's add order to type scripts and put them ordered to list _deps_unresolved = True _type_script_order = 0 _type_names = [] type_ordered_scripts = [] # ordered list with scripts to execute while _deps_unresolved: for k, v in _type_statements_dict.items(): if not v['deps']: _type_names.append(k) v['order'] = _type_script_order _type_script_order += 1 if not v['script'] in type_ordered_scripts: type_ordered_scripts.append(v['script']) else: _dep_exists = True for _dep in v['deps']: if _dep not in _type_names: _dep_exists = False if _dep_exists: _type_names.append(k) v['order'] = _type_script_order _type_script_order += 1 if not v['script'] in type_ordered_scripts: type_ordered_scripts.append(v['script']) else: v['order'] = -1 _deps_unresolved = False for k, v in _type_statements_dict.items(): if v['order'] == -1: _deps_unresolved = True return type_drop_scripts, type_ordered_scripts, type_unordered_scripts PKƉ&Hxpgpm/lib/__init__.py__author__ = 'apanchoyan' PKɅrH  pgpm/lib/abstract_deploy.pyimport logging import psycopg2 import pgpm.lib.utils import pgpm.lib.utils.db import pgpm.lib.version import pgpm.lib.utils.config import pgpm.lib.utils.vcs class AbstractDeploymentManager(object): """ "Abstract" class (not intended to be called directly) that sets basic configuration and interface for classes that manage deployments within db """ def __init__(self, connection_string, pgpm_schema_name='_pgpm', logger=None): """ initialises the manager and connects to the DB :param connection_string: connection string consumable by DBAPI 2.0 :param pgpm_schema_name: name of pgpm schema (default '_pgpm') :param logger: logger object """ self._logger = logger or logging.getLogger(__name__) self._connection_string = connection_string self._conn = psycopg2.connect(connection_string, connection_factory=pgpm.lib.utils.db.MegaConnection) self._conn.init(logger) self._pgpm_schema_name = pgpm_schema_name self._pgpm_version = pgpm.lib.utils.config.Version(pgpm.lib.version.__version__, pgpm.lib.utils.config.VersionTypes.python) DEPLOYMENT_OUTPUT_CODE_OK = 0 DEPLOYMENT_OUTPUT_CODE_NOT_ALL_DEPLOYED = 1 PKɅrHsϜ pgpm/lib/execute.pyimport logging import distutils.version import sys import psycopg2 import sqlparse import csv import pgpm.lib.abstract_deploy import pgpm.lib.utils import pgpm.lib.utils.db import pgpm.lib.version import pgpm.lib.utils.config import pgpm.lib.utils.vcs class QueryExecutionManager(pgpm.lib.abstract_deploy.AbstractDeploymentManager): """ Class that will manage calling procedures """ def __init__(self, connection_string, pgpm_schema_name='_pgpm', logger=None): """ initialises the manager and connects to the DB :param connection_string: connection string consumable by DBAPI 2.0 :param logger: logger object """ super(QueryExecutionManager, self).__init__(connection_string, pgpm_schema_name, logger) self._logger.debug('Initialised db connection.') def execute(self, query, until_zero=False): """ Execute a query :param query: query to execute :param until_zero: should query be called until returns 0 :return: """ if self._conn.closed: self._conn = psycopg2.connect(self._connection_string, connection_factory=pgpm.lib.utils.db.MegaConnection) cur = self._conn.cursor() # be cautious, dangerous thing self._conn.autocommit = True # Check if DB is pgpm enabled if not pgpm.lib.utils.db.SqlScriptsHelper.schema_exists(cur, self._pgpm_schema_name): self._logger.error('Can\'t deploy schemas to DB where pgpm was not installed. ' 'First install pgpm by running pgpm install') self._conn.close() sys.exit(1) # check installed version of _pgpm schema. pgpm_v_db_tuple = pgpm.lib.utils.db.SqlScriptsHelper.get_pgpm_db_version(cur, self._pgpm_schema_name) pgpm_v_db = distutils.version.StrictVersion(".".join(pgpm_v_db_tuple)) pgpm_v_script = distutils.version.StrictVersion(pgpm.lib.version.__version__) if pgpm_v_script > pgpm_v_db: self._logger.error('{0} schema version is outdated. Please run pgpm install --upgrade first.' .format(self._pgpm_schema_name)) self._conn.close() sys.exit(1) elif pgpm_v_script < pgpm_v_db: self._logger.error('Deployment script\'s version is lower than the version of {0} schema ' 'installed in DB. Update pgpm script first.'.format(self._pgpm_schema_name)) self._conn.close() sys.exit(1) # Executing query if until_zero: self._logger.debug('Running query {0} until it returns 0 (but not more than 10000 times' .format(query)) proc_return_value = None counter = 0 while proc_return_value != 0: cur.execute(query) proc_return_value = cur.fetchone()[0] counter += 1 if counter > 9999: break else: self._logger.debug('Running query {0}'.format(query)) cur.execute(query) # Commit transaction self._conn.commit() self._conn.close() return 0 PKɅrH9]jpgpm/lib/utils/db.pyimport psycopg2 import psycopg2.extensions import logging import re try: from urlparse import urlparse except ImportError: from urllib.parse import urlparse def parse_connection_string_psycopg2(connection_string): """ parses psycopg2 consumable connection string :param connection_string: :return: return dictionary with connection string parts """ conn_prepared = {} conn_parsed = urlparse(connection_string) if not conn_parsed.hostname: _re_dbstr = re.compile(r'\bhost=(?P[0-9a-zA-Z_.!@#$%^&*()~]+)|' r'dbname=(?P[0-9a-zA-Z_.!@#$%^&*()~]+)|' r'port=(?P[0-9a-zA-Z_.!@#$%^&*()~]+)|' r'user=(?P[0-9a-zA-Z_.!@#$%^&*()~]+)|' r'password=(?P[0-9a-zA-Z_.!@#$%^&*()~]+)\b', re.IGNORECASE) for match in _re_dbstr.finditer(connection_string): match_dict = match.groupdict() if match_dict['host']: conn_prepared['host'] = match_dict['host'] if match_dict['port']: conn_prepared['port'] = match_dict['port'] if match_dict['dbname']: conn_prepared['dbname'] = match_dict['dbname'] if match_dict['user']: conn_prepared['user'] = match_dict['user'] if match_dict['password']: conn_prepared['password'] = match_dict['password'] else: conn_prepared = { 'host': conn_parsed.hostname, 'port': conn_parsed.port, 'dbname': conn_parsed.path, 'user': conn_parsed.username, 'password': conn_parsed.password } return conn_prepared class MegaConnection(psycopg2.extensions.connection): """ A connection that uses `MegaCursor` automatically. """ def __init__(self, dsn, *more): psycopg2.extensions.connection.__init__(self, dsn, *more) self._last_notice_flushed_index = -1 self.logger = logging.getLogger(__name__) def cursor(self, *args, **kwargs): kwargs.setdefault('cursor_factory', MegaCursor) return super(MegaConnection, self).cursor(*args, **kwargs) def fetch_new_notices(self): if len(self.notices) > self._last_notice_flushed_index + 1: unflushed_notices = self.notices[self._last_notice_flushed_index + 1:len(self.notices)] self._last_notice_flushed_index = len(self.notices) - 1 return unflushed_notices else: return None def init(self, logger): """Initialize the connection to log to `!logger`. The `!logger` parameter is a Logger instance from the standard logging module. """ self.logger = logger or self.logger def close(self, rollback=True): # rollback or commit only if connection has transaction in progress if self.status == psycopg2.extensions.STATUS_IN_TRANSACTION: # need to do as some middlewares like pgBouncer incorrectly react to implicit rollback # see more here: http://initd.org/psycopg/docs/connection.html#connection.close if rollback: self.rollback() self.logger.debug('Active transaction rolled back.') else: self.commit() self.logger.debug('Active transaction committed.') r_value = super(MegaConnection, self).close() self.logger.debug('Connection closed.') return r_value class MegaCursor(psycopg2.extensions.cursor): def __init__(self, *args, **kwargs): psycopg2.extensions.cursor.__init__(self, *args, **kwargs) if self.connection.__class__.__name__ != 'MegaConnection': raise self.connection.ProgrammingError( 'MegaCursor can only be used with MegaConnection. Instead type "{0}" is used. ' 'Reinitialise db connection with correct class'.format(self.connection.__class__.__name__)) def execute(self, query, args=None): try: return super(MegaCursor, self).execute(query, args) except Exception: raise finally: self.connection.logger.debug('Executed query: {0}'.format(self.query.decode('utf-8'))) noticies = self.connection.fetch_new_notices() if noticies: for notice in noticies: self.connection.logger.debug(notice) def callproc(self, procname, args=None): try: return super(MegaCursor, self).callproc(procname, args) except Exception: raise finally: self.connection.logger.debug('Called stored procedure: {0}'.format(self.query.decode('utf-8'))) noticies = self.connection.fetch_new_notices() if noticies: for notice in noticies: self.connection.logger.debug(notice) def close(self): r_value = super(MegaCursor, self).close() self.connection.logger.debug('Cursor closed.') return r_value class SqlScriptsHelper: current_user_sql = 'select * from CURRENT_USER;' is_superuser_sql = 'select usesuper from pg_user where usename = CURRENT_USER;' @classmethod def get_pgpm_db_version(cls, cur, schema_name='_pgpm'): """ returns current version of pgpm schema :return: tuple of major, minor and patch components of version """ cls.set_search_path(cur, schema_name) cur.execute("SELECT _find_schema('{0}', '{1}')" .format(schema_name, 'x')) # TODO: make it work with the way it's written below. currently throws error as func returns record # without column list # cur.callproc('_find_schema', [schema_name, 'x']) pgpm_v_ext = tuple(cur.fetchone()[0][1:-1].split(',')) return pgpm_v_ext[2], pgpm_v_ext[3], pgpm_v_ext[4] @classmethod def create_db_schema(cls, cur, schema_name): """ Create Postgres schema script and execute it on cursor """ create_schema_script = "CREATE SCHEMA {0} ;\n".format(schema_name) cur.execute(create_schema_script) @classmethod def grant_usage_privileges(cls, cur, schema_name, roles): """ Sets search path """ cur.execute('GRANT USAGE ON SCHEMA {0} TO {1};' 'GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA {0} TO {1};' .format(schema_name, roles)) @classmethod def grant_usage_install_privileges(cls, cur, schema_name, roles): """ Sets search path """ cur.execute('GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA {0} TO {1};' 'GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA {0} TO {1};' 'GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA {0} TO {1};' .format(schema_name, roles)) @classmethod def grant_default_usage_install_privileges(cls, cur, schema_name, roles): """ Sets search path """ cur.execute('ALTER DEFAULT PRIVILEGES IN SCHEMA {0} ' 'GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO {1};' 'ALTER DEFAULT PRIVILEGES IN SCHEMA {0} GRANT EXECUTE ON FUNCTIONS TO {1};' 'ALTER DEFAULT PRIVILEGES IN SCHEMA {0} ' 'GRANT USAGE, SELECT ON SEQUENCES TO {1};' .format(schema_name, roles)) @classmethod def set_search_path(cls, cur, schema_name): """ Sets search path """ cur.execute('set search_path TO {0}, public;' .format(schema_name)) @classmethod def schema_exists(cls, cur, schema_name): """ Check if schema exists """ cur.execute("SELECT EXISTS (SELECT schema_name FROM information_schema.schemata WHERE schema_name = '{0}');" .format(schema_name)) return cur.fetchone()[0]PKƉ&H`__pgpm/lib/utils/config.pyimport json import re import os class SchemaConfiguration(object): """ stores properties of schema configuration """ description = "" license = "" owner_role = "" usage_roles = [] SCHEMA_SCOPE = "SCHEMA" DATABASE_SCOPE = "DATABASE" BASIC_SUBCLASS = "basic" VERSIONED_SUBCLASS = "versioned" def __init__(self, config_path=None, config_dict=None, project_path='.'): """ Sets configuration object by getting info from a file or/and a dict and populates properties with config data :param config_path: path to a file :param config_dict: python dictionary with config """ config_file = None if config_path: config_file = open(config_path) if config_dict and config_file: config_file_dict = json.load(config_file) config_dict = dict(config_file_dict, **config_dict) config_file.close() elif config_file: config_dict = json.load(config_file) config_file.close() if config_dict: self.name = config_dict["name"] self.subclass = config_dict["subclass"] self.version = Version(config_dict["version"], VersionTypes.postgres) self.description = None if "description" in config_dict: self.description = config_dict["description"] self.license = None if "license" in config_dict: self.license = config_dict["license"] self.owner_role = None if "owner_role" in config_dict: self.owner_role = config_dict["owner_role"] self.usage_roles = None if "usage_roles" in config_dict: self.usage_roles = config_dict["usage_roles"] self.dependencies = None if "dependencies" in config_dict: self.dependencies = config_dict["dependencies"] self.scope = self.SCHEMA_SCOPE if "scope" in config_dict: self.scope = config_dict["scope"].upper() self.types_path = None if "types_path" in config_dict: self.types_path = [] # can be list of paths or a string, anyways converted to list if not isinstance(config_dict["types_path"], list): config_dict["types_path"] = [config_dict["types_path"]] for item in config_dict["types_path"]: self.types_path.append(os.path.abspath(os.path.join(project_path, item))) self.functions_path = None if "functions_path" in config_dict: self.functions_path = [] # can be list of paths or a string, anyways converted to list if not isinstance(config_dict["functions_path"], list): config_dict["functions_path"] = [config_dict["functions_path"]] for item in config_dict["functions_path"]: self.functions_path.append(os.path.abspath(os.path.join(project_path, item))) self.views_path = None if "views_path" in config_dict: self.views_path = [] # can be list of paths or a string, anyways converted to list if not isinstance(config_dict["views_path"], list): config_dict["views_path"] = [config_dict["views_path"]] for item in config_dict["views_path"]: self.views_path.append(os.path.abspath(os.path.join(project_path, item))) self.triggers_path = None if "triggers_path" in config_dict: self.triggers_path = [] # can be list of paths or a string, anyways converted to list if not isinstance(config_dict["triggers_path"], list): config_dict["triggers_path"] = [config_dict["triggers_path"]] for item in config_dict["triggers_path"]: self.triggers_path.append(os.path.abspath(os.path.join(project_path, item))) self.tables_path = None if "tables_path" in config_dict: self.tables_path = [] # can be list of paths or a string, anyways converted to list if not isinstance(config_dict["tables_path"], list): config_dict["tables_path"] = [config_dict["tables_path"]] for item in config_dict["tables_path"]: self.tables_path.append(os.path.abspath(os.path.join(project_path, item))) else: raise ValueError("Empty configuration") class VersionTypes(object): """ Version types """ postgres = 'postgres' semver = 'semver' x_postgres = 'x_postgres' x_semver = 'x_semver' python = 'python' class Version(object): """ Version of schema """ major = 0 minor = 0 patch = 0 pre = None metadata = None raw = '' def __init__(self, version_string, version_type=VersionTypes.postgres): """ Parses string version of pg schema written in a format similar to semver but "_" instead of "." is used :param version_string: string version of pg schema :param version_type: type of version string. Defaults to postgres :return: tuple with major, minor, patch, pre and metadata """ self.raw = version_string if version_type == VersionTypes.postgres: version_r = r'^(?P\d+)_(?P\d+)_(?P\d+)' version_list = re.compile(version_r, flags=re.IGNORECASE).findall(version_string) self.major = int(version_list[0][0]) self.minor = int(version_list[0][1]) self.patch = int(version_list[0][2]) elif version_type == VersionTypes.x_postgres: version_r = r'^(?P\d+|x+)_(?P\d+|x+)_(?P\d+|x+)' version_list = re.compile(version_r, flags=re.IGNORECASE).findall(version_string) if "".join(set(version_list[0][0])).lower() == 'x': self.major = -1 else: self.major = int(version_list[0][0]) if "".join(set(version_list[0][1])).lower() == 'x': self.minor = -1 else: self.minor = int(version_list[0][1]) if "".join(set(version_list[0][2])).lower() == 'x': self.patch = -1 else: self.patch = int(version_list[0][2]) elif version_type == VersionTypes.python: # Implementation from http://svn.python.org/projects/python/branches/pep-0384/Lib/distutils/version.py version_r = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$', re.VERBOSE) version_match = version_r.match(version_string) if not version_match: raise ValueError("invalid version number '%s'" % version_string) (self.major, self.minor, self.patch, self.pre, self.metadata) = version_match.group(1, 2, 4, 5, 6) elif version_type == VersionTypes.semver: pass # TODO: implement else: raise ValueError('version_type must be of VersionTypes values') def to_string(self): """ stringifies version :return: string of version """ if self.major == -1: major_str = 'x' else: major_str = self.major if self.minor == -1: minor_str = 'x' else: minor_str = self.minor if self.patch == -1: patch_str = 'x' else: patch_str = self.patch return '{0}_{1}_{2}'.format(major_str, minor_str, patch_str)PKɅrH}!%KKpgpm/lib/utils/misc.pyimport io import logging import os import re import pkg_resources from pgpm import settings def find_whole_word(w): """ Scan through string looking for a location where this word produces a match, and return a corresponding MatchObject instance. Return None if no position in the string matches the pattern; note that this is different from finding a zero-length match at some point in the string. """ return re.compile(r'\b({0})\b'.format(w), flags=re.IGNORECASE).search def collect_scripts_from_sources(script_paths, files_deployment, project_path='.', is_package=False, logger=None): """ Collects postgres scripts from source files :param script_paths: list of strings or a string with a relative path to the directory containing files with scripts :param files_deployment: list of files that need to be harvested. Scripts from there will only be taken if the path to the file is in script_paths :param project_path: path to the project source code :param is_package: are files packaged with pip egg :param logger: pass the logger object if needed :return: """ logger = logger or logging.getLogger(__name__) scripts_dict = {} if script_paths: if not isinstance(script_paths, list): # can be list of paths or a string, anyways converted to list script_paths = [script_paths] if is_package: for script_path in script_paths: for file_info in pkg_resources.resource_listdir('pgpm', script_path): file_content = pkg_resources.resource_string('pgpm', '{0}/{1}'.format(script_path, file_info))\ .decode('utf-8') if file_content: scripts_dict[file_info] = file_content logger.debug('File {0}/{1} collected.'.format(script_path, file_info)) else: logger.debug('File {0}/{1} not collected as it\'s empty.'.format(script_path, file_info)) else: if files_deployment: # if specific script to be deployed, only find them for list_file_name in files_deployment: list_file_full_path = os.path.join(project_path, list_file_name) if os.path.isfile(list_file_full_path): for i in range(len(script_paths)): if script_paths[i] in list_file_full_path: file_content = io.open(list_file_full_path, 'r', -1, 'utf-8-sig', 'ignore').read() if file_content: scripts_dict[list_file_name] = file_content logger.debug('File {0} collected.'.format(list_file_full_path)) else: logger.debug('File {0} not collected as it\'s empty.'.format(list_file_full_path)) else: logger.debug('File {0} is not found in any of {1} folders, please specify a correct path' .format(list_file_full_path, script_paths)) else: for script_path in script_paths: for subdir, dirs, files in os.walk(script_path): files = sorted(files) for file_info in files: if file_info != settings.CONFIG_FILE_NAME and file_info[0] != '.': file_content = io.open(os.path.join(subdir, file_info), 'r', -1, 'utf-8-sig', 'ignore').read() if file_content: scripts_dict[file_info] = file_content logger.debug('File {0} collected'.format(os.path.join(subdir, file_info))) else: logger.debug('File {0} not collected as it\'s empty.' .format(os.path.join(subdir, file_info))) return scripts_dict PKɅrHpgpm/lib/utils/__init__.pyPKƉ&Hp`FFpgpm/lib/utils/vcs.pyimport os import subprocess def is_git_directory(path='.'): """ Checks if given directory is a git repository :param path: path to check :return: True if it's a git repo and False otherwise """ if path and subprocess.call(['git', '-C', path, 'rev-parse', '--is-inside-work-tree'], stderr=subprocess.STDOUT, stdout=open(os.devnull, 'w')) == 0: return True else: return False def get_git_revision_hash(path='.'): """ Get git HEAD hash :param path: path to repo :return: hash or exception """ return subprocess.check_output(['git', '-C', path, 'rev-parse', 'HEAD']).strip().decode('utf-8') def get_git_remote(path='.', remote='origin', type='Fetch'): """ Get git remote url :param path: path to repo :param remote: :param type: :return: remote url or exception """ str = subprocess.check_output(['git', '-C', path, 'remote', 'show', '-n', remote]).strip().decode('utf-8') str = str[str.find(type):] str = str[:str.find('\n')].split(' ')[-1] return str PKƉ&H*-pgpm/lib/db_scripts/deploy_prepare_config.sql-- Preparation SET statements for deployment SET statement_timeout = 0; SET client_encoding = 'UTF8'; SET standard_conforming_strings = off; SET check_function_bodies = false; SET client_min_messages = warning; SET escape_string_warning = off;PKƉ&Hk0|++$pgpm/lib/db_scripts/install.tmpl.sqlDO $$BEGIN CREATE SCHEMA {schema_name}; SET search_path TO {schema_name}, public; CREATE TABLE package_subclasses ( pkg_sc_id SERIAL NOT NULL, pkg_sc_name TEXT, pkg_sc_created TIMESTAMP DEFAULT now(), pkg_sc_last_modified TIMESTAMP DEFAULT now(), CONSTRAINT package_subclass_pkey PRIMARY KEY (pkg_sc_id) ); INSERT INTO package_subclasses (pkg_sc_name) VALUES ('versioned'); INSERT INTO package_subclasses (pkg_sc_name) VALUES ('basic'); -- info on packages CREATE TABLE packages ( pkg_id serial NOT NULL, pkg_name character varying(255), pkg_description text, pkg_v_major integer, pkg_v_minor integer, pkg_v_patch integer, pkg_v_pre character varying(255), pkg_v_metadata character varying(255), pkg_old_rev integer, pkg_vcs_ref varchar(255), pkg_subclass integer, pkg_license text, pkg_created timestamp without time zone DEFAULT now(), pkg_last_modified timestamp without time zone DEFAULT statement_timestamp(), CONSTRAINT package_pkey PRIMARY KEY (pkg_id), CONSTRAINT package_subclass_fkey FOREIGN KEY (pkg_subclass) REFERENCES package_subclasses (pkg_sc_id) ); -- info on package dependencies CREATE TABLE package_dependencies ( pkg_dep_id serial NOT NULL, pkg_link_core_id integer NOT NULL, pkg_link_dep_id integer NOT NULL CHECK (pkg_link_core_id <> pkg_link_dep_id), CONSTRAINT package_dependency_pkey PRIMARY KEY (pkg_dep_id), CONSTRAINT package_link_core_fkey FOREIGN KEY (pkg_link_core_id) REFERENCES packages (pkg_id), CONSTRAINT package_link_dependency_fkey FOREIGN KEY (pkg_link_dep_id) REFERENCES packages (pkg_id) ); END$$;PKƉ&H1AA5pgpm/lib/db_scripts/functions/_add_migration_info.sqlCREATE OR REPLACE FUNCTION _add_migration_info(p_m_low_v TEXT, p_m_high_v TEXT) RETURNS VOID AS $BODY$ --- -- @description -- Adds migration info migrations log table -- -- @param p_m_low_v -- lower border of version that is applicaple for this migration -- -- @param p_m_high_v -- package type: either version (with version suffix at the end of the name) or basic (without) -- -- @param p_pkg_old_rev -- higher border of version that is applicaple for this migration -- --- BEGIN BEGIN INSERT INTO migrations_log (m_low_v, m_high_v) VALUES (p_m_low_v, p_m_high_v); EXCEPTION WHEN SQLSTATE '42P01' THEN RAISE WARNING 'Can''t log migration from % to % as migrations_log is not defined yet', p_m_low_v, p_m_high_v; END; END; $BODY$ LANGUAGE 'plpgsql' VOLATILE SECURITY DEFINER; PKƉ&H(ae e 7pgpm/lib/db_scripts/functions/_set_revision_package.sqlCREATE OR REPLACE FUNCTION _set_revision_package(p_pkg_name TEXT, p_pkg_subclass_name TEXT, p_pkg_old_rev INTEGER, p_pkg_v_major INTEGER, p_pkg_v_minor INTEGER DEFAULT 0, p_pkg_v_patch INTEGER DEFAULT 0, p_pkg_v_pre TEXT DEFAULT NULL) RETURNS INTEGER AS $BODY$ --- -- @description -- Set package as old revision, info logged in deployment events table -- -- @param p_pkg_name -- package name -- -- @param p_pkg_subclass_name -- package type: either version (with version suffix at the end of the name) or basic (without) -- -- @param p_pkg_old_rev -- Revision name of package. Used in moderate form. When schema is renamed it is given a revision suffix from here -- -- @param p_pkg_v_major -- package major part of version (according to semver) -- -- @param p_pkg_v_minor -- package minor part of version (according to semver) -- -- @param p_pkg_v_patch -- package patch part of version (according to semver) -- -- @param p_pkg_v_pre -- package pre part of version (according to semver) -- -- @param p_pkg_v_metadata -- package metadata part of version (according to semver) -- -- @param p_pkg_vcs_ref -- vcs reference to track the code --- DECLARE l_existing_pkg_id INTEGER; l_pkg_dep_id INTEGER; return_value INTEGER; BEGIN SELECT pkg_id INTO l_existing_pkg_id FROM packages WHERE pkg_name = p_pkg_name AND pkg_subclass IN (SELECT pkg_sc_id FROM package_subclasses WHERE pkg_sc_name = p_pkg_subclass_name) AND pkg_v_major = p_pkg_v_major AND (pkg_v_minor IS NULL OR pkg_v_minor = p_pkg_v_minor) AND (pkg_v_patch IS NULL OR pkg_v_patch = p_pkg_v_patch) AND (pkg_v_pre IS NULL OR pkg_v_pre = p_pkg_v_pre); IF FOUND THEN UPDATE packages SET pkg_old_rev=p_pkg_old_rev WHERE packages.pkg_id=l_existing_pkg_id; INSERT INTO deployment_events (dpl_ev_pkg_id, dpl_ev_txid, dpl_ev_vcs_ref, dpl_ev_vcs_link, dpl_ev_issue_id, dpl_ev_issue_link) SELECT pkg_id, txid_current(), dpl_ev_vcs_ref, dpl_ev_vcs_link, dpl_ev_issue_id, dpl_ev_issue_link FROM packages JOIN deployment_events ON dpl_ev_pkg_id=pkg_id WHERE pkg_id=l_existing_pkg_id AND dpl_ev_time IN ( SELECT max(dpl_ev_time) FROM deployment_events WHERE dpl_ev_pkg_id=packages.pkg_id ); ELSE RAISE EXCEPTION 'Package % not found in the list of packages. It could happen if schema exists but wasn''t properly deployed with pgpm', p_pkg_name || '_' || p_pkg_v_major || '_' || p_pkg_v_minor || '_' || p_pkg_v_patch; END IF; -- Notify external channels of successful deployment event PERFORM pg_notify('deployment_events' || '$$' || p_pkg_name, p_pkg_v_major || '_' || p_pkg_v_minor || '_' || p_pkg_v_patch); RETURN return_value; END; $BODY$ LANGUAGE 'plpgsql' VOLATILE SECURITY DEFINER; PKƉ&HRoo6pgpm/lib/db_scripts/functions/_log_table_evolution.sqlCREATE OR REPLACE FUNCTION _log_table_evolution(p_t_evo_file_name TEXT, p_t_evo_package INTEGER) RETURNS VOID AS $BODY$ --- -- @description -- Adds information about executed table evolution script to log table -- -- @param p_t_evo_file_name -- File name with executed statements. -- -- @param p_t_evo_package -- Related package id -- -- @param p_pkg_old_rev -- higher border of version that is applicaple for this migration -- --- BEGIN INSERT INTO table_evolutions_log (t_evo_file_name, t_evo_package) VALUES (p_t_evo_file_name, p_t_evo_package); END; $BODY$ LANGUAGE 'plpgsql' VOLATILE SECURITY DEFINER;PKnuHvgg3pgpm/lib/db_scripts/functions/drop_schema_smart.sqlCREATE OR REPLACE FUNCTION drop_schema_smart(p_schema_name TEXT, p_dry_run BOOLEAN DEFAULT TRUE) RETURNS VOID AS $BODY$ ----------------------------------------------------------------------------------------------------------------------- -- -- @description -- To perform command Drop schema cascade for versioned and long unused ( outdated ) schemas -- -- @return -- message as output/exception -- -- @param p_schema_name -- schema to drop -- -- @param p_dry_run -- perform dry run or not -- p_dry_run=true --> run function in Dry mode -- p_dry_run=false --> run function in drop mode -- -- -------------------------------------------------------------------------------------------------------------------- DECLARE c_user_oid INTEGER := 16384; l_schema_with_dot TEXT; _detail TEXT; _hint TEXT; _message TEXT; BEGIN l_schema_with_dot := p_schema_name || '.'; -- check schema existance RAISE NOTICE 'Checking schema existance...'; IF NOT EXISTS(SELECT 1 FROM information_schema.schemata WHERE schema_name = p_schema_name) THEN RAISE EXCEPTION E'Schema % does not exist\n', p_schema_name; END IF; -- check table existance RAISE NOTICE 'Checking if schema contains any tables...'; IF EXISTS(SELECT 1 FROM information_schema.tables WHERE table_schema = p_schema_name) THEN RAISE EXCEPTION E'Schema contains at least one table!\n'; END IF; -- checking schema usage in stat_ACTIVITY RAISE NOTICE 'Checking whether schema currently in use ...'; IF EXISTS(SELECT 1 FROM pg_stat_activity WHERE query LIKE '%' || l_schema_with_dot || '%') THEN RAISE EXCEPTION E'Schema % currently in use\n', p_schema_name; END IF; -- checking type dependencies RAISE NOTICE 'Checking type dependencies...'; IF EXISTS(SELECT 1 FROM ( SELECT classid :: REGCLASS :: TEXT AS dep_obj_type, CASE classid WHEN 'pg_class' :: REGCLASS THEN objid :: REGCLASS :: TEXT WHEN 'pg_type' :: REGCLASS THEN objid :: REGTYPE :: TEXT WHEN 'pg_proc' :: REGCLASS THEN objid :: REGPROCEDURE :: TEXT ELSE objid :: TEXT END AS dep_obj, objsubid, refclassid :: REGCLASS :: TEXT AS ref_obj_type, CASE refclassid WHEN 'pg_class' :: REGCLASS THEN refobjid :: REGCLASS :: TEXT WHEN 'pg_type' :: REGCLASS THEN refobjid :: REGTYPE :: TEXT WHEN 'pg_proc' :: REGCLASS THEN refobjid :: REGPROCEDURE :: TEXT ELSE refobjid :: TEXT END AS ref_obj, refobjsubid, CASE deptype WHEN 'p' THEN 'pinned' WHEN 'i' THEN 'internal' WHEN 'a' THEN 'automatic' WHEN 'n' THEN 'normal' END AS dependency_type FROM pg_catalog.pg_depend WHERE objid >= c_user_oid OR refobjid >= c_user_oid) tab1 WHERE ref_obj LIKE l_schema_with_dot || '%' AND dependency_type = 'normal' AND dep_obj NOT LIKE l_schema_with_dot || '%') THEN RAISE EXCEPTION E'Schema has type dependencies with other schemas, please contact DBA team. \n'; END IF; IF p_dry_run IS TRUE THEN RAISE NOTICE 'THIS IS THE DRY RUN. TO DROP SCHEMA % PASS FALSE TO DRY RUN PARAMETER', p_schema_name; ELSE RAISE NOTICE 'Dropping schema % cascading...', p_schema_name; EXECUTE 'drop schema ' || p_schema_name || ' cascade'; RAISE NOTICE 'Dropped SUCCESSFULLY '; END IF; RETURN; END; $BODY$ LANGUAGE plpgsql SECURITY INVOKER;PKɅrH`J3 3 5pgpm/lib/db_scripts/functions/_alter_schema_owner.sqlCREATE OR REPLACE FUNCTION _alter_schema_owner(p_schema TEXT, p_owner TEXT) RETURNS VOID AS $BODY$ --- -- @description -- Alters ownership of schema and all its objects to a specified user. -- -- @param p_schema -- Schema name -- -- @param p_owner -- New owner user name -- --- DECLARE l_schema TEXT; l_functions TEXT; l_tables TEXT; l_sequences TEXT; l_views TEXT; l_domains TEXT; l_triggers TEXT; l_types TEXT; BEGIN l_schema := 'ALTER SCHEMA ' || quote_ident(p_schema) || ' OWNER TO ' || quote_ident(p_owner) || ';'; SELECT string_agg('ALTER FUNCTION ' || quote_ident(n.nspname) || '.' || quote_ident(p.proname) || '(' || pg_catalog.pg_get_function_identity_arguments(p.oid) || ') OWNER TO ' || p_owner || ';' , E'\n') AS _sql FROM pg_catalog.pg_proc p JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace WHERE n.nspname = p_schema INTO l_functions; IF l_functions IS NULL THEN l_functions := ''; END IF; SELECT string_agg('ALTER TABLE ' || quote_ident(schemaname) || '.' || quote_ident(tablename) ||' OWNER TO ' || p_owner || ';', E'\n') FROM pg_tables WHERE schemaname = p_schema INTO l_tables; IF l_tables IS NULL THEN l_tables := ''; END IF; SELECT string_agg('ALTER SEQUENCE ' || quote_ident(sequence_schema) || '.' || quote_ident(sequence_name) ||' OWNER TO ' || p_owner || ';', E'\n') FROM information_schema.sequences WHERE sequence_schema = p_schema INTO l_sequences; IF l_sequences IS NULL THEN l_sequences := ''; END IF; SELECT string_agg('ALTER VIEW ' || quote_ident(table_schema) || '.' || quote_ident(table_name) ||' OWNER TO ' || p_owner || ';', E'\n') FROM information_schema.views WHERE table_schema = p_schema INTO l_views; IF l_views IS NULL THEN l_views := ''; END IF; SELECT string_agg('ALTER DOMAIN ' || quote_ident(domain_schema) || '.' || quote_ident(domain_name) ||' OWNER TO ' || p_owner || ';', E'\n') FROM information_schema.domains WHERE domain_schema = p_schema INTO l_domains; IF l_domains IS NULL THEN l_domains := ''; END IF; SELECT string_agg('ALTER TRIGGER ' || quote_ident(trigger_schema) || '.' || quote_ident(trigger_name) ||' OWNER TO ' || p_owner || ';', E'\n') FROM information_schema.triggers WHERE trigger_schema = p_schema INTO l_triggers; IF l_triggers IS NULL THEN l_triggers := ''; END IF; SELECT string_agg('ALTER TYPE ' || quote_ident(user_defined_type_schema) || '.' || quote_ident(user_defined_type_name) ||' OWNER TO ' || p_owner || ';', E'\n') FROM information_schema.user_defined_types WHERE user_defined_type_schema = p_schema INTO l_types; IF l_types IS NULL THEN l_types := ''; END IF; EXECUTE l_schema || l_functions || l_tables || l_sequences || l_views || l_domains || l_triggers || l_types; END; $BODY$ LANGUAGE 'plpgsql' VOLATILE SECURITY INVOKER;PKƉ&H!Q~1pgpm/lib/db_scripts/functions/_log_ddl_change.sqlCREATE OR REPLACE FUNCTION _log_ddl_change() RETURNS EVENT_TRIGGER AS $BODY$ --- -- @description -- Logs any DDL changes to the DB -- --- DECLARE l_current_query TEXT; l_txid BIGINT; BEGIN SELECT current_query() INTO l_current_query; SELECT txid_current() INTO l_txid; INSERT INTO _pgpm.ddl_changes_log (ddl_change) SELECT l_current_query WHERE NOT EXISTS ( SELECT ddl_change_txid, ddl_change FROM _pgpm.ddl_changes_log WHERE ddl_change_txid = l_txid AND ddl_change = l_current_query ); -- Notify external channels of ddl change PERFORM pg_notify('ddl_change', "session_user"()); END; $BODY$ LANGUAGE 'plpgsql' VOLATILE SECURITY DEFINER;PKɅrHSS8pgpm/lib/db_scripts/functions/_is_table_ddl_executed.sqlCREATE OR REPLACE FUNCTION _is_table_ddl_executed(p_file_name TEXT, p_pkg_name TEXT, p_pkg_subclass_name TEXT, p_pkg_v_major INTEGER, p_pkg_v_minor INTEGER DEFAULT 0, p_pkg_v_patch INTEGER DEFAULT 0, p_pkg_v_pre TEXT DEFAULT NULL) RETURNS BOOLEAN AS $BODY$ --- -- @description -- Checks whether file with table ddl has already been executed -- -- @param p_file_name -- File name used to check whether statements were executed -- -- @returns -- True if executed, False otherwise --- DECLARE l_existing_pkg_id INTEGER; return_value BOOLEAN; BEGIN IF p_pkg_subclass_name = 'basic' THEN SELECT pkg_id INTO l_existing_pkg_id FROM packages WHERE pkg_name = p_pkg_name AND pkg_subclass IN (SELECT pkg_sc_id FROM package_subclasses WHERE pkg_sc_name = p_pkg_subclass_name); ELSE SELECT pkg_id INTO l_existing_pkg_id FROM packages WHERE pkg_name = p_pkg_name AND pkg_subclass IN (SELECT pkg_sc_id FROM package_subclasses WHERE pkg_sc_name = p_pkg_subclass_name) AND pkg_v_major = p_pkg_v_major AND (pkg_v_minor IS NULL OR pkg_v_minor = p_pkg_v_minor) AND (pkg_v_patch IS NULL OR pkg_v_patch = p_pkg_v_patch) AND (pkg_v_pre IS NULL OR pkg_v_pre = p_pkg_v_pre) AND pkg_old_rev IS NULL; END IF; IF FOUND THEN SELECT EXISTS ( SELECT t_evo_id FROM table_evolutions_log WHERE t_evo_file_name=p_file_name AND t_evo_package = l_existing_pkg_id ) INTO return_value; RETURN return_value; ELSE RETURN FALSE; END IF; END; $BODY$ LANGUAGE 'plpgsql' VOLATILE SECURITY DEFINER; PKƉ&H6pgpm/lib/db_scripts/functions/_upsert_package_info.sqlCREATE OR REPLACE FUNCTION _upsert_package_info(p_pkg_name TEXT, p_pkg_subclass_name TEXT, p_pkg_v_major INTEGER, p_pkg_v_minor INTEGER DEFAULT 0, p_pkg_v_patch INTEGER DEFAULT 0, p_pkg_v_pre TEXT DEFAULT NULL, p_pkg_v_metadata TEXT DEFAULT NULL, p_pkg_description TEXT DEFAULT '', p_pkg_license TEXT DEFAULT NULL, p_pkg_deps_ids INTEGER[] DEFAULT '{}', p_pkg_vcs_ref TEXT DEFAULT NULL, p_pkg_vcs_link TEXT DEFAULT NULL, p_pkg_issue_ref TEXT DEFAULT NULL, p_pkg_issue_link TEXT DEFAULT NULL) RETURNS INTEGER AS $BODY$ --- -- @description -- Adds package info to pgpm package info table, deployment events table and notifies channels of deployment -- -- @param p_pkg_name -- package name -- -- @param p_pkg_subclass_name -- package type: either version (with version suffix at the end of the name) or basic (without) -- -- @param p_pkg_description -- package description -- -- @param p_pkg_v_major -- package major part of version (according to semver) -- -- @param p_pkg_v_minor -- package minor part of version (according to semver) -- -- @param p_pkg_v_patch -- package patch part of version (according to semver) -- -- @param p_pkg_v_pre -- package pre part of version (according to semver) -- -- @param p_pkg_v_metadata -- package metadata part of version (according to semver) -- -- @param p_pkg_license -- package license name/text -- -- @param p_pkg_deps_ids -- IDs of dependent schemas -- -- @param p_pkg_vcs_ref -- vcs reference to track the code -- -- @param p_pkg_vcs_link -- repository link to track the code -- -- @param p_pkg_issue_ref -- issue reference to track the code -- -- @param p_pkg_issue_link -- issue tracking system link --- DECLARE l_existing_pkg_id INTEGER; l_pkg_dep_id INTEGER; return_value INTEGER; BEGIN -- Case 1: unsafe mode, rewrite of the whole schema with the same version or some of the files in it -- Case 2: new schema with new version (safe or moderate modes) IF p_pkg_subclass_name = 'basic' THEN SELECT pkg_id INTO l_existing_pkg_id FROM packages WHERE pkg_name = p_pkg_name AND pkg_subclass IN (SELECT pkg_sc_id FROM package_subclasses WHERE pkg_sc_name = p_pkg_subclass_name); ELSE SELECT pkg_id INTO l_existing_pkg_id FROM packages WHERE pkg_name = p_pkg_name AND pkg_subclass IN (SELECT pkg_sc_id FROM package_subclasses WHERE pkg_sc_name = p_pkg_subclass_name) AND pkg_v_major = p_pkg_v_major AND (pkg_v_minor IS NULL OR pkg_v_minor = p_pkg_v_minor) AND (pkg_v_patch IS NULL OR pkg_v_patch = p_pkg_v_patch) AND (pkg_v_pre IS NULL OR pkg_v_pre = p_pkg_v_pre) AND pkg_old_rev IS NULL; END IF; IF FOUND THEN -- Case 1: DELETE FROM package_dependencies WHERE pkg_link_core_id = l_existing_pkg_id; FOREACH l_pkg_dep_id IN ARRAY p_pkg_deps_ids LOOP INSERT INTO package_dependencies (pkg_link_core_id, pkg_link_dep_id) VALUES (l_existing_pkg_id, l_pkg_dep_id); END LOOP; UPDATE packages SET pkg_name=subquery.p_pkg_name, pkg_subclass=subquery.pkg_sc_id, pkg_v_major=subquery.p_pkg_v_major, pkg_v_minor=subquery.p_pkg_v_minor, pkg_v_patch=subquery.p_pkg_v_patch, pkg_v_pre=subquery.p_pkg_v_pre, pkg_v_metadata=subquery.p_pkg_v_metadata, pkg_description=subquery.p_pkg_description, pkg_license=subquery.p_pkg_license FROM (SELECT p_pkg_name, pkg_sc_id, p_pkg_v_major, p_pkg_v_minor, p_pkg_v_patch, p_pkg_v_pre, p_pkg_v_metadata, p_pkg_description, p_pkg_license FROM package_subclasses WHERE pkg_sc_name = p_pkg_subclass_name ) as subquery WHERE packages.pkg_name=subquery.p_pkg_name; INSERT INTO deployment_events (dpl_ev_pkg_id, dpl_ev_txid, dpl_ev_vcs_ref, dpl_ev_vcs_link, dpl_ev_issue_id, dpl_ev_issue_link) VALUES (l_existing_pkg_id, txid_current(), p_pkg_vcs_ref, p_pkg_vcs_link, p_pkg_issue_ref, p_pkg_issue_link); return_value := l_existing_pkg_id; ELSE -- Case 2: INSERT INTO packages ( pkg_name, pkg_description, pkg_v_major, pkg_v_minor, pkg_v_patch, pkg_v_pre, pkg_v_metadata, pkg_subclass, pkg_license ) SELECT p_pkg_name, p_pkg_description, p_pkg_v_major, p_pkg_v_minor, p_pkg_v_patch, p_pkg_v_pre, p_pkg_v_metadata, pkg_sc_id, p_pkg_license FROM package_subclasses WHERE pkg_sc_name = p_pkg_subclass_name RETURNING pkg_id INTO return_value; FOREACH l_pkg_dep_id IN ARRAY p_pkg_deps_ids LOOP INSERT INTO package_dependencies (pkg_link_core_id, pkg_link_dep_id) VALUES (return_value, l_pkg_dep_id); END LOOP; INSERT INTO deployment_events (dpl_ev_pkg_id, dpl_ev_txid, dpl_ev_vcs_ref, dpl_ev_vcs_link, dpl_ev_issue_id, dpl_ev_issue_link) VALUES (return_value, txid_current(), p_pkg_vcs_ref, p_pkg_vcs_link, p_pkg_issue_ref, p_pkg_issue_link); END IF; -- Notify external channels of successful deployment event PERFORM pg_notify('deployment_events' || '$$' || p_pkg_name, p_pkg_v_major || '_' || p_pkg_v_minor || '_' || p_pkg_v_patch); RETURN return_value; END; $BODY$ LANGUAGE 'plpgsql' VOLATILE SECURITY DEFINER; PKƉ&HۨWW1pgpm/lib/db_scripts/functions/set_search_path.sqlCREATE OR REPLACE FUNCTION set_search_path(p_schema_name TEXT, p_v_req TEXT) RETURNS json AS $BODY$ --- -- @description -- Sets search path that includes also all depending -- -- @param p_schema_name -- Package (schema) name -- -- @param p_v_req -- Package version requirement. Version notation supports: -- - exact version number either in a format 1_2_3 or 01_02_03 (latter format is for formatting purposes) -- - x notation like 01_02_XX or 01_02_xx or 1_2_X or 1_2_x -- - comparison operators like >01_02_03 or <2 -- - x for any latest version of package -- Package name must comply with naming conventions of postgres, exist as schema and be trackable by pgpm in order to satisfy dependency -- -- @returns -- JSON with schema names and exact versions or exception if not found --- DECLARE l_search_path TEXT; l_search_path_deps TEXT; l_listen_text TEXT; l_pkg_version_wrapped RECORD; l_pkg_version RECORD; return_value json; BEGIN SET search_path TO _pgpm; SELECT _find_schema(p_schema_name, p_v_req) AS version INTO l_pkg_version_wrapped; l_pkg_version := l_pkg_version_wrapped.version; l_search_path := l_pkg_version.pkg_name || '_' || l_pkg_version.pkg_v_major::text || '_' || l_pkg_version.pkg_v_minor::text || '_' || l_pkg_version.pkg_v_patch::text; SELECT string_agg(pkg_name || '_' || pkg_v_major || '_' || pkg_v_minor || '_' || pkg_v_patch, ', ') FROM packages WHERE pkg_id IN ( SELECT pkg_link_dep_id from package_dependencies JOIN packages ON pkg_id = l_pkg_version.pkg_id WHERE pkg_link_core_id = pkg_id ) INTO l_search_path_deps; l_search_path := l_search_path || ', ' || l_search_path_deps; l_listen_text := 'deployment_events' || '$$' || l_pkg_version.pkg_name; EXECUTE 'LISTEN ' || l_listen_text; RAISE INFO '%', l_search_path; PERFORM set_config('search_path', l_search_path || ', public', false); return_value := row_to_json(l_pkg_version); RETURN return_value; END; $BODY$ LANGUAGE 'plpgsql' VOLATILE SECURITY DEFINER; PKƉ&Hf,>{{.pgpm/lib/db_scripts/functions/_find_schema.sqlCREATE OR REPLACE FUNCTION _find_schema(p_schema_name TEXT, p_v_req TEXT) RETURNS RECORD AS $BODY$ --- -- @description -- Searches for existing schema (package) within registered packages in _pgpm schema -- -- @param p_schema_name -- Package (schema) name -- -- @param p_v_req -- Package version requirement. Version notation supports: -- - exact version number either in a format 1_2_3 or 01_02_03 (latter format is for formatting purposes) -- - x notation like 01_02_XX or 01_02_xx or 1_2_X or 1_2_x -- - comparison operators like >01_02_03 or <2 -- - x for any latest version of package -- Package name must comply with naming conventions of postgres, exist as schema and be trackable by pgpm in order to satisfy dependency -- -- @returns -- Record containing schema name and exact version or exception if not found in the following format: -- pkg_name TEXT, pkg_v_major INTEGER, pkg_v_minor INTEGER, pkg_v_patch INTEGER --- DECLARE c_re_version TEXT = '^(<=|>=|<|>{0,2})(\d*|x*)_?(\d*|x*)_?(\d*|x*)'; l_v_matches TEXT[]; l_v_major INTEGER; l_v_minor INTEGER; l_v_patch INTEGER; return_value RECORD; BEGIN SELECT regexp_matches(p_v_req, c_re_version, 'gi') INTO l_v_matches; IF l_v_matches[2] ~* '^x+|^$' THEN SELECT max(pkg_v_major) FROM packages WHERE pkg_name = p_schema_name INTO l_v_major; ELSE l_v_major := l_v_matches[2]::integer; END IF; IF l_v_matches[3] ~* '^x+|^$' THEN SELECT max(pkg_v_minor) FROM packages WHERE pkg_name = p_schema_name AND pkg_v_major = l_v_major INTO l_v_minor; ELSE l_v_minor := l_v_matches[3]::integer; END IF; IF l_v_matches[4] ~* '^x+|^$' THEN SELECT max(pkg_v_patch) FROM packages WHERE pkg_name = p_schema_name AND pkg_v_major = l_v_major AND pkg_v_minor = l_v_minor INTO l_v_patch; ELSE l_v_patch := l_v_matches[4]::integer; END IF; CASE l_v_matches[1] WHEN '=', '' THEN SELECT DISTINCT pkg_id, pkg_name, pkg_v_major, pkg_v_minor, pkg_v_patch FROM packages WHERE pkg_name = p_schema_name AND pkg_v_major = l_v_major AND pkg_v_minor = l_v_minor AND pkg_v_patch = l_v_patch INTO return_value; WHEN '<' THEN SELECT DISTINCT pkg_id, pkg_name, pkg_v_major, pkg_v_minor, pkg_v_patch FROM packages WHERE pkg_name = p_schema_name AND (pkg_v_major < l_v_major OR (pkg_v_major = l_v_major AND pkg_v_minor < l_v_minor) OR (pkg_v_major = l_v_major AND pkg_v_minor = l_v_minor AND pkg_v_patch < l_v_patch)) INTO return_value; WHEN '>' THEN SELECT DISTINCT pkg_id, pkg_name, pkg_v_major, pkg_v_minor, pkg_v_patch FROM packages WHERE pkg_name = p_schema_name AND (pkg_v_major > l_v_major OR (pkg_v_major = l_v_major AND pkg_v_minor > l_v_minor) OR (pkg_v_major = l_v_major AND pkg_v_minor = l_v_minor AND pkg_v_patch > l_v_patch)) INTO return_value; WHEN '<=' THEN SELECT DISTINCT pkg_id, pkg_name, pkg_v_major, pkg_v_minor, pkg_v_patch FROM packages WHERE pkg_name = p_schema_name AND (pkg_v_major <= l_v_major OR (pkg_v_major = l_v_major AND pkg_v_minor <= l_v_minor) OR (pkg_v_major = l_v_major AND pkg_v_minor = l_v_minor AND pkg_v_patch <= l_v_patch)) INTO return_value; WHEN '>=' THEN SELECT DISTINCT pkg_id, pkg_name, pkg_v_major, pkg_v_minor, pkg_v_patch FROM packages WHERE pkg_name = p_schema_name AND (pkg_v_major >= l_v_major OR (pkg_v_major = l_v_major AND pkg_v_minor >= l_v_minor) OR (pkg_v_major = l_v_major AND pkg_v_minor = l_v_minor AND pkg_v_patch >= l_v_patch)) INTO return_value; ELSE RAISE EXCEPTION 'Invalid logical operand. Only <, >, =, <=, >=, = or no operand are allowed.' USING ERRCODE = '20000'; END CASE; RETURN return_value; END; $BODY$ LANGUAGE 'plpgsql' VOLATILE SECURITY DEFINER; PKƉ&H0^^3pgpm/lib/db_scripts/migrations/0.1.4-0.1.5.tmpl.sql/* Migration script from version 0.1.4 to 0.1.5 (or higher if tool doesn't find other migration scripts) */ DO $$ BEGIN BEGIN ALTER TABLE {schema_name}.deployment_events ADD COLUMN dpl_ev_id SERIAL NOT NULL; ALTER TABLE {schema_name}.deployment_events DROP CONSTRAINT IF EXISTS dpl_ev_pkey; ALTER TABLE {schema_name}.deployment_events ADD CONSTRAINT dpl_ev_pkey PRIMARY KEY (dpl_ev_id); EXCEPTION WHEN duplicate_column THEN RAISE NOTICE 'column dpl_ev_id already exists in {schema_name}.deployment_events.'; END; END; $$ PKƉ&HgCHH3pgpm/lib/db_scripts/migrations/0.1.6-0.1.7.tmpl.sql/* Migration script from version 0.1.6 to 0.1.7 (or higher if tool doesn't find other migration scripts) */ ALTER TABLE {schema_name}.packages DROP CONSTRAINT IF EXISTS pkg_unique; ALTER TABLE {schema_name}.packages ADD CONSTRAINT pkg_unique UNIQUE (pkg_name, pkg_v_major, pkg_v_minor, pkg_v_patch, pkg_v_pre, pkg_old_rev);PKƉ&Hӥj113pgpm/lib/db_scripts/migrations/0.0.1-0.0.6.tmpl.sql/* Migration script from version 0.0.1 to 0.0.6 (or higher if tool doesn't find other migration scripts) */ DO $$BEGIN SET search_path TO {schema_name}, public; GRANT USAGE ON SCHEMA {schema_name} TO public; COMMENT ON SCHEMA {schema_name} IS 'Schema containing all information about postgres packages (name, version, dependencies, etc.) and utility functions'; -- drop created and last modified in subclasses, add comment and change varchar to text ALTER TABLE package_subclasses DROP COLUMN pkg_sc_created; ALTER TABLE package_subclasses DROP COLUMN pkg_sc_last_modified; ALTER TABLE package_subclasses ALTER COLUMN pkg_sc_name TYPE TEXT; COMMENT ON TABLE package_subclasses IS 'Subclass of package. Can refer either to versioned schema (that adds suffix at the end) or non-versioned (basic) one (without suffix at the end)'; -- remove vcf reference, created and last modified (will be moved to deployment_events table), add comments, -- change varchar to text and bump version of pgpm to 0.0.6 ALTER TABLE packages DROP COLUMN pkg_created; ALTER TABLE packages DROP COLUMN pkg_last_modified; ALTER TABLE packages DROP COLUMN pkg_vcs_ref; ALTER TABLE packages ALTER COLUMN pkg_name TYPE TEXT; ALTER TABLE packages ALTER COLUMN pkg_v_pre TYPE TEXT; ALTER TABLE packages ALTER COLUMN pkg_v_metadata TYPE TEXT; UPDATE packages SET pkg_v_major = 0, pkg_v_minor = 0, pkg_v_patch = 6 WHERE pkg_name = '{schema_name}'; COMMENT ON TABLE packages IS 'Information about package (schema) tracked with pgpm.'; COMMENT ON COLUMN packages.pkg_v_major IS 'Major part of version of a package as seen in semver'; COMMENT ON COLUMN packages.pkg_v_minor IS 'Minor part of version of a package as seen in semver'; COMMENT ON COLUMN packages.pkg_v_patch IS 'Patch part of version of a package as seen in semver'; COMMENT ON COLUMN packages.pkg_v_pre IS 'Pre part of version of a package as seen in semver'; COMMENT ON COLUMN packages.pkg_v_metadata IS 'Metadata part of version of a package as seen in semver'; COMMENT ON COLUMN packages.pkg_old_rev IS 'Incremental number of the revision of the package of the same version. Used the following way. If package deployed with the version of already existing package in moderate mode then old package is renamed by adding an ending with incremental revision (starting with 0)'; COMMENT ON COLUMN packages.pkg_license IS 'Name of license (or a link) of a package'; -- change Primary key to combination of 2 keys and remove old key ALTER TABLE package_dependencies DROP CONSTRAINT package_dependency_pkey; ALTER TABLE package_dependencies ADD CONSTRAINT package_dependency_pkey PRIMARY KEY (pkg_link_core_id, pkg_link_dep_id); COMMENT ON TABLE package_dependencies IS 'Many to many relationship between dependant packages. Package cannot depend on itself'; CREATE TABLE deployment_events ( dpl_ev_pkg_id INTEGER, dpl_ev_vcs_ref TEXT, dpl_ev_vcs_link TEXT, dpl_ev_time TIMESTAMP DEFAULT NOW(), dpl_ev_issue_id TEXT, dpl_ev_issue_link TEXT, CONSTRAINT dpl_ev_pkg_fkey FOREIGN KEY (dpl_ev_pkg_id) REFERENCES packages (pkg_id) ); COMMENT ON TABLE deployment_events IS 'Table tracks all deployments tracked by pgpm'; COMMENT ON COLUMN deployment_events.dpl_ev_vcs_ref IS 'Reference to VCS point that was installed. In case of git, best option is to put here a specific commit reference. In case of SVN, revision number.'; COMMENT ON COLUMN deployment_events.dpl_ev_vcs_link IS 'Link to VCS repository.'; COMMENT ON COLUMN deployment_events.dpl_ev_time IS 'Deployment time'; COMMENT ON COLUMN deployment_events.dpl_ev_issue_id IS 'ID of an issue in issue tracker. E.g. ABS-111 for JIRA.'; COMMENT ON COLUMN deployment_events.dpl_ev_issue_link IS 'Link to an issue related to this deployment in issue tracker.'; END$$;PKƉ&H@5pgpm/lib/db_scripts/migrations/0.1.12-0.1.12.tmpl.sql/* Migration script from version 0.1.12 to 0.1.12 (or higher if tool doesn't find other migration scripts) */ CREATE TABLE _pgpm.table_evolutions_log ( t_evo_id SERIAL NOT NULL, t_evo_file_name TEXT, t_evo_package INTEGER, CONSTRAINT table_evolutions_log_pkey PRIMARY KEY (t_evo_id), CONSTRAINT package_fkey FOREIGN KEY (t_evo_package) REFERENCES _pgpm.packages (pkg_id) );PKƉ&H~a+v v 5pgpm/lib/db_scripts/migrations/0.1.10-0.1.10.tmpl.sql/* Migration script from version 0.1.10 to 0.1.10 (or higher if tool doesn't find other migration scripts) */ CREATE TABLE IF NOT EXISTS {schema_name}.ddl_changes_log ( ddl_change_id SERIAL NOT NULL, dpl_ev_id INTEGER, ddl_change_user NAME DEFAULT current_user, ddl_change TEXT, ddl_change_txid BIGINT DEFAULT txid_current(), ddl_change_created TIMESTAMP DEFAULT NOW(), CONSTRAINT ddl_change_pkey PRIMARY KEY (ddl_change_id) ); COMMENT ON TABLE {schema_name}.ddl_changes_log IS 'Log of changes of DDL in database linked to deployment events if happened through deployment'; DROP EVENT TRIGGER IF EXISTS ddl_change_trigger; CREATE EVENT TRIGGER ddl_change_trigger ON ddl_command_end WHEN TAG IN ( 'ALTER AGGREGATE', 'ALTER COLLATION', 'ALTER CONVERSION', 'ALTER DOMAIN', 'ALTER EXTENSION', 'ALTER FOREIGN DATA WRAPPER', 'ALTER FOREIGN TABLE', 'ALTER FUNCTION', 'ALTER LANGUAGE', 'ALTER OPERATOR', 'ALTER OPERATOR CLASS', 'ALTER OPERATOR FAMILY', 'ALTER SCHEMA', 'ALTER SEQUENCE', 'ALTER SERVER', 'ALTER TABLE', 'ALTER TEXT SEARCH CONFIGURATION', 'ALTER TEXT SEARCH DICTIONARY', 'ALTER TEXT SEARCH PARSER', 'ALTER TEXT SEARCH TEMPLATE', 'ALTER TRIGGER', 'ALTER TYPE', 'ALTER USER MAPPING', 'ALTER VIEW', 'CREATE AGGREGATE', 'CREATE CAST', 'CREATE COLLATION', 'CREATE CONVERSION', 'CREATE DOMAIN', 'CREATE EXTENSION', 'CREATE FOREIGN DATA WRAPPER', 'CREATE FOREIGN TABLE', 'CREATE FUNCTION', 'CREATE INDEX', 'CREATE LANGUAGE', 'CREATE OPERATOR', 'CREATE OPERATOR CLASS', 'CREATE OPERATOR FAMILY', 'CREATE RULE', 'CREATE SCHEMA', 'CREATE SEQUENCE', 'CREATE SERVER', 'CREATE TABLE', 'CREATE TABLE AS', 'CREATE TEXT SEARCH CONFIGURATION', 'CREATE TEXT SEARCH DICTIONARY', 'CREATE TEXT SEARCH PARSER', 'CREATE TEXT SEARCH TEMPLATE', 'CREATE TRIGGER', 'CREATE TYPE', 'CREATE USER MAPPING', 'CREATE VIEW', 'DROP AGGREGATE', 'DROP CAST', 'DROP COLLATION', 'DROP CONVERSION', 'DROP DOMAIN', 'DROP EXTENSION', 'DROP FOREIGN DATA WRAPPER', 'DROP FOREIGN TABLE', 'DROP FUNCTION', 'DROP INDEX', 'DROP LANGUAGE', 'DROP OPERATOR', 'DROP OPERATOR CLASS', 'DROP OPERATOR FAMILY', 'DROP OWNED', 'DROP RULE', 'DROP SCHEMA', 'DROP SEQUENCE', 'DROP SERVER', 'DROP TABLE', 'DROP TEXT SEARCH CONFIGURATION', 'DROP TEXT SEARCH DICTIONARY', 'DROP TEXT SEARCH PARSER', 'DROP TEXT SEARCH TEMPLATE', 'DROP TRIGGER', 'DROP TYPE', 'DROP USER MAPPING', 'DROP VIEW') EXECUTE PROCEDURE {schema_name}._log_ddl_change();PKƉ&Hť5pgpm/lib/db_scripts/migrations/0.1.18-0.1.18.tmpl.sql/* Migration script from version 0.1.18 to 0.1.18 (or higher if tool doesn't find other migration scripts) */ COMMENT ON TABLE _pgpm.table_evolutions_log IS 'Table tracks all table evolution statements (ALTER TABLE + DML) for pgpm packages'; COMMENT ON COLUMN _pgpm.table_evolutions_log.t_evo_file_name IS 'File name acts as a key to check whether evolution has already been applied or not';PKƉ&Hp5pgpm/lib/db_scripts/migrations/0.1.11-0.1.11.tmpl.sql/* Migration script from version 0.1.11 to 0.1.11 (or higher if tool doesn't find other migration scripts) */ ALTER TABLE _pgpm.ddl_changes_log DROP COLUMN dpl_ev_id; ALTER TABLE _pgpm.deployment_events ADD COLUMN dpl_ev_txid BIGINT; PKƉ&H=63pgpm/lib/db_scripts/migrations/0.0.7-0.1.3.tmpl.sql/* Migration script from version 0.7.0 to 0.1.3 (or higher if tool doesn't find other migration scripts) */ DROP FUNCTION IF EXISTS {schema_name}._add_package_info(TEXT, TEXT, INTEGER, INTEGER, INTEGER, INTEGER, TEXT, TEXT, TEXT, TEXT, INTEGER[], TEXT); CREATE TABLE IF NOT EXISTS {schema_name}.migrations_log ( m_id SERIAL NOT NULL, m_low_v TEXT, m_high_v TEXT, m_created TIMESTAMP DEFAULT NOW(), CONSTRAINT migrations_log_pkey PRIMARY KEY (m_id) ); INSERT INTO {schema_name}.migrations_log (m_low_v, m_high_v) SELECT '0.0.1', '0.0.6' WHERE NOT EXISTS ( SELECT m_low_v, m_high_v FROM {schema_name}.migrations_log WHERE m_low_v = '0.0.1' AND m_high_v = '0.0.6' ); COMMENT ON TABLE {schema_name}.migrations_log IS 'Logs each migration of pgpm to newer version. TODO: add statuses'; ALTER TABLE {schema_name}.deployment_events DROP CONSTRAINT IF EXISTS dpl_ev_pkey; ALTER TABLE {schema_name}.deployment_events ADD CONSTRAINT dpl_ev_pkey PRIMARY KEY (dpl_ev_pkg_id, dpl_ev_time);PKƉ&H5pgpm/lib/db_scripts/migrations/0.1.19-0.1.33.tmpl.sql/* Migration script from version 0.1.19 to 0.1.33 (or higher if tool doesn't find other migration scripts) */ ALTER TABLE _pgpm.ddl_changes_log ALTER COLUMN ddl_change_user SET DEFAULT session_user;PKƉ&H8 #3pgpm/lib/db_scripts/migrations/0.1.8-0.1.9.tmpl.sql/* Migration script from version 0.1.8 to 0.1.9 (or higher if tool doesn't find other migration scripts) */ CREATE TABLE IF NOT EXISTS {schema_name}.package_statuses ( pkg_s_id SERIAL NOT NULL, pkg_s_name TEXT, CONSTRAINT pkg_s_pkey PRIMARY KEY (pkg_s_id) ); COMMENT ON TABLE {schema_name}.package_statuses IS 'Package statuses'; INSERT INTO {schema_name}.package_statuses (pkg_s_id, pkg_s_name) SELECT 1, 'ADDED' WHERE NOT EXISTS ( SELECT pkg_s_id, pkg_s_name FROM {schema_name}.package_statuses WHERE pkg_s_id = 1 ); INSERT INTO {schema_name}.package_statuses (pkg_s_id, pkg_s_name) SELECT 2, 'IN PROGRESS' WHERE NOT EXISTS ( SELECT pkg_s_id, pkg_s_name FROM {schema_name}.package_statuses WHERE pkg_s_id = 2 ); INSERT INTO {schema_name}.package_statuses (pkg_s_id, pkg_s_name) SELECT 3, 'DELETED' WHERE NOT EXISTS ( SELECT pkg_s_id, pkg_s_name FROM {schema_name}.package_statuses WHERE pkg_s_id = 3 ); ALTER TABLE {schema_name}.packages ADD COLUMN pkg_status INTEGER DEFAULT 1; ALTER TABLE {schema_name}.packages ADD CONSTRAINT pkg_status_fkey FOREIGN KEY (pkg_status) REFERENCES {schema_name}.package_statuses (pkg_s_id);PKƉ&HE5pgpm/lib/db_scripts/migrations/0.1.13-0.1.17.tmpl.sql/* Migration script from version 0.1.13 to 0.1.17 (or higher if tool doesn't find other migration scripts) */ ALTER TABLE _pgpm.table_evolutions_log ADD COLUMN t_evo_created TIMESTAMP DEFAULT NOW(); PKɅrH pgpm/utils/config.pyimport json import os import pgpm.lib.utils.db import psycopg2 import psycopg2.extras class GlobalConfiguration(object): """ stores properties of schema configuration """ description = "" license = "" owner_role = "" user_roles = [] def __init__(self, default_config_path='~/.pgpmconfig', extra_config_path=None): """ populates properties with config data """ global_config_dict = None default_config = None extra_config = None if default_config_path: default_config_full_path = os.path.abspath(os.path.expanduser(default_config_path)) if os.path.isfile(default_config_full_path): default_config_file = open(default_config_full_path) default_config = json.load(default_config_file) default_config_file.close() if extra_config_path: extra_config_full_path = os.path.abspath(os.path.expanduser(extra_config_path)) if os.path.isfile(extra_config_full_path): extra_config_file = open(extra_config_full_path) extra_config = json.load(extra_config_file) extra_config_file.close() if default_config and extra_config: global_config_dict = dict(default_config, **extra_config) elif default_config: global_config_dict = default_config elif extra_config: global_config_dict = extra_config self.global_config_dict = global_config_dict self.connection_sets = [] if self.global_config_dict: if 'connection_sets' in self.global_config_dict: for item in self.global_config_dict['connection_sets']: if item['type'] == 'RESDB': conn = psycopg2.connect(item['connection_string'], connection_factory=pgpm.lib.utils.db.MegaConnection) cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) cur.execute(item['payload']) result_tuple = cur.fetchall() self.connection_sets = self.connection_sets + result_tuple cur.close() conn.close() if item['type'] == 'LIST': self.connection_sets = self.connection_sets + item['payload'] def get_list_connections(self, environment, product, unique_name_list=None, is_except=False): """ Gets list of connections that satisfy the filter by environment, product and (optionally) unique DB names :param environment: Environment name :param product: Product name :param unique_name_list: list of unique db aliases :param is_except: take the connections with aliases provided or, the other wat around, take all the rest :return: list of dictionaries with connections """ return_list = [] for item in self.connection_sets: if unique_name_list: if item['unique_name']: if is_except: if item['environment'] == environment and item['product'] == product and \ (item['unique_name'] not in unique_name_list): return_list.append(item) elif not is_except: if item['environment'] == environment and item['product'] == product and \ (item['unique_name'] in unique_name_list): return_list.append(item) else: if item['environment'] == environment and item['product'] == product: return_list.append(item) return return_list PKƉ&Hpgpm/utils/__init__.pyPKƉ&H$^pgpm/utils/issue_trackers.pyimport json import logging import requests import requests.auth import os class Jira: """ Manages connection to JIRA """ def __init__(self, base_url="localhost", logger=None): """ populates properties with config data """ self._logger = logger or logging.getLogger(__name__) self.base_url = base_url def call_jira_rest(self, url, user, password, method="GET", data=None): """ Make JIRA REST call :param data: data for rest call :param method: type of call: GET or POST for now :param url: url to call :param user: user for authentication :param password: password for authentication :return: """ headers = {'content-type': 'application/json'} self._logger.debug('Connecting to Jira to call the following REST method {0}'.format(url)) if method == "GET": response = requests.get(self.base_url + url, auth=requests.auth.HTTPBasicAuth(user, password)) elif method == "POST": response = requests.post(self.base_url + url, data=json.dumps(data), auth=requests.auth.HTTPBasicAuth(user, password), headers=headers) else: raise ValueError('method argument supports GET or POST values only') self._logger.debug('REST call successfully finalised') return response.json()PKSIwH%pgpm-0.1.58.dist-info/DESCRIPTION.rstpgpm |build-status| =================== ``pgpm`` is a package manager for Postgres database. It provides support for schema deploying and schema versioning. TODO: provide support for DDL evolutions and dependency management. Visit the `project page `_ for additional information and documentation. Installation ------------ ``pip install pgpm`` .. |build-status| image:: https://travis-ci.org/affinitas/pgpm.svg?branch=develop :target: https://travis-ci.org/affinitas/pgpm :alt: Build status PKSIwHWn((&pgpm-0.1.58.dist-info/entry_points.txt[console_scripts] pgpm = pgpm.app:main PKSIwH֔#pgpm-0.1.58.dist-info/metadata.json{"classifiers": ["Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "Programming Language :: Python", "License :: OSI Approved :: MIT License", "Operating System :: MacOS", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX", "Topic :: Software Development :: Build Tools", "Topic :: Utilities", "Topic :: Database"], "extensions": {"python.commands": {"wrap_console": {"pgpm": "pgpm.app:main"}}, "python.details": {"contacts": [{"email": "artem.panchoyan@gmail.com", "name": "Artem Panchoyan", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/affinitas/pgpm"}}, "python.exports": {"console_scripts": {"pgpm": "pgpm.app:main"}}}, "extras": [], "generator": "bdist_wheel (0.26.0)", "keywords": ["postgres", "database", "package", "deploying"], "license": "MIT", "metadata_version": "2.0", "name": "pgpm", "run_requires": [{"requires": ["chardet", "colorama", "docopt", "psycopg2", "requests", "sqlparse"]}], "summary": "Postgres package manager", "test_requires": [{"requires": ["tox"]}], "version": "0.1.58"}PKSIwHLv#pgpm-0.1.58.dist-info/top_level.txtpgpm PKSIwHndnnpgpm-0.1.58.dist-info/WHEELWheel-Version: 1.0 Generator: bdist_wheel (0.26.0) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any PKSIwH! pgpm-0.1.58.dist-info/METADATAMetadata-Version: 2.0 Name: pgpm Version: 0.1.58 Summary: Postgres package manager Home-page: https://github.com/affinitas/pgpm Author: Artem Panchoyan Author-email: artem.panchoyan@gmail.com License: MIT Keywords: postgres database package deploying Platform: UNKNOWN Classifier: Development Status :: 3 - Alpha Classifier: Environment :: Console Classifier: Intended Audience :: Developers Classifier: Intended Audience :: System Administrators Classifier: Programming Language :: Python Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: MacOS Classifier: Operating System :: Microsoft :: Windows Classifier: Operating System :: POSIX Classifier: Topic :: Software Development :: Build Tools Classifier: Topic :: Utilities Classifier: Topic :: Database Requires-Dist: chardet Requires-Dist: colorama Requires-Dist: docopt Requires-Dist: psycopg2 Requires-Dist: requests Requires-Dist: sqlparse pgpm |build-status| =================== ``pgpm`` is a package manager for Postgres database. It provides support for schema deploying and schema versioning. TODO: provide support for DDL evolutions and dependency management. Visit the `project page `_ for additional information and documentation. Installation ------------ ``pip install pgpm`` .. |build-status| image:: https://travis-ci.org/affinitas/pgpm.svg?branch=develop :target: https://travis-ci.org/affinitas/pgpm :alt: Build status PKSIwH ?u++pgpm-0.1.58.dist-info/RECORDpgpm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 pgpm/app.py,sha256=n7aLH4WvtmwxjF2TMY30a5KmTZPf2O88bD0rT2UoB4I,31945 pgpm/settings.py,sha256=l11kC424M6p1YqyLouuNoalqGXIvUOA2y24JSEx3stM,391 pgpm/lib/__init__.py,sha256=-dbyijL5pxC87H89yQ89CM0v7vzg8p5bhOCcev2tcVg,26 pgpm/lib/abstract_deploy.py,sha256=f0r9aqUzJ4vrBkj3OqWmC83rnadPS9dmfTbie8zcfjo,1293 pgpm/lib/deploy.py,sha256=xNhfcdE5JalP_MnpoMm7VfS4qT9v2EWE1yKr_Mxkbk0,30421 pgpm/lib/execute.py,sha256=BJHwnlMfwXPkOfNEH9dIR9nl2-OciGVHYR4gvciO5Zk,3256 pgpm/lib/install.py,sha256=AqACRTUvImi8jNqwCWPa3WT6VTLJeOcvZDSOI8AOhVg,10972 pgpm/lib/version.py,sha256=9__JJwT4zcZiYXPn0K6Z3DtfRvDsXfHEoMpIIZnqqco,23 pgpm/lib/db_scripts/deploy_prepare_config.sql,sha256=AtiANYO-6gcfsWuZH_c8qLVRPf0GDL8yTz0z9DWmnU8,243 pgpm/lib/db_scripts/install.tmpl.sql,sha256=Rx34kkYbV28FxGRgF9i-lcEkjspV1C6Ou_UNf3NO89k,1835 pgpm/lib/db_scripts/functions/_add_migration_info.sql,sha256=WnYiR7PME9oyZbVEZXN8gmOaNVITVSExI_CxaP8gsOk,833 pgpm/lib/db_scripts/functions/_alter_schema_owner.sql,sha256=hG-p1-FEfUYBA8fNQsc0Zgf9pdpifvIY4AVAxRfZiD0,3123 pgpm/lib/db_scripts/functions/_find_schema.sql,sha256=yvdb3VUrgx4imCRsvfRQgNMjy7mJYMvoGSJ6SemMngU,4731 pgpm/lib/db_scripts/functions/_is_table_ddl_executed.sql,sha256=q_qXF53aOqT3w8MlzVHiyUBmZ1y1dN5663fBb6lhH1Y,2131 pgpm/lib/db_scripts/functions/_log_ddl_change.sql,sha256=mDw6Ybo8Qta0ik4VD7-wcb491ychewcwXcWUja0Ua28,720 pgpm/lib/db_scripts/functions/_log_table_evolution.sql,sha256=7laqwvByXsaVs5Gsjmb8MZHEhA1CX-KxdGa1CFwDufQ,623 pgpm/lib/db_scripts/functions/_set_revision_package.sql,sha256=K4rwaOGSdBo10OLqX9qLy-yHgAhmdhyq2waZK70zwck,3173 pgpm/lib/db_scripts/functions/_upsert_package_info.sql,sha256=C_XThzEpHO8GT9Sr8BAwiyBmK7wMjJXRxtwhQHfjWaI,6417 pgpm/lib/db_scripts/functions/drop_schema_smart.sql,sha256=hs3bum-njl6OZKqBWXMkfAUvZvjJv5EIANIa3hXebiM,4199 pgpm/lib/db_scripts/functions/set_search_path.sql,sha256=V3gpVsQlVaZnpzvJ_KYEd1yHiDZxxlPvfq_HHlmtRNo,2135 pgpm/lib/db_scripts/migrations/0.0.1-0.0.6.tmpl.sql,sha256=g2Q0RF5qRmG2Yf8cwAIYPzoGTLgdj6mH7Nvto1NZ2ZY,4145 pgpm/lib/db_scripts/migrations/0.0.7-0.1.3.tmpl.sql,sha256=pnFF9GmgvbaUUGuYl4bNd4tenEpKXNmF0UICZxfSDm4,1031 pgpm/lib/db_scripts/migrations/0.1.10-0.1.10.tmpl.sql,sha256=ko6vBEHYf6IokEtheayavoSesqnz2a9fhjIolcSNDmc,2422 pgpm/lib/db_scripts/migrations/0.1.11-0.1.11.tmpl.sql,sha256=qAPxdnFDuAUZKbkTay42x8uSIjxq3UY2wUcAcuJ3z_o,240 pgpm/lib/db_scripts/migrations/0.1.12-0.1.12.tmpl.sql,sha256=Fvq_6MWG7HjFR91Oda0SUeFIsskZqVq_FUqBjsGg4y0,399 pgpm/lib/db_scripts/migrations/0.1.13-0.1.17.tmpl.sql,sha256=cbxynm8y3EKr6DW0YN4g-yeonpODNRRoDIhAps4pINs,205 pgpm/lib/db_scripts/migrations/0.1.18-0.1.18.tmpl.sql,sha256=78rHp4JT97jKW5GKhVc8ahh7UYSOeXKdqIx_LzVSRtA,404 pgpm/lib/db_scripts/migrations/0.1.19-0.1.33.tmpl.sql,sha256=v9K0LSEVE5IeMp9PFNsd3tRdHCJCEvfnGHlThQxsIr0,204 pgpm/lib/db_scripts/migrations/0.1.4-0.1.5.tmpl.sql,sha256=h-IEyKNSPC3nHn_vHI-hdn2HpRdRNNB6nlDqrzXEoyU,606 pgpm/lib/db_scripts/migrations/0.1.6-0.1.7.tmpl.sql,sha256=e6s-R72Lac3Kn_PSEksR3YQrpaCKewlTN6TIfDa8RV4,328 pgpm/lib/db_scripts/migrations/0.1.8-0.1.9.tmpl.sql,sha256=MX6E0L-fU0ZN0VTaKLuAmi1L_G0wPYk_zwpAWxW2RYY,1221 pgpm/lib/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 pgpm/lib/utils/config.py,sha256=kWzyHnhsO6o6VfuabFc8NagoErt9dbxeKeK0r4sAzJo,7775 pgpm/lib/utils/db.py,sha256=9CCxTMJHg_o5IF1XGhsh9cXuTwAA5o-Vek6OE62QZIM,8081 pgpm/lib/utils/misc.py,sha256=rky339iiy7uqdwPr-j8Mc2ItY2cWb7I-SoGyZs0K34E,4171 pgpm/lib/utils/vcs.py,sha256=gQgzaqFCvfy08PGm3x29WdmqZSbGE2APSwNih1gkgCA,1094 pgpm/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 pgpm/utils/config.py,sha256=DjlM2TT_4_9hrIa8R-IHjAn9QB6ipOLooGLHUTHkGSk,3844 pgpm/utils/issue_trackers.py,sha256=b9K5YcXhrTfii8gW9krAVRYiX3nTKSki8m_ClysHZSI,1426 pgpm-0.1.58.dist-info/DESCRIPTION.rst,sha256=YbiQ_n1oNjs9esKTJq4AXSP9Uq7OCKgXJSQtJFhBy34,536 pgpm-0.1.58.dist-info/METADATA,sha256=WgSAEIkD3HkN09I3ee2kv9prRnl8fzR0g67zxEbG_Mg,1466 pgpm-0.1.58.dist-info/RECORD,, pgpm-0.1.58.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110 pgpm-0.1.58.dist-info/entry_points.txt,sha256=cGyLRWOdn8z42zl4R1hPERDuo5EtA4hm2GHyJeBhJ4E,40 pgpm-0.1.58.dist-info/metadata.json,sha256=pamZLkqmM_Idw14DJWjG713rzeJSET1QoOJN8wu5_bw,1189 pgpm-0.1.58.dist-info/top_level.txt,sha256=UMmkV4p1l5RKjvJCyK-miOLK7ZQWP7DFvVAxMsymbas,5 PKƉ&HME$pgpm/settings.pyPKɅrHÌ|| pgpm/app.pyPKƉ&H~pgpm/__init__.pyPKnuH1~pgpm/lib/version.pyPKɅrH**pgpm/lib/install.pyPKɅrHvv*pgpm/lib/deploy.pyPKƉ&Hx/!pgpm/lib/__init__.pyPKɅrH  {!pgpm/lib/abstract_deploy.pyPKɅrHsϜ &pgpm/lib/execute.pyPKɅrH9]j3pgpm/lib/utils/db.pyPKƉ&H`__mSpgpm/lib/utils/config.pyPKɅrH}!%KKrpgpm/lib/utils/misc.pyPKɅrHpgpm/lib/utils/__init__.pyPKƉ&Hp`FFpgpm/lib/utils/vcs.pyPKƉ&H*-2pgpm/lib/db_scripts/deploy_prepare_config.sqlPKƉ&Hk0|++$ppgpm/lib/db_scripts/install.tmpl.sqlPKƉ&H1AA5ݏpgpm/lib/db_scripts/functions/_add_migration_info.sqlPKƉ&H(ae e 7qpgpm/lib/db_scripts/functions/_set_revision_package.sqlPKƉ&HRoo6+pgpm/lib/db_scripts/functions/_log_table_evolution.sqlPKnuHvgg3pgpm/lib/db_scripts/functions/drop_schema_smart.sqlPKɅrH`J3 3 5pgpm/lib/db_scripts/functions/_alter_schema_owner.sqlPKƉ&H!Q~1,pgpm/lib/db_scripts/functions/_log_ddl_change.sqlPKɅrHSS8Kpgpm/lib/db_scripts/functions/_is_table_ddl_executed.sqlPKƉ&H6pgpm/lib/db_scripts/functions/_upsert_package_info.sqlPKƉ&HۨWW1Ypgpm/lib/db_scripts/functions/set_search_path.sqlPKƉ&Hf,>{{.pgpm/lib/db_scripts/functions/_find_schema.sqlPKƉ&H0^^3pgpm/lib/db_scripts/migrations/0.1.4-0.1.5.tmpl.sqlPKƉ&HgCHH3upgpm/lib/db_scripts/migrations/0.1.6-0.1.7.tmpl.sqlPKƉ&Hӥj113pgpm/lib/db_scripts/migrations/0.0.1-0.0.6.tmpl.sqlPKƉ&H@5pgpm/lib/db_scripts/migrations/0.1.12-0.1.12.tmpl.sqlPKƉ&H~a+v v 5rpgpm/lib/db_scripts/migrations/0.1.10-0.1.10.tmpl.sqlPKƉ&Hť5;!pgpm/lib/db_scripts/migrations/0.1.18-0.1.18.tmpl.sqlPKƉ&Hp5"#pgpm/lib/db_scripts/migrations/0.1.11-0.1.11.tmpl.sqlPKƉ&H=63e$pgpm/lib/db_scripts/migrations/0.0.7-0.1.3.tmpl.sqlPKƉ&H5(pgpm/lib/db_scripts/migrations/0.1.19-0.1.33.tmpl.sqlPKƉ&H8 #3)pgpm/lib/db_scripts/migrations/0.1.8-0.1.9.tmpl.sqlPKƉ&HE5.pgpm/lib/db_scripts/migrations/0.1.13-0.1.17.tmpl.sqlPKɅrH 0pgpm/utils/config.pyPKƉ&HH?pgpm/utils/__init__.pyPKƉ&H$^|?pgpm/utils/issue_trackers.pyPKSIwH%HEpgpm-0.1.58.dist-info/DESCRIPTION.rstPKSIwHWn((&Gpgpm-0.1.58.dist-info/entry_points.txtPKSIwH֔#Hpgpm-0.1.58.dist-info/metadata.jsonPKSIwHLv#Lpgpm-0.1.58.dist-info/top_level.txtPKSIwHndnn;Mpgpm-0.1.58.dist-info/WHEELPKSIwH! Mpgpm-0.1.58.dist-info/METADATAPKSIwH ?u++Spgpm-0.1.58.dist-info/RECORDPK//J=e