From 4da03285a661e8dddeb5eea63e0b410d1d410992 Mon Sep 17 00:00:00 2001 From: Steffen Schumacher Date: Wed, 9 Sep 2020 14:05:49 +0200 Subject: [PATCH 01/37] Port to py3.6 fixes #1252 Move to newest or replacement libs Fix syntax to 3.6+ Overhaul strings to optimize performance and readability Update build stuff to 3.6 + move docker img to 18.04/bionic --- Dockerfile.nipapd | 29 +- ca_certs/README.rst | 8 + nipap/MANIFEST.in | 2 + nipap/Makefile | 4 +- nipap/debian/control | 6 +- nipap/entrypoint.sh | 5 +- nipap/nipap-passwd | 78 +-- nipap/nipap/authlib.py | 80 ++- nipap/nipap/backend.py | 964 +++++++++++++---------------------- nipap/nipap/daemon.py | 309 +++++------ nipap/nipap/errors.py | 1 - nipap/nipap/nipapconfig.py | 14 +- nipap/nipap/smart_parsing.py | 576 ++++++++++----------- nipap/nipap/xmlrpc.py | 369 ++++++-------- nipap/nipapd | 83 +-- nipap/requirements.txt | 11 +- nipap/setup.py | 84 +-- nipap/wait-for-it.sh | 178 +++++++ nipap/xml-test.py | 162 +++--- nipap/xmlbench.py | 111 ++-- tests/nipapbase.py | 1 + 21 files changed, 1470 insertions(+), 1605 deletions(-) create mode 100644 ca_certs/README.rst create mode 100755 nipap/wait-for-it.sh diff --git a/Dockerfile.nipapd b/Dockerfile.nipapd index 14d7fe265..0b7a91fe1 100644 --- a/Dockerfile.nipapd +++ b/Dockerfile.nipapd @@ -32,7 +32,7 @@ # via a volume. # -FROM ubuntu:xenial +FROM ubuntu:bionic MAINTAINER Kristian Larsson ENV DEBIAN_FRONTEND=noninteractive @@ -44,19 +44,30 @@ RUN apt-get update -qy && apt-get upgrade -qy \ libpq-dev \ libsqlite3-dev \ postgresql-client \ - python \ - python-all \ - python-docutils \ - python-pip \ - python-dev \ - && pip --no-input install envtpl \ + software-properties-common \ + python3 \ + python3-all \ + python3-pip \ + python3-dev \ + libsasl2-dev \ + libldap2-dev \ + libssl-dev \ && rm -rf /var/lib/apt/lists/* +# Install any additional CA certs from ca_certs folder required by corp proxies etc +RUN mkdir -p /usr/share/ca-certificates/extra +COPY ca_certs/*.crt /usr/share/ca-certificates/extra/ +RUN ls /usr/share/ca-certificates/extra/*.crt | sed 's/\/usr\/share\/ca-certificates\///g' >> /etc/ca-certificates.conf +RUN update-ca-certificates +RUN pip3 install --upgrade pip +RUN pip3 config set global.cert /etc/ssl/certs/ca-certificates.crt + COPY nipap /nipap WORKDIR /nipap -RUN pip --no-input install -r requirements.txt \ - && python setup.py install +RUN pip3 install --no-input envtpl +RUN pip3 --no-input install -r requirements.txt \ + && python3 setup.py install EXPOSE 1337 ENV LISTEN_ADDRESS=0.0.0.0 LISTEN_PORT=1337 SYSLOG=false DB_PORT=5432 DB_SSLMODE=disable diff --git a/ca_certs/README.rst b/ca_certs/README.rst new file mode 100644 index 000000000..94d3a7549 --- /dev/null +++ b/ca_certs/README.rst @@ -0,0 +1,8 @@ +Custom CA Certs for docker containers +===================================== +If you need to include specific CA certs which you must trust, place them here +in PEM format, named \*.crt. + +This may be required if you need to build the container from inside a network +which uses a proxy or similar, or other dependencies towards internal services +are included in your containers. 
\ No newline at end of file diff --git a/nipap/MANIFEST.in b/nipap/MANIFEST.in index 796565ce8..9720f598a 100644 --- a/nipap/MANIFEST.in +++ b/nipap/MANIFEST.in @@ -1 +1,3 @@ include README.rst MANIFEST.in +include *.man.rst +include requirements.txt diff --git a/nipap/Makefile b/nipap/Makefile index 97c7b65e5..b3584fa88 100644 --- a/nipap/Makefile +++ b/nipap/Makefile @@ -1,7 +1,7 @@ # $Id: Makefile,v 1.6 2011/04/18 17:14:00 lukagarb Exp $ # -PYTHON=`which python2` +PYTHON=`which python3` DESTDIR=/ BUILDIR=$(CURDIR)/debian/python-nipap-build PROJECT=nipap @@ -38,7 +38,7 @@ test: check: upload: - python setup.py sdist upload + python3 setup.py sdist upload clean: $(PYTHON) setup.py clean diff --git a/nipap/debian/control b/nipap/debian/control index 36ae12f89..72dd0ddc4 100644 --- a/nipap/debian/control +++ b/nipap/debian/control @@ -2,13 +2,13 @@ Source: nipap Maintainer: Lukas Garberg Section: python Priority: optional -Build-Depends: python (>= 2.7), debhelper (>= 7.4.3) +Build-Depends: python (>= 3.6), debhelper (>= 7.4.3) Standards-Version: 4.4.0 Package: nipap-common Architecture: all -Depends: python (>= 2.7), ${misc:Depends}, python-pysqlite2, python-ipy +Depends: python (>= 3.6), ${misc:Depends}, python-pysqlite2, python-ipy Suggests: python-ldap Description: Neat IP Address Planner The Neat IP Address Planner, NIPAP, is a system built for efficiently managing @@ -17,7 +17,7 @@ Description: Neat IP Address Planner Package: nipapd Architecture: all -Depends: debconf, nipap-common, python (>= 2.7), ${misc:Depends}, python-psycopg2, python-flask, python-flask-xml-rpc, python-flask-restful, python-flask-compress, python-tornado, python-parsedatetime, python-tz, python-dateutil, python-psutil, python-pyparsing, python-jwt, python-requests +Depends: debconf, nipap-common, python (>= 3.6), ${misc:Depends}, python-psycopg2, python-flask, python-flask-xml-rpc, python-flask-restful, python-flask-compress, python-tornado, python-parsedatetime, python-tz, python-dateutil, python-psutil, python-pyparsing, python-jwt, python-requests Description: Neat IP Address Planner XML-RPC daemon The Neat IP Address Planner, NIPAP, is a system built for efficiently managing large amounts of IP addresses. This is the XML-RPC daemon. diff --git a/nipap/entrypoint.sh b/nipap/entrypoint.sh index bae261919..fdd4b4c24 100755 --- a/nipap/entrypoint.sh +++ b/nipap/entrypoint.sh @@ -1,11 +1,12 @@ #!/bin/sh -envtpl --allow-missing /nipap/nipap.conf.dist -o /etc/nipap/nipap.conf +envtpl --allow-missing --keep-template /nipap/nipap.conf.dist -o /etc/nipap/nipap.conf +/bin/bash /nipap/wait-for-it.sh -t 60 $DB_HOST:$DB_PORT -- sleep 5 /usr/sbin/nipap-passwd create-database if [ -n "$NIPAP_USERNAME" -a -n "$NIPAP_PASSWORD" ]; then echo "Creating user '$NIPAP_USERNAME'" /usr/sbin/nipap-passwd add --username $NIPAP_USERNAME --name "NIPAP user" --password $NIPAP_PASSWORD fi - +echo "Starting nipap daemon.." 
exec /usr/sbin/nipapd --debug --foreground --auto-install-db --auto-upgrade-db --no-pid-file diff --git a/nipap/nipap-passwd b/nipap/nipap-passwd index ab70256f2..2be2cd529 100755 --- a/nipap/nipap-passwd +++ b/nipap/nipap-passwd @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # # Manages NIPAP LocalAuth authentication database # @@ -16,10 +16,11 @@ if __name__ == '__main__': # parse arguments parser = argparse.ArgumentParser(description='NIPAP User configuration') parser.add_argument('action', - metavar='{list, add, delete, modify, test-user, latest-version, create-database, upgrade-database}', - nargs='?', type=str, - choices=['list', 'add', 'delete', 'modify', 'test-user', 'latest-version', 'create-database', 'upgrade-database'], - help='define an action to execute') + metavar='{list, add, delete, modify, test-user, latest-version, create-database, upgrade-database}', + nargs='?', type=str, + choices=['list', 'add', 'delete', 'modify', 'test-user', 'latest-version', 'create-database', + 'upgrade-database'], + help='define an action to execute') parser.add_argument('-u', '--username', dest='user', type=str, help='username') parser.add_argument('-p', '--password', dest='password', type=str, @@ -38,7 +39,7 @@ if __name__ == '__main__': help="database file [default: read from config]") parser.add_argument('-c', '--config', dest='config', default='/etc/nipap/nipap.conf', type=str, help= - 'read configuration from CONFIG [default:/etc/nipap/nipap.conf]') + 'read configuration from CONFIG [default:/etc/nipap/nipap.conf]') parser.add_argument('--version', action='version', version='nipap-passwd version %s' % nipap.__version__) args = parser.parse_args() @@ -52,8 +53,8 @@ if __name__ == '__main__': try: cfg = NipapConfig(args.config) - except NipapConfigError, exc: - print >> sys.stderr, "The specified configuration file ('" + args.config + "') does not exist" + except NipapConfigError as exc: + print("The specified configuration file ('" + args.config + "') does not exist", file=sys.stderr) sys.exit(1) if args.db_file: @@ -63,65 +64,69 @@ if __name__ == '__main__': if args.action == 'list': # print a nicely formatted list of users - header = "%-20s %-25s %-7s %-7s" % ('username', 'real name', 'trusted', 'read only') - print "%s\n%s" % (header,''.join('-' for x in range(len(header)))) + header = "{:<20} {:<25} {:<7} {:<7}".format('username', 'real name', 'trusted', 'read only') + print("{}\n{}".format(header, ''.join('-' for x in range(len(header))))) for u in a.list_users(): if not args.user or args.user == u['username']: - print "%-20s %-25s %-7d %-7d" % (u['username'], u['full_name'], int(u['trusted']), int(u['readonly'])) + print("%-20s %-25s %-7d %-7d" % (u['username'], u['full_name'], int(u['trusted']), int(u['readonly']))) elif args.action == 'test-user': if not args.user: - print "Please specify user with --user" + print("Please specify user with --user") sys.exit(1) if not args.password: - print "Please specify password with --password" + print("Please specify password with --password") sys.exit(1) af = nipap.authlib.AuthFactory() auth = af.get_auth(args.user, args.password, "nipap", {}) if not auth.authenticate(): - print "The username or password seems to be wrong" + print("The username or password seems to be wrong") sys.exit(2) - print "Username and password seem to be correct" + print("Username and password seem to be correct") sys.exit(0) - + elif args.action == 'add': if not args.user: - print "Please specify user with --user" + print("Please specify user with 
--user") sys.exit(1) if not args.password: - print "Please specify password with --password" + print("Please specify password with --password") sys.exit(1) if not args.name: - print "Please specify name with --name" + print("Please specify name with --name") sys.exit(1) try: a.add_user(args.user, args.password, args.name, args.trusted, args.readonly) - print "Added user %s to database %s" % (args.user, cfg.get('auth.backends.local','db_path')) + print("Added user {} to database {}".format(args.user, cfg.get('auth.backends.local', 'db_path'))) except nipap.authlib.AuthError as exc: if str(exc) == 'attempt to write a readonly database': - print "You do not have sufficient rights to write to database: %s" % (cfg.get('auth.backends.local','db_path')) + print("You do not have sufficient rights to write to database: %s" % ( + cfg.get('auth.backends.local', 'db_path'))) elif str(exc) == 'column username is not unique': - print "Username '%s' already exists in the database: %s " % (args.user, cfg.get('auth.backends.local','db_path')) + print("Username '{}' already exists in the database: {} ".format(args.user, + cfg.get('auth.backends.local', + 'db_path'))) else: - print exc + print(exc) elif args.action == 'delete': try: if not args.user: - print "Please specify user with --user" + print("Please specify user with --user") sys.exit(1) a.remove_user(args.user) - print "User %s deleted from database %s" % (args.user, cfg.get('auth.backends.local', 'db_path')) + print("User {} deleted from database {}".format(args.user, cfg.get('auth.backends.local', 'db_path'))) except nipap.authlib.AuthError as exc: if str(exc) == 'attempt to write a readonly database': - print "You do not have sufficient rights to write to database: %s" % (cfg.get('auth.backends.local','db_path')) + print("You do not have sufficient rights to write to database: %s" % ( + cfg.get('auth.backends.local', 'db_path'))) else: - print exc + print(exc) elif args.action == 'modify': if not args.user: - print "Please specify user with --user" + print("Please specify user with --user") sys.exit(1) data = {} @@ -135,16 +140,17 @@ if __name__ == '__main__': data['readonly'] = args.readonly if len(data) == 0: - print "Please specify value to change" + print("Please specify value to change") sys.exit(1) try: a.modify_user(args.user, data) except nipap.authlib.AuthError as exc: if str(exc) == 'attempt to write a readonly database': - print "You do not have sufficient rights to write to database: %s" % (cfg.get('auth.backends.local','db_path')) + print("You do not have sufficient rights to write to database: %s" % ( + cfg.get('auth.backends.local', 'db_path'))) else: - print exc + print(exc) elif args.action == 'upgrade-database': a._upgrade_database() @@ -158,13 +164,13 @@ if __name__ == '__main__': try: latest = a._latest_db_version() if not latest: - print >> sys.stderr, "It seems your Sqlite database for local auth is out of date" - print >> sys.stderr, "Please run 'nipap-passwd upgrade-database' to upgrade your database." 
+ print("It seems your Sqlite database for local auth is out of date", file=sys.stderr) + print("Please run 'nipap-passwd upgrade-database' to upgrade your database.", file=sys.stderr) sys.exit(2) - except nipap.authlib.AuthSqliteError, e: - print >> sys.stderr, "Error checking version of Sqlite database for local auth: %s" % e + except nipap.authlib.AuthSqliteError as e: + print("Error checking version of Sqlite database for local auth: %s" % e, file=sys.stderr) sys.exit(1) - print "Sqlite database for local auth is of the latest version." + print("Sqlite database for local auth is of the latest version.") sys.exit(0) else: diff --git a/nipap/nipap/authlib.py b/nipap/nipap/authlib.py index 0050e40fe..a3dc61c1d 100644 --- a/nipap/nipap/authlib.py +++ b/nipap/nipap/authlib.py @@ -69,7 +69,7 @@ import hashlib import traceback -from nipapconfig import NipapConfig +from .nipapconfig import NipapConfig # Used by auth modules import sqlite3 @@ -120,7 +120,7 @@ def _init_backends(self): auth_backend = section_components[1] self._backends[auth_backend] = eval(self._config.get(section, 'type')) - self._logger.debug("Registered auth backends %s" % str(self._backends)) + self._logger.debug("Registered auth backends %s", str(self._backends)) def reload(self): """ Reload AuthFactory. @@ -157,7 +157,7 @@ def get_auth_bearer_token(self, bearer_token, authoritative_source, auth_options def get_auth(self, username, password, authoritative_source, auth_options=None): """ Returns an authentication object. - + Examines the auth backend given after the '@' in the username and returns a suitable instance of a subclass of the BaseAuth class. @@ -176,7 +176,7 @@ def get_auth(self, username, password, authoritative_source, auth_options=None): auth_options = {} # validate arguments - if (authoritative_source is None): + if authoritative_source is None: raise AuthError("Missing authoritative_source.") # remove invalid cache entries @@ -185,7 +185,7 @@ def get_auth(self, username, password, authoritative_source, auth_options=None): if self._auth_cache[key]['valid_until'] < datetime.utcnow(): rem.append(key) for key in rem: - del (self._auth_cache[key]) + del self._auth_cache[key] user_authbackend = username.rsplit('@', 1) @@ -194,7 +194,7 @@ def get_auth(self, username, password, authoritative_source, auth_options=None): backend = "" if len(user_authbackend) == 1: backend = self._config.get('auth', 'default_backend') - self._logger.debug("Using default auth backend %s" % backend) + self._logger.debug("Using default auth backend %s", backend) else: backend = user_authbackend[1] @@ -202,20 +202,19 @@ def get_auth(self, username, password, authoritative_source, auth_options=None): auth_str = (str(username) + str(password) + str(authoritative_source) + str(auth_options)) if auth_str in self._auth_cache: - self._logger.debug('found cached auth object for user %s' % username) + self._logger.debug('found cached auth object for user %s', username) return self._auth_cache[auth_str]['auth_object'] # Create auth object try: auth = self._backends[backend](backend, user_authbackend[0], password, authoritative_source, auth_options) except KeyError: - raise AuthError("Invalid auth backend '%s' specified" % - str(backend)) + raise AuthError("Invalid auth backend '{}' specified".format(backend)) # save auth object to cache self._auth_cache[auth_str] = { 'valid_until': datetime.utcnow() + timedelta(seconds=self._config.getint('auth', 'auth_cache_timeout')), - 'auth_object': auth + 'auth_object': auth, } return auth @@ -223,7 +222,7 @@ 
def get_auth(self, username, password, authoritative_source, auth_options=None): class BaseAuth: """ A base authentication class. - + All authentication modules should extend this class. """ @@ -512,10 +511,13 @@ def authenticate(self): self.password) except ldap.SERVER_DOWN as exc: raise AuthError('Could not connect to LDAP server') - except (ldap.INVALID_CREDENTIALS, ldap.INVALID_DN_SYNTAX, - ldap.UNWILLING_TO_PERFORM) as exc: + except ( + ldap.INVALID_CREDENTIALS, + ldap.INVALID_DN_SYNTAX, + ldap.UNWILLING_TO_PERFORM, + ) as exc: # Auth failed - self._logger.debug('erroneous password for user %s' % self.username) + self._logger.debug('erroneous password for user %s', self.username) self._authenticated = False return self._authenticated @@ -532,9 +534,12 @@ def authenticate(self): else: search_conn = self._ldap_conn - res = search_conn.search_s(self._ldap_basedn, ldap.SCOPE_SUBTREE, - self._ldap_search.format(ldap.dn.escape_dn_chars(self.username)), - ['cn', 'memberOf']) + res = search_conn.search_s( + self._ldap_basedn, + ldap.SCOPE_SUBTREE, + self._ldap_search.format(ldap.dn.escape_dn_chars(self.username)), + ['cn', 'memberOf'], + ) if res[0][1]['cn'][0] is not None: self.full_name = res[0][1]['cn'][0].decode('utf-8') # check for ro_group membership if ro_group is configured @@ -569,11 +574,8 @@ def authenticate(self): self._authenticated = True - self._logger.debug('successfully authenticated as ' + - '%s, username %s, full_name %s, readonly %s' % ( - self.authenticated_as, - self.username, self.full_name, - str(self.readonly))) + self._logger.debug('successfully authenticated as %s, username %s, full_name %s, readonly %s', + self.authenticated_as, self.username, self.full_name, str(self.readonly)) return self._authenticated @@ -619,12 +621,13 @@ def __init__(self, name, username, password, authoritative_source, auth_options= try: self._db_conn = sqlite3.connect( self._cfg.get('auth.backends.' + self.auth_backend, 'db_path'), - check_same_thread=False) + check_same_thread=False + ) self._db_conn.row_factory = sqlite3.Row self._db_curs = self._db_conn.cursor() except sqlite3.Error as exc: - self._logger.error('Could not open user database: %s' % str(exc)) + self._logger.error('Could not open user database: %s', str(exc)) raise AuthError(str(exc)) def _latest_db_version(self): @@ -639,9 +642,8 @@ def _latest_db_version(self): if len(self._db_curs.fetchall()) < 1: raise AuthSqliteError("No 'user' table.") - for column in ('username', 'pwd_salt', 'pwd_hash', 'full_name', - 'trusted', 'readonly'): - sql = "SELECT %s FROM user" % column + for column in ('username', 'pwd_salt', 'pwd_hash', 'full_name', 'trusted', 'readonly'): + sql = "SELECT " + column + " FROM user" try: self._db_curs.execute(sql) except: @@ -693,19 +695,19 @@ def authenticate(self): if self._authenticated is not None: return self._authenticated - self._logger.debug('Trying to authenticate as user \'%s\'' % self.username) + self._logger.debug('Trying to authenticate as user \'%s\'', self.username) user = self.get_user(self.username) # Was user found? 
if user is None: - self._logger.debug('unknown user %s' % self.username) + self._logger.debug('unknown user %s', self.username) self._authenticated = False return self._authenticated # verify password if self._gen_hash(self.password, user['pwd_salt']) != user['pwd_hash']: # Auth failed - self._logger.debug('erroneous password for user %s' % self.username) + self._logger.debug('erroneous password for user %s', self.username) self._authenticated = False return self._authenticated @@ -734,11 +736,8 @@ def authenticate(self): else: self.full_name = user['full_name'] - self._logger.debug( - 'successfully authenticated as' + - ' %s, username %s, full_name %s, readonly %s' % ( - self.authenticated_as, self.username, self.full_name, - str(self.readonly))) + self._logger.debug('successfully authenticated as %s, username %s, full_name %s, readonly %s', + self.authenticated_as, self.username, self.full_name, str(self.readonly)) return self._authenticated def get_user(self, username): @@ -777,8 +776,7 @@ def add_user(self, username, password, full_name=None, trusted=False, readonly=F (?, ?, ?, ?, ?, ?)''' try: self._db_curs.execute(sql, (username, salt, - self._gen_hash(password, salt), - full_name, trusted or False, + self._gen_hash(password, salt), full_name, trusted or False, readonly or False)) self._db_conn.commit() except (sqlite3.OperationalError, sqlite3.IntegrityError) as error: @@ -810,10 +808,10 @@ def modify_user(self, username, data): char_set = string.ascii_letters + string.digits data['pwd_salt'] = ''.join(random.choice(char_set) for x in range(8)) data['pwd_hash'] = self._gen_hash(data['password'], data['pwd_salt']) - del (data['password']) + del data['password'] sql = "UPDATE user SET " - sql += ', '.join("%s = ?" % k for k in sorted(data)) + sql += ', '.join(k + " = ?" for k in sorted(data)) sql += " WHERE username = ?" vals = [] @@ -843,8 +841,8 @@ def _gen_hash(self, password, salt): # generate hash h = hashlib.sha1() - h.update(salt) - h.update(password) + h.update(str.encode(salt)) # encode to bytes + h.update(str.encode(password)) # encode to bytes return h.hexdigest() diff --git a/nipap/nipap/backend.py b/nipap/nipap/backend.py index 7550657e9..7b3d193c0 100644 --- a/nipap/nipap/backend.py +++ b/nipap/nipap/backend.py @@ -188,30 +188,30 @@ from functools import wraps import dateutil.parser import datetime -import exceptions import logging import psycopg2 import psycopg2.extras -import pytz +from psycopg2.extensions import adapt import shlex -import socket import time import re import IPy -from errors import * -import authlib -import smart_parsing -import db_schema +from .errors import * +from . import authlib +from . import smart_parsing +from . import db_schema import nipap # support multiple versions of parsedatetime try: import parsedatetime + pdt = parsedatetime.Calendar(parsedatetime.Constants(usePyICU=False)) except: import parsedatetime.parsedatetime import parsedatetime.parsedatetime_consts as pdc + pdt = parsedatetime.parsedatetime.Calendar(pdc.Constants()) @@ -547,63 +547,31 @@ '<<': '<<', 'contained_within': '<<', '<<=': '<<=', - 'contained_within_equals': '<<=' - } + 'contained_within_equals': '<<=', +} """ Maps operators in a prefix query to SQL operators. """ - def requires_rw(f): """ Adds readwrite authorization This will check if the user is a readonly user and if so reject the query. Apply this decorator to readwrite functions. 
""" - @wraps(f) + @wraps(f) def decorated(*args, **kwargs): auth = args[1] if auth.readonly: logger = logging.getLogger() - logger.info("read-only user '%s' is not authorized to run function '%s'" % (auth.username, f.__name__)) + logger.info("read-only user '%s' is not authorized to run function '%s'", auth.username, f.__name__) raise authlib.AuthorizationFailed("read-only user '%s' is not authorized to run function '%s'" % (auth.username, f.__name__)) return f(*args, **kwargs) return decorated - - - -class Inet(object): - """ This works around a bug in psycopg2 version somewhere before 2.4. The - __init__ function in the original class is broken and so this is merely - a copy with the bug fixed. - - Wrap a string to allow for correct SQL-quoting of inet values. - - Note that this adapter does NOT check the passed value to make sure it - really is an inet-compatible address but DOES call adapt() on it to make - sure it is impossible to execute an SQL-injection by passing an evil - value to the initializer. - """ - def __init__(self, addr): - self.addr = addr - - def prepare(self, conn): - self._conn = conn - - def getquoted(self): - obj = adapt(self.addr) - if hasattr(obj, 'prepare'): - obj.prepare(self._conn) - return obj.getquoted()+"::inet" - - def __str__(self): - return str(self.addr) - - def _parse_expires(expires): """ Parse the 'expires' attribute, guessing what format it is in and returning a datetime @@ -613,7 +581,7 @@ def _parse_expires(expires): return 'infinity' try: - return dateutil.parser.parse(unicode(expires)) + return dateutil.parser.parse(expires) except ValueError as exc: pass @@ -638,7 +606,7 @@ class Nipap: _logger = None _con_pg = None - _curs_pg = None + _curs_pg = None def __init__(self, auto_install_db=False, auto_upgrade_db=False): """ Constructor. 
@@ -649,7 +617,8 @@ def __init__(self, auto_install_db=False, auto_upgrade_db=False): self._logger = logging.getLogger(self.__class__.__name__) self._logger.debug("Initialising NIPAP") - from nipapconfig import NipapConfig + from .nipapconfig import NipapConfig + self._cfg = NipapConfig() self._auto_install_db = auto_install_db @@ -657,23 +626,10 @@ def __init__(self, auto_install_db=False, auto_upgrade_db=False): self._connect_db() - # # Miscellaneous help functions # - def _register_inet(self, oid=None, conn_or_curs=None): - """ Create the INET type and an Inet adapter.""" - from psycopg2 import extensions as _ext - if not oid: - oid = 869 - _ext.INET = _ext.new_type((oid, ), "INET", - lambda data, cursor: data and Inet(data) or None) - _ext.register_type(_ext.INET, self._con_pg) - return _ext.INET - - - def _is_ipv4(self, ip): """ Return true if given arg is a valid IPv4 address """ @@ -686,8 +642,6 @@ def _is_ipv4(self, ip): return True return False - - def _is_ipv6(self, ip): """ Return true if given arg is a valid IPv6 address """ @@ -700,13 +654,11 @@ def _is_ipv6(self, ip): return True return False - - def _get_afi(self, ip): """ Return address-family (4 or 6) for IP or None if invalid address """ - parts = unicode(ip).split("/") + parts = ip.split('/') if len(parts) == 1: # just an address if self._is_ipv4(ip): @@ -724,13 +676,13 @@ def _get_afi(self, ip): return None if self._is_ipv4(parts[0]): - if pl >= 0 and pl <= 32: + if 0 <= pl <= 32: # prefix mask must be between 0 and 32 return 4 # otherwise error return None elif self._is_ipv6(parts[0]): - if pl >= 0 and pl <= 128: + if 0 <= pl <= 128: # prefix mask must be between 0 and 128 return 6 # otherwise error @@ -741,8 +693,6 @@ def _get_afi(self, ip): # more than two parts.. this is neither an address or a prefix return None - - # # SQL related functions # @@ -765,7 +715,7 @@ def _connect_db(self): db_args['host'] = None for key in db_args.copy(): if db_args[key] is None: - del(db_args[key]) + del db_args[key] # Create database connection while True: @@ -773,24 +723,23 @@ def _connect_db(self): self._con_pg = psycopg2.connect(**db_args) self._con_pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) self._curs_pg = self._con_pg.cursor(cursor_factory=psycopg2.extras.DictCursor) - self._register_inet() psycopg2.extras.register_hstore(self._con_pg, globally=True, unicode=True) except psycopg2.Error as exc: - if re.search("database.*does not exist", unicode(exc)): + if re.search("database.*does not exist", str(exc)): raise NipapDatabaseNonExistentError("Database '%s' does not exist" % db_args['database']) # no hstore extension, assume empty db (it wouldn't work # otherwise) and do auto upgrade? 
- if re.search("hstore type not found in the database", unicode(exc)): + if re.search("hstore type not found in the database", str(exc)): # automatically install if auto-install is enabled if self._auto_install_db: self._db_install(db_args['database']) continue raise NipapDatabaseMissingExtensionError("hstore extension not found in the database") - self._logger.error("pgsql: %s" % exc) + self._logger.error("pgsql: %s, using args: %s", exc, db_args) raise NipapError("Backend unable to connect to database") except psycopg2.Warning as warn: - self._logger.warning('pgsql: %s' % warn) + self._logger.warning('pgsql: %s', warn) # check db version try: @@ -804,25 +753,25 @@ def _connect_db(self): continue raise exc except NipapError as exc: - self._logger.error(unicode(exc)) + self._logger.error(str(exc)) raise exc if current_db_version != nipap.__db_version__: if self._auto_upgrade_db: self._db_upgrade(db_args['database']) continue - raise NipapDatabaseWrongVersionError("NIPAP PostgreSQL database is outdated. Schema version %s is required to run but you are using %s" % (nipap.__db_version__, current_db_version)) + raise NipapDatabaseWrongVersionError( + "NIPAP PostgreSQL database is outdated. Schema version {} is required to run but you are using " + "{}".format(nipap.__db_version__, current_db_version)) # if we reach this we should be fine and done break - - - def _execute(self, sql, opt=None, callno = 0): + def _execute(self, sql, opt=None, callno=0): """ Execute query, catch and log errors. """ - self._logger.debug("SQL: " + sql + " params: " + unicode(opt)) + self._logger.debug("SQL: %s params: %s", sql, str(opt)) try: self._curs_pg.execute(sql, opt) except psycopg2.InternalError as exc: @@ -843,22 +792,21 @@ def _execute(self, sql, opt=None, callno = 0): # we throw (and log) a more general exception. 
# determine if it's "one of our" exceptions or something else - if len(unicode(exc).split(":")) < 2: + if len(str(exc).split(":")) < 2: raise NipapError(exc) - code = unicode(exc).split(":", 1)[0] + code = str(exc).split(":", 1)[0] try: int(code) except: raise NipapError(exc) - text = unicode(exc).splitlines()[0].split(":", 1)[1] + text = str(exc).splitlines()[0].split(":", 1)[1] if code == '1200': raise NipapValueError(text) - estr = "Internal database error: %s" % exc - self._logger.error(estr) - raise NipapError(unicode(exc)) + self._logger.error("Internal database error: %s", exc) + raise NipapError(str(exc)) except psycopg2.IntegrityError as exc: self._con_pg.rollback() @@ -871,14 +819,13 @@ def _execute(self, sql, opt=None, callno = 0): if m is None: raise NipapDuplicateError("Objects primary keys already exist") cursor = self._con_pg.cursor() - cursor.execute(""" SELECT - obj_description(oid) + cursor.execute(""" SELECT obj_description(oid) FROM pg_class - WHERE relname = %(relname)s""", - { 'relname': m.group(1) }) + WHERE relname = %(relname)s""", {'relname': m.group(1)}) + column_desc = '' for desc in cursor: - column_desc = unicode(desc[0]) + column_desc = desc[0] # figure out the value for the duplicate value column column_value = None @@ -889,13 +836,11 @@ def _execute(self, sql, opt=None, callno = 0): except: pass else: - raise NipapDuplicateError("Duplicate value for '" + - unicode(column_desc) + "', the value '" + - unicode(column_value) + "' is already in use.") + raise NipapDuplicateError("Duplicate value for '{}', the value '{}' is " + "already in use.".format(column_desc, column_value)) - raise NipapDuplicateError("Duplicate value for '" + - unicode(column_desc) + - "', the value you have inputted is already in use.") + raise NipapDuplicateError("Duplicate value for '{}', the value you have inputted is " + "already in use.".format(column_desc)) self._logger.exception("Unhandled database IntegrityError:") raise NipapError("Unhandled integrity error.") @@ -905,16 +850,16 @@ def _execute(self, sql, opt=None, callno = 0): m = re.search('invalid cidr value: "([^"]+)"', exc.pgerror) if m is not None: - strict_prefix = unicode(IPy.IP(m.group(1), make_net = True)) - estr = "Invalid prefix (%s); bits set to right of mask. Network address for current mask: %s" % (m.group(1), strict_prefix) - raise NipapValueError(estr) + strict_prefix = IPy.IP(m.group(1), make_net=True) + estr = "Invalid prefix ({}); bits set to right of mask. Network address for current mask: {}" + raise NipapValueError(estr.format(m.group(1), strict_prefix)) - m = re.search('invalid input syntax for(?: type)? (\w+): "([^"]+)"', exc.pgerror) + m = re.search(r'invalid input syntax for(?: type)? 
(\w+): "([^"]+)"', exc.pgerror) if m is not None: if m.group(1) in ["cidr", "inet"]: - estr = "Invalid syntax for prefix (%s)" % m.group(2) + estr = "Invalid syntax for prefix ({})".format(m.group(2)) else: - estr = "Invalid syntax for %s (%s)" % (m.group(1), m.group(2)) + estr = "Invalid syntax for {} ({})".format(m.group(1), m.group(2)) raise NipapValueError(estr) self._logger.exception("Unhandled database DataError:") @@ -926,13 +871,13 @@ def _execute(self, sql, opt=None, callno = 0): except psycopg2.Error: pass - estr = "Unable to execute query: %s" % exc - self._logger.error(estr) + estr = "Unable to execute query: %s" + self._logger.error(estr, exc) # abort if we've already tried to reconnect if callno > 0: - self._logger.error(estr) - raise NipapError(estr) + self._logger.error(estr, exc) + raise NipapError(estr % exc) # reconnect to database and retry query self._logger.info("Reconnecting to database...") @@ -941,9 +886,7 @@ def _execute(self, sql, opt=None, callno = 0): return self._execute(sql, opt, callno + 1) except psycopg2.Warning as warn: - self._logger.warning(unicode(warn)) - - + self._logger.warning(warn) def _lastrowid(self): """ Get ID of last inserted column. @@ -954,9 +897,7 @@ def _lastrowid(self): for row in self._curs_pg: return row['last'] - - - def _sql_expand_insert(self, spec, key_prefix = '', col_prefix = ''): + def _sql_expand_insert(self, spec, key_prefix='', col_prefix=''): """ Expand a dict so it fits in a INSERT clause """ col = list(spec) @@ -971,9 +912,7 @@ def _sql_expand_insert(self, spec, key_prefix = '', col_prefix = ''): return sql, params - - - def _sql_expand_update(self, spec, key_prefix = '', col_prefix = ''): + def _sql_expand_update(self, spec, key_prefix='', col_prefix=''): """ Expand a dict so it fits in a INSERT clause """ sql = ', '.join(col_prefix + key + ' = %(' + key_prefix + key + ')s' for key in spec) @@ -983,16 +922,14 @@ def _sql_expand_update(self, spec, key_prefix = '', col_prefix = ''): return sql, params - - - def _sql_expand_where(self, spec, key_prefix = '', col_prefix = ''): + def _sql_expand_where(self, spec, key_prefix='', col_prefix=''): """ Expand a dict so it fits in a WHERE clause Logical operator is AND. """ sql = ' AND '.join(col_prefix + key + - ( ' IS ' if spec[key] is None else ' = ' ) + + (' IS ' if spec[key] is None else ' = ') + '%(' + key_prefix + key + ')s' for key in spec) params = {} for key in spec: @@ -1000,27 +937,23 @@ def _sql_expand_where(self, spec, key_prefix = '', col_prefix = ''): return sql, params - - # TODO: make this more generic and use for testing of spec too? def _check_attr(self, attr, req_attr, allowed_attr): + """ Check for presence of required attributes, and absence of illegal ones """ - """ - if type(attr) is not dict: + if not isinstance(attr, dict): raise NipapInputError("invalid input type, must be dict") for a in req_attr: - if not a in attr: - raise NipapMissingInputError("missing attribute %s" % a) + if a not in attr: + raise NipapMissingInputError("missing attribute {}".format(a)) for a in attr: if a not in allowed_attr: - raise NipapExtraneousInputError("extraneous attribute %s" % a) + raise NipapExtraneousInputError("extraneous attribute {}".format(a)) if 'avps' in attr and '' in attr['avps']: raise NipapValueError('AVP with empty name is not allowed') - - def _get_updated_rows(self, auth, function): """ Get rows updated by last update query @@ -1037,13 +970,7 @@ def _get_updated_rows(self, auth, function): # search_* API call. 
qps = [] for row in self._curs_pg: - qps.append( - { - 'operator': 'equals', - 'val1': 'id', - 'val2': row['id'] - } - ) + qps.append({'operator': 'equals', 'val1': 'id', 'val2': row['id']}) # if we didn't update anything return empty list if len(qps) == 0: @@ -1053,18 +980,12 @@ def _get_updated_rows(self, auth, function): q = qps[0] for qp in qps[1:]: - q = { - 'operator': 'or', - 'val1': q, - 'val2': qp - } + q = {'operator': 'or', 'val1': q, 'val2': qp} - updated = function(auth, q, { 'max_result': 10000 })['result'] + updated = function(auth, q, {'max_result': 10000})['result'] return updated - - def _get_query_parts(self, query_str, search_options=None): """ Split a query string into its parts """ @@ -1079,10 +1000,10 @@ def _get_query_parts(self, query_str, search_options=None): query_str_parts = [] try: for part in shlex.split(query_str.encode('utf-8')): - query_str_parts.append({ 'string': part.decode('utf-8') }) + query_str_parts.append({'string': part.decode('utf-8')}) except ValueError as exc: - if unicode(exc) == 'No closing quotation': - raise NipapValueError(unicode(exc)) + if str(exc) == 'No closing quotation': + raise NipapValueError(str(exc)) raise exc # Handle empty search. @@ -1090,21 +1011,20 @@ def _get_query_parts(self, query_str, search_options=None): # zero-element list for an empty string, so we have to append one # manually if len(query_str_parts) == 0: - query_str_parts.append({ 'string': '' }) + query_str_parts.append({'string': ''}) return query_str_parts - - def _get_db_version(self): """ Get the schema version of the nipap psql db. """ dbname = self._cfg.get('nipapd', 'db_name') - self._execute("SELECT description FROM pg_shdescription JOIN pg_database ON objoid = pg_database.oid WHERE datname = '%s'" % dbname) + self._execute("SELECT description FROM pg_shdescription JOIN pg_database ON objoid = pg_database.oid " + "WHERE datname = '" + dbname + "'") comment = self._curs_pg.fetchone() if comment is None: - raise NipapDatabaseNoVersionError("Could not find comment of psql database %s" % dbname) + raise NipapDatabaseNoVersionError("Could not find comment of psql database {}".format(dbname)) db_version = None m = re.match('NIPAP database - schema version: ([0-9]+)', comment[0]) @@ -1115,8 +1035,6 @@ def _get_db_version(self): return db_version - - def _db_install(self, db_name): """ Install nipap database schema """ @@ -1125,21 +1043,17 @@ def _db_install(self, db_name): self._execute(db_schema.functions) self._execute(db_schema.triggers) - - def _db_upgrade(self, db_name): """ Upgrade nipap database schema """ current_db_version = self._get_db_version() self._execute(db_schema.functions) for i in range(current_db_version, nipap.__db_version__): - self._logger.info("Upgrading DB schema:", i, "to", i+1) - upgrade_sql = db_schema.upgrade[i-1] # 0 count on array + self._logger.info("Upgrading DB schema: %s to %s", i, i + 1) + upgrade_sql = db_schema.upgrade[i - 1] # 0 count on array self._execute(upgrade_sql % (db_name)) self._execute(db_schema.triggers) - - # # VRF functions # @@ -1158,22 +1072,22 @@ def _expand_vrf_spec(self, spec): error will be thrown if both id and name is specified. 
""" - if type(spec) is not dict: + if not isinstance(spec, dict): raise NipapInputError("vrf specification must be a dict") allowed_values = ['id', 'name', 'rt'] for a in spec: if a not in allowed_values: - raise NipapExtraneousInputError("extraneous specification key %s" % a) + raise NipapExtraneousInputError("extraneous specification key {}".format(a)) if 'id' in spec: - if type(spec['id']) not in (int, long): + if not isinstance(spec['id'], int): raise NipapValueError("VRF specification key 'id' must be an integer.") elif 'rt' in spec: - if type(spec['rt']) != type(''): + if not isinstance(spec['rt'], str): raise NipapValueError("VRF specification key 'rt' must be a string.") elif 'name' in spec: - if type(spec['name']) != type(''): + if not isinstance(spec['name'], str): raise NipapValueError("VRF specification key 'name' must be a string.") if len(spec) > 1: raise NipapExtraneousInputError("VRF specification contains too many keys, specify VRF id, vrf or name.") @@ -1182,16 +1096,14 @@ def _expand_vrf_spec(self, spec): return where, params - - - def _expand_vrf_query(self, query, table_name = None): + def _expand_vrf_query(self, query, table_name=None): """ Expand VRF query dict into a WHERE-clause. If you need to prefix each column reference with a table name, that can be supplied via the table_name argument. """ - where = unicode() + where = str() opt = list() # handle table name, can be None @@ -1200,16 +1112,16 @@ def _expand_vrf_query(self, query, table_name = None): else: col_prefix = table_name + "." - if type(query['val1']) == dict and type(query['val2']) == dict: + if isinstance(query['val1'], dict) and isinstance(query['val2'], dict): # Sub expression, recurse! This is used for boolean operators: AND OR # add parantheses sub_where1, opt1 = self._expand_vrf_query(query['val1'], table_name) sub_where2, opt2 = self._expand_vrf_query(query['val2'], table_name) try: - where += unicode(" (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) ) + where += " (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) except KeyError: - raise NipapNoSuchOperatorError("No such operator %s" % unicode(query['operator'])) + raise NipapNoSuchOperatorError("No such operator {}".format(query['operator'])) opt += opt1 opt += opt2 @@ -1221,11 +1133,11 @@ def _expand_vrf_query(self, query, table_name = None): # val1 is variable, val2 is string. 
if query['val1'] not in _vrf_spec: - raise NipapInputError('Search variable \'%s\' unknown' % unicode(query['val1'])) + raise NipapInputError("Search variable '{}' unknown".format(query['val1'])) # build where clause if query['operator'] not in _operation_map: - raise NipapNoSuchOperatorError("No such operator %s" % query['operator']) + raise NipapNoSuchOperatorError("No such operator {}".format(query['operator'])) # workaround for handling equal matches of NULL-values if query['operator'] == 'equals' and query['val2'] is None: @@ -1234,22 +1146,16 @@ def _expand_vrf_query(self, query, table_name = None): query['operator'] = 'is_not' if query['operator'] in ('equals_any',): - where = unicode(" %%s = ANY (%s%s::citext[]) " % - ( col_prefix, _vrf_spec[query['val1']]['column']) - ) + where = " %%s = ANY (%s%s::citext[]) " % (col_prefix, _vrf_spec[query['val1']]['column']) else: - where = unicode(" %s%s %s %%s " % - ( col_prefix, _vrf_spec[query['val1']]['column'], - _operation_map[query['operator']] ) - ) + where = " %s%s %s %%s " % (col_prefix, _vrf_spec[query['val1']]['column'], + _operation_map[query['operator']]) opt.append(query['val2']) return where, opt - - @requires_rw def add_vrf(self, auth, attr): """ Add a new VRF. @@ -1268,10 +1174,10 @@ def add_vrf(self, auth, attr): :py:func:`nipap.xmlrpc.NipapXMLRPC.add_vrf` for full understanding. """ - self._logger.debug("add_vrf called; attr: %s" % unicode(attr)) + self._logger.debug("add_vrf called; attr: %s", attr) # sanity check - do we have all attributes? - req_attr = [ 'rt', 'name' ] + req_attr = ['rt', 'name'] self._check_attr(attr, req_attr, _vrf_attrs) insert, params = self._sql_expand_insert(attr) @@ -1279,7 +1185,7 @@ def add_vrf(self, auth, attr): self._execute(sql, params) vrf_id = self._lastrowid() - vrf = self.list_vrf(auth, { 'id': vrf_id })[0] + vrf = self.list_vrf(auth, {'id': vrf_id})[0] # write to audit table audit_params = { @@ -1290,15 +1196,14 @@ def add_vrf(self, auth, attr): 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, 'authoritative_source': auth.authoritative_source, - 'description': 'Added VRF %s with attr: %s' % (vrf['rt'], unicode(vrf)) + 'description': 'Added VRF %s with attr: %s' % (vrf['rt'], vrf) } sql, params = self._sql_expand_insert(audit_params) - self._execute('INSERT INTO ip_net_log %s' % sql, params) + self._execute('INSERT INTO ip_net_log ' + sql, params) return vrf - @requires_rw def remove_vrf(self, auth, spec): """ Remove a VRF. @@ -1316,26 +1221,20 @@ def remove_vrf(self, auth, spec): understanding. 
""" - self._logger.debug("remove_vrf called; spec: %s" % unicode(spec)) + self._logger.debug("remove_vrf called; spec: %s", spec) # get list of VRFs to remove before removing them vrfs = self.list_vrf(auth, spec) # remove prefixes in VRFs for vrf in vrfs: - v4spec = { - 'prefix': '0.0.0.0/0', - 'vrf_id': vrf['id'] - } - v6spec = { - 'prefix': '::/0', - 'vrf_id': vrf['id'] - } - self.remove_prefix(auth, spec = v4spec, recursive = True) - self.remove_prefix(auth, spec = v6spec, recursive = True) + v4spec = {'prefix': '0.0.0.0/0', 'vrf_id': vrf['id']} + v6spec = {'prefix': '::/0', 'vrf_id': vrf['id']} + self.remove_prefix(auth, spec=v4spec, recursive=True) + self.remove_prefix(auth, spec=v6spec, recursive=True) where, params = self._expand_vrf_spec(spec) - sql = "DELETE FROM ip_net_vrf WHERE %s" % where + sql = "DELETE FROM ip_net_vrf WHERE " + where self._execute(sql, params) # write to audit table @@ -1351,9 +1250,7 @@ def remove_vrf(self, auth, spec): 'description': 'Removed vrf %s' % v['rt'] } sql, params = self._sql_expand_insert(audit_params) - self._execute('INSERT INTO ip_net_log %s' % sql, params) - - + self._execute('INSERT INTO ip_net_log ' + sql, params) def list_vrf(self, auth, spec=None): """ Return a list of VRFs matching `spec`. @@ -1374,7 +1271,7 @@ def list_vrf(self, auth, spec=None): if spec is None: spec = {} - self._logger.debug("list_vrf called; spec: %s" % unicode(spec)) + self._logger.debug("list_vrf called; spec: %s", spec) sql = "SELECT * FROM ip_net_vrf" @@ -1395,9 +1292,7 @@ def list_vrf(self, auth, spec=None): return res - - - def _get_vrf(self, auth, spec, prefix = 'vrf_'): + def _get_vrf(self, auth, spec, prefix='vrf_'): """ Get a VRF based on prefix spec Shorthand function to reduce code in the functions below, since @@ -1414,23 +1309,20 @@ def _get_vrf(self, auth, spec, prefix = 'vrf_'): # if None, mangle it to being 0, ie our default VRF if spec[prefix + 'id'] is None: spec[prefix + 'id'] = 0 - vrf = self.list_vrf(auth, { 'id': spec[prefix + 'id'] }) + vrf = self.list_vrf(auth, {'id': spec[prefix + 'id']}) elif prefix + 'rt' in spec: - vrf = self.list_vrf(auth, { 'rt': spec[prefix + 'rt'] }) + vrf = self.list_vrf(auth, {'rt': spec[prefix + 'rt']}) elif prefix + 'name' in spec: - vrf = self.list_vrf(auth, { 'name': spec[prefix + 'name'] }) + vrf = self.list_vrf(auth, {'name': spec[prefix + 'name']}) else: # no VRF specified - return VRF "default" - vrf = self.list_vrf(auth, { 'id': 0 }) + vrf = self.list_vrf(auth, {'id': 0}) if len(vrf) > 0: return vrf[0] raise NipapNonExistentError('No matching VRF found.') - - - @requires_rw def edit_vrf(self, auth, spec, attr): """ Update VRFs matching `spec` with attributes `attr`. @@ -1448,8 +1340,7 @@ def edit_vrf(self, auth, spec, attr): understanding. """ - self._logger.debug("edit_vrf called; spec: %s attr: %s" % - (unicode(spec), unicode(attr))) + self._logger.debug("edit_vrf called; spec: %s attr: %s", spec, attr) # sanity check - do we have all attributes? 
self._check_attr(attr, [], _vrf_attrs) @@ -1459,7 +1350,7 @@ def edit_vrf(self, auth, spec, attr): where, params1 = self._expand_vrf_spec(spec) update, params2 = self._sql_expand_update(attr) - params = dict(params2.items() + params1.items()) + params = dict(list(params2.items()) + list(params1.items())) if len(attr) == 0: raise NipapInputError("'attr' must not be empty.") @@ -1481,15 +1372,13 @@ def edit_vrf(self, auth, spec, attr): 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, 'authoritative_source': auth.authoritative_source, - 'description': 'Edited VRF %s attr: %s' % (v['rt'], unicode(attr)) + 'description': 'Edited VRF %s attr: %s' % (v['rt'], attr) } sql, params = self._sql_expand_insert(audit_params) - self._execute('INSERT INTO ip_net_log %s' % sql, params) + self._execute('INSERT INTO ip_net_log ' + sql, params) return updated_vrfs - - def search_vrf(self, auth, query, search_options=None): """ Search VRF list for VRFs matching `query`. @@ -1587,8 +1476,7 @@ def search_vrf(self, auth, query, search_options=None): try: search_options['max_result'] = int(search_options['max_result']) except (ValueError, TypeError): - raise NipapValueError('Invalid value for option' + - ''' 'max_result'. Only integer values allowed.''') + raise NipapValueError("Invalid value for option 'max_result'. Only integer values allowed.") # offset if 'offset' not in search_options: @@ -1597,10 +1485,9 @@ def search_vrf(self, auth, query, search_options=None): try: search_options['offset'] = int(search_options['offset']) except (ValueError, TypeError): - raise NipapValueError('Invalid value for option' + - ''' 'offset'. Only integer values allowed.''') + raise NipapValueError("Invalid value for option 'offset'. Only integer values allowed.") - self._logger.debug('search_vrf called; query: %s search_options: %s' % (unicode(query), unicode(search_options))) + self._logger.debug('search_vrf called; query: %s search_options: %s', query, search_options) opt = None sql = """ SELECT * FROM ip_net_vrf""" @@ -1611,16 +1498,15 @@ def search_vrf(self, auth, query, search_options=None): where, opt = self._expand_vrf_query(query) sql += " WHERE " + where - sql += " ORDER BY vrf_rt_order(rt) NULLS FIRST LIMIT " + unicode(search_options['max_result']) + " OFFSET " + unicode(search_options['offset']) + sql += " ORDER BY vrf_rt_order(rt) NULLS FIRST LIMIT %s OFFSET %s" % ( + search_options['max_result'], search_options['offset'],) self._execute(sql, opt) result = list() for row in self._curs_pg: result.append(dict(row)) - return { 'search_options': search_options, 'result': result } - - + return {'search_options': search_options, 'result': result} def smart_search_vrf(self, auth, query_str, search_options=None, extra_query=None): """ Perform a smart search on VRF list. 
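
The edit_vrf hunk above carries another standard py3 fix: dict(params2.items() + params1.items()) becomes dict(list(params2.items()) + list(params1.items())), because dict.items() on Python 3 returns a view object that does not support "+". A small standalone illustration, with variable names of my own choosing:

    params1 = {"spec_id": 1}
    params2 = {"name": "test"}

    # Python 2: items() returned lists, so concatenating with '+' worked directly.
    # Python 3: items() returns a dict view; '+' raises TypeError, hence list() first.
    merged = dict(list(params2.items()) + list(params1.items()))
    assert merged == {"name": "test", "spec_id": 1}

    # Equivalent and idiomatic on Python 3.5+:
    assert {**params2, **params1} == merged
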
@@ -1667,7 +1553,7 @@ def smart_search_vrf(self, auth, query_str, search_options=None, extra_query=Non if search_options is None: search_options = {} - self._logger.debug("smart_search_vrf query string: %s" % query_str) + self._logger.debug("smart_search_vrf query string: %s", query_str) success, query = self._parse_vrf_query(query_str) if not success: @@ -1676,17 +1562,13 @@ def smart_search_vrf(self, auth, query_str, search_options=None, extra_query=Non 'search_options': search_options, 'result': [], 'error': True, - 'error_message': 'query interpretation failed' + 'error_message': 'query interpretation failed', } if extra_query is not None: - query = { - 'operator': 'and', - 'val1': query, - 'val2': extra_query - } + query = {'operator': 'and', 'val1': query, 'val2': extra_query} - self._logger.debug("smart_search_vrf; query expanded to: %s" % unicode(query)) + self._logger.debug("smart_search_vrf; query expanded to: %s", query) search_result = self.search_vrf(auth, query, search_options) search_result['interpretation'] = query @@ -1694,8 +1576,6 @@ def smart_search_vrf(self, auth, query_str, search_options=None, extra_query=Non return search_result - - def _parse_vrf_query(self, query_str): """ Parse a smart search query for VRFs @@ -1706,8 +1586,6 @@ def _parse_vrf_query(self, query_str): query = sp.parse(query_str) return query - - # # Pool functions # @@ -1715,39 +1593,37 @@ def _expand_pool_spec(self, spec): """ Expand pool specification to sql. """ - if type(spec) is not dict: + if not isinstance(spec, dict): raise NipapInputError("pool specification must be a dict") - allowed_values = ['id', 'name' ] + allowed_values = ['id', 'name'] for a in spec: if a not in allowed_values: - raise NipapExtraneousInputError("extraneous specification key %s" % a) + raise NipapExtraneousInputError("extraneous specification key {}".format(a)) if 'id' in spec: - if type(spec['id']) not in (long, int): + if not isinstance(spec['id'], int): raise NipapValueError("pool specification key 'id' must be an integer") - if spec != { 'id': spec['id'] }: + if spec != {'id': spec['id']}: raise NipapExtraneousInputError("pool specification with 'id' should not contain anything else") elif 'name' in spec: - if type(spec['name']) != type(''): + if not isinstance(spec['name'], str): raise NipapValueError("pool specification key 'name' must be a string") if 'id' in spec: - raise NipapExtraneousInputError("pool specification contain both 'id' and 'name', specify pool id or name") + raise NipapExtraneousInputError("pool specification contains both 'id' and 'name', specify pool id or name") where, params = self._sql_expand_where(spec, 'spec_', 'po.') return where, params - - - def _expand_pool_query(self, query, table_name = None): + def _expand_pool_query(self, query, table_name=None): """ Expand pool query dict into a WHERE-clause. If you need to prefix each column reference with a table name, that can be supplied via the table_name argument. """ - where = unicode() + where = '' opt = list() # handle table name, can be None @@ -1756,17 +1632,16 @@ def _expand_pool_query(self, query, table_name = None): else: col_prefix = table_name + "." - - if type(query['val1']) == dict and type(query['val2']) == dict: + if isinstance(query['val1'], dict) and isinstance(query['val2'], dict): # Sub expression, recurse! 
This is used for boolean operators: AND OR # add parantheses sub_where1, opt1 = self._expand_pool_query(query['val1'], table_name) sub_where2, opt2 = self._expand_pool_query(query['val2'], table_name) try: - where += unicode(" (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) ) + where += " (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) except KeyError: - raise NipapNoSuchOperatorError("No such operator %s" % unicode(query['operator'])) + raise NipapNoSuchOperatorError("No such operator {}".format(query['operator'])) opt += opt1 opt += opt2 @@ -1778,11 +1653,11 @@ def _expand_pool_query(self, query, table_name = None): # val1 is variable, val2 is string. if query['val1'] not in _pool_spec: - raise NipapInputError('Search variable \'%s\' unknown' % unicode(query['val1'])) + raise NipapInputError("Search variable '{}' unknown".format(query['val1'])) # build where clause if query['operator'] not in _operation_map: - raise NipapNoSuchOperatorError("No such operator %s" % query['operator']) + raise NipapNoSuchOperatorError("No such operator {}".format(query['operator'])) # workaround for handling equal matches of NULL-values if query['operator'] == 'equals' and query['val2'] is None: @@ -1791,22 +1666,16 @@ def _expand_pool_query(self, query, table_name = None): query['operator'] = 'is_not' if query['operator'] in ('equals_any',): - where = unicode(" %%s = ANY (%s%s::citext[]) " % - ( col_prefix, _pool_spec[query['val1']]['column']) - ) + where = " %%s = ANY (%s%s::citext[]) " % (col_prefix, _pool_spec[query['val1']]['column']) else: - where = unicode(" %s%s %s %%s " % - ( col_prefix, _pool_spec[query['val1']]['column'], - _operation_map[query['operator']] ) - ) + where = " %s%s %s %%s " % (col_prefix, _pool_spec[query['val1']]['column'], + _operation_map[query['operator']]) opt.append(query['val2']) return where, opt - - @requires_rw def add_pool(self, auth, attr): """ Create a pool according to `attr`. @@ -1824,7 +1693,7 @@ def add_pool(self, auth, attr): understanding. """ - self._logger.debug("add_pool called; attrs: %s" % unicode(attr)) + self._logger.debug("add_pool called; attrs: %s", attr) # sanity check - do we have all attributes? req_attr = ['name', 'description', 'default_type'] @@ -1835,7 +1704,7 @@ def add_pool(self, auth, attr): self._execute(sql, params) pool_id = self._lastrowid() - pool = self.list_pool(auth, { 'id': pool_id })[0] + pool = self.list_pool(auth, {'id': pool_id})[0] # write to audit table audit_params = { @@ -1845,15 +1714,13 @@ def add_pool(self, auth, attr): 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, 'authoritative_source': auth.authoritative_source, - 'description': 'Added pool %s with attr: %s' % (pool['name'], unicode(attr)) + 'description': 'Added pool ' + pool['name'] + ' with attr: ' + str(attr), } sql, params = self._sql_expand_insert(audit_params) - self._execute('INSERT INTO ip_net_log %s' % sql, params) + self._execute('INSERT INTO ip_net_log ' + sql, params) return pool - - @requires_rw def remove_pool(self, auth, spec): """ Remove a pool. @@ -1869,13 +1736,13 @@ def remove_pool(self, auth, spec): understanding. 
""" - self._logger.debug("remove_pool called; spec: %s" % unicode(spec)) + self._logger.debug("remove_pool called; spec: %s", spec) # fetch list of pools to remove before they are removed pools = self.list_pool(auth, spec) where, params = self._expand_pool_spec(spec) - sql = "DELETE FROM ip_net_pool AS po WHERE %s" % where + sql = "DELETE FROM ip_net_pool AS po WHERE " + where self._execute(sql, params) # write to audit table @@ -1888,15 +1755,13 @@ def remove_pool(self, auth, spec): for p in pools: audit_params['pool_id'] = p['id'], audit_params['pool_name'] = p['name'], - audit_params['description'] = 'Removed pool %s' % p['name'] + audit_params['description'] = 'Removed pool ' + p['name'] sql, params = self._sql_expand_insert(audit_params) - self._execute('INSERT INTO ip_net_log %s' % sql, params) - - + self._execute('INSERT INTO ip_net_log ' + sql, params) def list_pool(self, auth, spec=None): - """ Return a list of pools. + """Return a list of pools. * `auth` [BaseAuth] AAA options. @@ -1914,7 +1779,7 @@ def list_pool(self, auth, spec=None): if spec is None: spec = {} - self._logger.debug("list_pool called; spec: %s" % unicode(spec)) + self._logger.debug("list_pool called; spec: %s", spec) sql = """SELECT DISTINCT (po.id), po.id, @@ -1968,7 +1833,6 @@ def list_pool(self, auth, spec=None): return res - def _check_pool_attr(self, attr, req_attr=None): """ Check pool attributes. """ @@ -1982,11 +1846,8 @@ def _check_pool_attr(self, attr, req_attr=None): # validate IPv4 prefix length if attr.get('ipv4_default_prefix_length') is not None: try: - attr['ipv4_default_prefix_length'] = \ - int(attr['ipv4_default_prefix_length']) - - if (attr['ipv4_default_prefix_length'] > 32 or - attr['ipv4_default_prefix_length'] < 1): + attr['ipv4_default_prefix_length'] = int(attr['ipv4_default_prefix_length']) + if not (1 <= attr['ipv4_default_prefix_length'] <= 32): raise ValueError() except ValueError: raise NipapValueError('Default IPv4 prefix length must be an integer between 1 and 32.') @@ -1994,17 +1855,13 @@ def _check_pool_attr(self, attr, req_attr=None): # validate IPv6 prefix length if attr.get('ipv6_default_prefix_length'): try: - attr['ipv6_default_prefix_length'] = \ - int(attr['ipv6_default_prefix_length']) + attr['ipv6_default_prefix_length'] = int(attr['ipv6_default_prefix_length']) - if (attr['ipv6_default_prefix_length'] > 128 or - attr['ipv6_default_prefix_length'] < 1): + if not (1 <= attr['ipv6_default_prefix_length'] <= 128): raise ValueError() except ValueError: raise NipapValueError('Default IPv6 prefix length must be an integer between 1 and 128.') - - def _get_pool(self, auth, spec): """ Get a pool. @@ -2021,8 +1878,6 @@ def _get_pool(self, auth, spec): raise NipapInputError("non-existing pool specified") return pool[0] - - @requires_rw def edit_pool(self, auth, spec, attr): """ Update pool given by `spec` with attributes `attr`. @@ -2040,17 +1895,16 @@ def edit_pool(self, auth, spec, attr): understanding. 
""" - self._logger.debug("edit_pool called; spec: %s attr: %s" % - (unicode(spec), unicode(attr))) + self._logger.debug("edit_pool called; spec: %s attr: %s", spec, attr) - if ('id' not in spec and 'name' not in spec) or ( 'id' in spec and 'name' in spec ): - raise NipapMissingInputError('''pool spec must contain either 'id' or 'name' ''') + if ('id' not in spec and 'name' not in spec) or ('id' in spec and 'name' in spec): + raise NipapMissingInputError("pool spec must contain either 'id' or 'name'") self._check_pool_attr(attr) where, params1 = self._expand_pool_spec(spec) update, params2 = self._sql_expand_update(attr) - params = dict(params2.items() + params1.items()) + params = dict(list(params2.items()) + list(params1.items())) pools = self.list_pool(auth, spec) @@ -2067,20 +1921,18 @@ def edit_pool(self, auth, spec, attr): 'username': auth.username, 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, - 'authoritative_source': auth.authoritative_source + 'authoritative_source': auth.authoritative_source, } for p in pools: audit_params['pool_id'] = p['id'] audit_params['pool_name'] = p['name'] - audit_params['description'] = 'Edited pool %s attr: %s' % (p['name'], unicode(attr)) + audit_params['description'] = 'Edited pool ' + p['name'] + ' attr: ' + str(attr) sql, params = self._sql_expand_insert(audit_params) - self._execute('INSERT INTO ip_net_log %s' % sql, params) + self._execute('INSERT INTO ip_net_log ' + sql, params) return updated_pools - - def search_pool(self, auth, query, search_options=None): """ Search pool list for pools matching `query`. @@ -2178,8 +2030,7 @@ def search_pool(self, auth, query, search_options=None): try: search_options['max_result'] = int(search_options['max_result']) except (ValueError, TypeError): - raise NipapValueError('Invalid value for option' + - ''' 'max_result'. Only integer values allowed.''') + raise NipapValueError("Invalid value for option 'max_result'. Only integer values allowed.") # offset if 'offset' not in search_options: @@ -2188,10 +2039,9 @@ def search_pool(self, auth, query, search_options=None): try: search_options['offset'] = int(search_options['offset']) except (ValueError, TypeError): - raise NipapValueError('Invalid value for option' + - ''' 'offset'. Only integer values allowed.''') + raise NipapValueError("Invalid value for option 'offset'. Only integer values allowed.") - self._logger.debug('search_pool search_options: %s' % unicode(search_options)) + self._logger.debug('search_pool search_options: %s', search_options) where, opt = self._expand_pool_query(query) sql = """SELECT DISTINCT (po.id), @@ -2225,7 +2075,7 @@ def search_pool(self, auth, query, search_options=None): LEFT OUTER JOIN ip_net_plan AS inp ON (inp.pool_id = po.id) LEFT OUTER JOIN ip_net_vrf AS vrf ON (vrf.id = inp.vrf_id) WHERE """ + where + """ ORDER BY po.name - LIMIT """ + unicode(search_options['max_result']) + """ OFFSET """ + unicode(search_options['offset']) + LIMIT %s OFFSET %s""" % (search_options['max_result'], search_options['offset']) self._execute(sql, opt) @@ -2233,9 +2083,7 @@ def search_pool(self, auth, query, search_options=None): for row in self._curs_pg: result.append(dict(row)) - return { 'search_options': search_options, 'result': result } - - + return {'search_options': search_options, 'result': result} def smart_search_pool(self, auth, query_str, search_options=None, extra_query=None): """ Perform a smart search on pool list. 
@@ -2281,7 +2129,7 @@ def smart_search_pool(self, auth, query_str, search_options=None, extra_query=No if search_options is None: search_options = {} - self._logger.debug("smart_search_pool query string: %s" % query_str) + self._logger.debug("smart_search_pool query string: %s", query_str) success, query = self._parse_pool_query(query_str) if not success: @@ -2290,17 +2138,13 @@ def smart_search_pool(self, auth, query_str, search_options=None, extra_query=No 'search_options': search_options, 'result': [], 'error': True, - 'error_message': 'query interpretation failed' - } + 'error_message': 'query interpretation failed', + } if extra_query is not None: - query = { - 'operator': 'and', - 'val1': query, - 'val2': extra_query - } + query = {'operator': 'and', 'val1': query, 'val2': extra_query} - self._logger.debug("smart_search_pool; query expanded to: %s" % unicode(query)) + self._logger.debug("smart_search_pool; query expanded to: %s", query) search_result = self.search_pool(auth, query, search_options) search_result['interpretation'] = query @@ -2308,7 +2152,6 @@ def smart_search_pool(self, auth, query_str, search_options=None, extra_query=No return search_result - def _parse_pool_query(self, query_str): """ Parse a smart search query for pools @@ -2319,22 +2162,20 @@ def _parse_pool_query(self, query_str): query = sp.parse(query_str) return query - - # # PREFIX FUNCTIONS # - def _expand_prefix_spec(self, spec, prefix = ''): + def _expand_prefix_spec(self, spec, prefix=''): """ Expand prefix specification to SQL. """ # sanity checks - if type(spec) is not dict: + if not isinstance(spec, dict): raise NipapInputError('invalid prefix specification') - for key in spec.keys(): + for key in spec: if key not in _prefix_spec: - raise NipapExtraneousInputError("Key '" + key + "' not allowed in prefix spec.") + raise NipapExtraneousInputError("Key '{}' not allowed in prefix spec.".format(key)) where = "" params = {} @@ -2347,7 +2188,7 @@ def _expand_prefix_spec(self, spec, prefix = ''): family = None if 'family' in spec: family = spec['family'] - del(spec['family']) + del spec['family'] # rename prefix columns spec2 = {} @@ -2363,15 +2204,15 @@ def _expand_prefix_spec(self, spec, prefix = ''): if prefix + 'vrf_name' in spec: spec['vrf.name'] = spec[prefix + 'vrf_name'] - del(spec[prefix + 'vrf_name']) + del spec[prefix + 'vrf_name'] if prefix + 'vrf_rt' in spec: spec['vrf.rt'] = spec[prefix + 'vrf_rt'] - del(spec[prefix + 'vrf_rt']) + del spec[prefix + 'vrf_rt'] if prefix + 'pool_name' in spec: spec['pool.name'] = spec[prefix + 'pool_name'] - del(spec[prefix + 'pool_name']) + del spec[prefix + 'pool_name'] where, params = self._sql_expand_where(spec) @@ -2384,19 +2225,17 @@ def _expand_prefix_spec(self, spec, prefix = ''): where += " AND family(" + prefix + "prefix) = %(family)s" params['family'] = family - self._logger.debug("_expand_prefix_spec; where: %s params: %s" % (where, unicode(params))) + self._logger.debug("_expand_prefix_spec; where: %s params: %s", where, params) return where, params - - - def _expand_prefix_query(self, query, table_name = None): + def _expand_prefix_query(self, query, table_name=None): """ Expand prefix query dict into a WHERE-clause. If you need to prefix each column reference with a table name, that can be supplied via the table_name argument. """ - where = unicode() + where = '' opt = list() # handle table name, can be None @@ -2405,21 +2244,21 @@ def _expand_prefix_query(self, query, table_name = None): else: col_prefix = table_name + "." 
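For context on the checks just below: when both `val1` and `val2` are dicts, `_expand_prefix_query()` treats the query as a boolean sub-expression and recurses into each side. A hedged illustration of the expansion (the exact SQL text is approximate; the operator-to-SQL mapping lives in `_operation_map`):

    query = {
        'operator': 'and',
        'val1': {'operator': 'equals', 'val1': 'vrf_id', 'val2': 0},
        'val2': {'operator': 'contained_within', 'val1': 'prefix', 'val2': '10.0.0.0/8'},
    }
    # recursing into val1/val2 produces roughly:
    #   where = " ( vrf_id = %s AND  iprange(prefix) << %s ) "
    #   opt   = [0, '10.0.0.0/8']
    # i.e. a parametrised WHERE fragment plus the ordered list of bind values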
- if 'val1' not in query: + if 'val1' not in query or query['val1'] is None: raise NipapMissingInputError("'val1' must be specified") - if 'val2' not in query: - raise NipapMissingInputError("'val2' must be specified") + if 'val2' not in query or query['val2'] is None: + raise NipapMissingInputError("Value (val2 in API) for '{}' must be specified".format(query['val1'])) - if type(query['val1']) == dict and type(query['val2']) == dict: + if isinstance(query['val1'], dict) and isinstance(query['val2'], dict): # Sub expression, recurse! This is used for boolean operators: AND OR - # add parantheses + # add parentheses sub_where1, opt1 = self._expand_prefix_query(query['val1'], table_name) sub_where2, opt2 = self._expand_prefix_query(query['val2'], table_name) try: - where += unicode(" (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) ) + where += " (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) except KeyError: - raise NipapNoSuchOperatorError("No such operator %s" % unicode(query['operator'])) + raise NipapNoSuchOperatorError("No such operator {}".format(query['operator'])) opt += opt1 opt += opt2 @@ -2431,11 +2270,11 @@ def _expand_prefix_query(self, query, table_name = None): # val1 is key, val2 is value. if query['val1'] not in _prefix_spec: - raise NipapInputError('Search variable \'%s\' unknown' % unicode(query['val1'])) + raise NipapInputError("Search variable '{}' unknown".format(query['val1'])) # build where clause if query['operator'] not in _operation_map: - raise NipapNoSuchOperatorError("No such operator %s" % query['operator']) + raise NipapNoSuchOperatorError("No such operator {}".format(query['operator'])) if query['val1'] == 'vrf_id' and query['val2'] is None: query['val2'] = 0 @@ -2450,42 +2289,28 @@ def _expand_prefix_query(self, query, table_name = None): 'contains', 'contains_equals', 'contained_within', - 'contained_within_equals'): - - where = " iprange(prefix) %(operator)s %%s " % { - 'col_prefix': col_prefix, - 'operator': _operation_map[query['operator']] - } + 'contained_within_equals', + ): + # NOTE: removed col_prefix since it wasn't used + where = " iprange(prefix) " + _operation_map[query['operator']] + " %s " elif query['operator'] in ('equals_any',): - where = unicode(" %%s = ANY (%s%s::citext[]) " % - ( col_prefix, _prefix_spec[query['val1']]['column']) - ) - - elif query['operator'] in ( - 'like', - 'regex_match', - 'regex_not_match'): - # we COALESCE column with '' to allow for example a regexp - # search on '.*' to match columns which are NULL in the - # database - where = unicode(" COALESCE(%s%s, '') %s %%s " % - ( col_prefix, _prefix_spec[query['val1']]['column'], - _operation_map[query['operator']] ) - ) + where = " %s = ANY (" + col_prefix + _prefix_spec[query['val1']]['column'] + "::citext[]) " + + elif query['operator'] in ('like', 'regex_match', 'regex_not_match'): + # we COALESCE column with '' to allow for example a regexp search on '.*' to match columns + # which are NULL in the database + where = " COALESCE(" + col_prefix + _prefix_spec[query['val1']]['column'] + ", '') " + _operation_map[ query['operator']] + " %s " else: - where = unicode(" %s%s %s %%s " % - ( col_prefix, _prefix_spec[query['val1']]['column'], - _operation_map[query['operator']] ) - ) + where = ' ' + col_prefix + _prefix_spec[query['val1']]['column'] + ' ' + _operation_map[ query['operator']] + ' %s ' opt.append(query['val2']) return where, opt - - @requires_rw def add_prefix(self, auth, attr, args=None): """ Add a
prefix and return its ID. @@ -2530,14 +2355,14 @@ def add_prefix(self, auth, attr, args=None): if args is None: args = {} - self._logger.debug("add_prefix called; attr: %s; args: %s" % (unicode(attr), unicode(args))) + self._logger.debug("add_prefix called; attr: %s; args: %s", attr, args) # args defined? if args is None: args = {} # attr must be a dict! - if type(attr) != dict: + if not isinstance(attr, dict): raise NipapInputError("'attr' must be a dict") # handle pool attributes - find correct one and remove bad pool keys @@ -2546,33 +2371,24 @@ def add_prefix(self, auth, attr, args=None): if 'pool_id' in attr or 'pool_name' in attr: if 'pool_id' in attr: if attr['pool_id'] is None: - pool = { - 'id': None, - 'name': None - } + pool = {'id': None, 'name': None} else: - pool = self._get_pool(auth, { 'id': attr['pool_id'] }) + pool = self._get_pool(auth, {'id': attr['pool_id']}) else: if attr['pool_name'] is None: - pool = { - 'id': None, - 'name': None - } + pool = {'id': None, 'name': None} else: # resolve pool name to pool id - pool = self._get_pool(auth, { 'name': attr['pool_name'] }) + pool = self._get_pool(auth, {'name': attr['pool_name']}) # and delete the pool_name attr - del(attr['pool_name']) + del attr['pool_name'] attr['pool_id'] = pool['id'] else: - pool = { - 'id': None, - 'name': None - } + pool = {'id': None, 'name': None} attr['authoritative_source'] = auth.authoritative_source @@ -2582,7 +2398,7 @@ def add_prefix(self, auth, attr, args=None): raise NipapExtraneousInputError("specify 'prefix' or 'from-prefix' or 'from-pool'") else: - if ('from-pool' not in args and 'from-prefix' not in args) or ('from-pool' in args and 'from-prefix' in args): + if ('from-pool' in args) == ('from-prefix' in args): raise NipapExtraneousInputError("specify 'prefix' or 'from-prefix' or 'from-pool'") # VRF handling for manually specified prefix @@ -2590,9 +2406,9 @@ def add_prefix(self, auth, attr, args=None): # handle VRF - find the correct one and remove bad VRF keys vrf = self._get_vrf(auth, attr) if 'vrf_rt' in attr: - del(attr['vrf_rt']) + del attr['vrf_rt'] if 'vrf_name' in attr: - del(attr['vrf_name']) + del attr['vrf_name'] attr['vrf_id'] = vrf['id'] # VRF handling for allocation from pool or parent prefix @@ -2602,9 +2418,9 @@ def add_prefix(self, auth, attr, args=None): # handle VRF - find the correct one and remove bad VRF keys vrf = self._get_vrf(auth, attr) if 'vrf_rt' in attr: - del(attr['vrf_rt']) + del attr['vrf_rt'] if 'vrf_name' in attr: - del(attr['vrf_name']) + del attr['vrf_name'] attr['vrf_id'] = vrf['id'] if 'from-pool' in args: @@ -2633,9 +2449,9 @@ def add_prefix(self, auth, attr, args=None): # handle VRF - find the correct one and remove bad VRF keys vrf = self._get_vrf(auth, attr) if 'vrf_rt' in attr: - del(attr['vrf_rt']) + del attr['vrf_rt'] if 'vrf_name' in attr: - del(attr['vrf_name']) + del attr['vrf_name'] attr['vrf_id'] = vrf['id'] # VRF fiddling @@ -2650,7 +2466,7 @@ def add_prefix(self, auth, attr, args=None): raise NipapNonExistentError("no free prefix found") # do we have all attributes? 
- req_attr = [ 'prefix', 'authoritative_source' ] + req_attr = ['prefix', 'authoritative_source'] self._check_attr(attr, req_attr, _prefix_attrs) if ('description' not in attr) and ('node' not in attr): raise NipapMissingInputError('Either description or node must be specified.') @@ -2663,7 +2479,7 @@ def add_prefix(self, auth, attr, args=None): self._execute(sql, params) prefix_id = self._lastrowid() - prefix = self.list_prefix(auth, { 'id': prefix_id })[0] + prefix = self.list_prefix(auth, {'id': prefix_id})[0] # write to audit table audit_params = { @@ -2676,23 +2492,22 @@ def add_prefix(self, auth, attr, args=None): 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, 'authoritative_source': auth.authoritative_source, - 'description': 'Added prefix %s with attr: %s' % (prefix['prefix'], unicode(attr)) + 'description': 'Added prefix ' + prefix['prefix'] + ' with attr: ' + str(attr), } sql, params = self._sql_expand_insert(audit_params) - self._execute('INSERT INTO ip_net_log %s' % sql, params) + self._execute('INSERT INTO ip_net_log ' + sql, params) if pool['id'] is not None: audit_params['pool_id'] = pool['id'] audit_params['pool_name'] = pool['name'] - audit_params['description'] = 'Pool %s expanded with prefix %s in VRF %s' % (pool['name'], prefix['prefix'], unicode(prefix['vrf_rt'])) + audit_params['description'] = 'Pool ' + pool['name'] + ' expanded with prefix ' + prefix[ 'prefix'] + ' in VRF ' + str(prefix['vrf_rt']) sql, params = self._sql_expand_insert(audit_params) - self._execute('INSERT INTO ip_net_log %s' % sql, params) + self._execute('INSERT INTO ip_net_log ' + sql, params) return prefix - - @requires_rw def edit_prefix(self, auth, spec, attr): """ Update prefix matching `spec` with attributes `attr`. @@ -2714,48 +2529,38 @@ def edit_prefix(self, auth, spec, attr): understanding. """ - self._logger.debug("edit_prefix called; spec: %s attr: %s" % - (unicode(spec), unicode(attr))) + self._logger.debug("edit_prefix called; spec: %s attr: %s", spec, attr) # Handle Pool - find correct one and remove bad pool keys pool = None if 'pool_id' in attr or 'pool_name' in attr: if 'pool_id' in attr: if attr['pool_id'] is None: - pool = { - 'id': None, - 'name': None - } + pool = {'id': None, 'name': None} else: - pool = self._get_pool(auth, { 'id': attr['pool_id'] }) + pool = self._get_pool(auth, {'id': attr['pool_id']}) else: if attr['pool_name'] is None: - pool = { - 'id': None, - 'name': None - } + pool = {'id': None, 'name': None} else: # resolve pool name to pool id - pool = self._get_pool(auth, { 'name': attr['pool_name'] }) + pool = self._get_pool(auth, {'name': attr['pool_name']}) # and delete the pool_name attr - del(attr['pool_name']) + del attr['pool_name'] attr['pool_id'] = pool['id'] else: - pool = { - 'id': None, - 'name': None - } + pool = {'id': None, 'name': None} # Handle VRF in attributes - find the correct one and remove bad VRF keys.
if 'vrf_rt' in attr or 'vrf_name' in attr or 'vrf_id' in attr: vrf = self._get_vrf(auth, attr) if 'vrf_rt' in attr: - del(attr['vrf_rt']) + del attr['vrf_rt'] if 'vrf_name' in attr: - del(attr['vrf_name']) + del attr['vrf_name'] attr['vrf_id'] = vrf['id'] self._check_attr(attr, [], _prefix_attrs) @@ -2767,7 +2572,7 @@ def edit_prefix(self, auth, spec, attr): where, params1 = self._expand_prefix_spec(spec.copy()) update, params2 = self._sql_expand_update(attr) - params = dict(params2.items() + params1.items()) + params = dict(list(params2.items()) + list(params1.items())) sql = "UPDATE ip_net_plan SET " + update + " WHERE " + where sql += " RETURNING id" @@ -2789,9 +2594,9 @@ def edit_prefix(self, auth, spec, attr): audit_params['vrf_name'] = p['vrf_name'] audit_params['prefix_id'] = p['id'] audit_params['prefix_prefix'] = p['prefix'] - audit_params['description'] = 'Edited prefix %s attr: %s' % (p['prefix'], unicode(attr)) + audit_params['description'] = 'Edited prefix ' + p['prefix'] + ' attr: ' + str(attr) sql, params = self._sql_expand_insert(audit_params) - self._execute('INSERT INTO ip_net_log %s' % sql, params) + self._execute('INSERT INTO ip_net_log ' + sql, params) # Only add to log if something was changed if p['pool_id'] != pool['id']: @@ -2813,27 +2618,25 @@ def edit_prefix(self, auth, spec, attr): audit_params2['pool_id'] = pool['id'] audit_params2['pool_name'] = pool['name'] - audit_params2['description'] = 'Expanded pool %s with prefix %s' % (pool['name'], p['prefix']) + audit_params2['description'] = 'Expanded pool ' + pool['name'] + ' with prefix ' + p['prefix'] sql, params = self._sql_expand_insert(audit_params2) - self._execute('INSERT INTO ip_net_log %s' % sql, params) + self._execute('INSERT INTO ip_net_log ' + sql, params) # if prefix had pool set previously, prefix was removed from that pool if p['pool_id'] is not None: - pool2 = self._get_pool(auth, { 'id': p['pool_id'] }) + pool2 = self._get_pool(auth, {'id': p['pool_id']}) audit_params2['pool_id'] = pool2['id'] audit_params2['pool_name'] = pool2['name'] - audit_params2['description'] = 'Removed prefix %s from pool %s' % (p['prefix'], pool2['name']) + audit_params2['description'] = 'Removed prefix ' + p['prefix'] + ' from pool ' + pool2['name'] sql, params = self._sql_expand_insert(audit_params2) - self._execute('INSERT INTO ip_net_log %s' % sql, params) + self._execute('INSERT INTO ip_net_log ' + sql, params) return updated_prefixes - - def find_free_prefix(self, auth, vrf, args): """ Finds free prefixes in the sources given in `args`. @@ -2869,7 +2672,7 @@ def find_free_prefix(self, auth, vrf, args): Instead of specifying a pool, a prefix which will be searched for new prefixes can be specified. In `args`, the key :attr:`from-prefix` is set to list of prefixes you want to - allocate from and the key :attr:`prefix_length` is set to + allocate from and the key :attr:`prefix_length` is set to the wanted prefix length. 
Example:: @@ -2892,7 +2695,7 @@ def find_free_prefix(self, auth, vrf, args): """ # input sanity - if type(args) is not dict: + if not isinstance(args, dict): raise NipapInputError("invalid input, please provide dict as args") # TODO: find good default value for max_num @@ -2910,12 +2713,12 @@ def find_free_prefix(self, auth, vrf, args): if 'family' not in args: raise NipapMissingInputError("'family' must be specified with 'from-pool' mode") try: - assert int(args['family']) in [ 4, 6 ] + assert int(args['family']) in [4, 6] except (TypeError, AssertionError): raise NipapValueError("incorrect family specified, must be 4 or 6") elif 'from-prefix' in args: - if type(args['from-prefix']) is not list: + if not isinstance(args['from-prefix'], list): raise NipapInputError("from-prefix should be a list") if 'from-pool' in args: raise NipapInputError("specify 'from-pool' OR 'from-prefix'") @@ -2937,7 +2740,7 @@ def find_free_prefix(self, auth, vrf, args): if self._get_afi(p) == int(args['family']): prefixes.append(p) if len(prefixes) == 0: - raise NipapInputError('No prefixes of family %s in pool' % unicode(args['family'])) + raise NipapInputError('No prefixes of family {} in pool'.format(args['family'])) if 'prefix_length' not in args: if int(args['family']) == 4: wpl = pool_result[0]['ipv4_default_prefix_length'] @@ -2972,15 +2775,16 @@ def find_free_prefix(self, auth, vrf, args): params = {} # TODO: this makes me want to piss my pants # we should really write a patch to psycopg2 or something to - # properly adapt an python list of texts with values looking + # properly adapt a python list of texts with values looking # like prefixes to a postgresql array of inets - sql_prefix = ' UNION '.join('SELECT %(prefix' + unicode(prefixes.index(p)) + ')s AS prefix' for p in prefixes) + # UPDATE: This could actually be supported now, only I'm not comfortable messing with this + sql_prefix = ' UNION '.join('SELECT %(prefix' + str(prefixes.index(p)) + ')s AS prefix' for p in prefixes) for p in prefixes: - params['prefix' + unicode(prefixes.index(p))] = unicode(p) + params['prefix%s' % prefixes.index(p)] = p damp = 'SELECT array_agg((prefix::text)::inet) FROM (' + sql_prefix + ') AS a' - sql = """SELECT * FROM find_free_prefix(%(vrf_id)s, (""" + damp + """), %(prefix_length)s, %(max_result)s) AS prefix""" + sql = "SELECT * FROM find_free_prefix(%(vrf_id)s, (" + damp + "), %(prefix_length)s, %(max_result)s) AS prefix" v = self._get_vrf(auth, vrf or {}, '') @@ -2993,13 +2797,11 @@ def find_free_prefix(self, auth, vrf, args): res = list() for row in self._curs_pg: - res.append(unicode(row['prefix'])) + res.append(row['prefix']) return res - - - def list_prefix(self, auth, spec = None): + def list_prefix(self, auth, spec=None): """ List prefixes matching the `spec`. * `auth` [BaseAuth] @@ -3019,10 +2821,9 @@ def list_prefix(self, auth, spec = None): understanding. 
""" - self._logger.debug("list_prefix called; spec: %s" % unicode(spec)) - + self._logger.debug("list_prefix called; spec: %", spec) - if type(spec) is dict: + if isinstance(spec, dict): where, params = self._expand_prefix_spec(spec.copy(), 'inp.') else: raise NipapError("invalid prefix specification") @@ -3065,22 +2866,19 @@ def list_prefix(self, auth, spec = None): inp.expires FROM ip_net_plan inp JOIN ip_net_vrf vrf ON (inp.vrf_id = vrf.id) - LEFT JOIN ip_net_pool pool ON (inp.pool_id = pool.id) %s - ORDER BY vrf.rt NULLS FIRST, prefix""" % where + LEFT JOIN ip_net_pool pool ON (inp.pool_id = pool.id) """ + where + """ + ORDER BY vrf.rt NULLS FIRST, prefix""" self._execute(sql, params) res = list() for row in self._curs_pg: pref = dict(row) - pref['display_prefix'] = unicode(pref['display_prefix']) res.append(pref) return res - - - def _db_remove_prefix(self, spec, recursive = False): + def _db_remove_prefix(self, spec, recursive=False): """ Do the underlying database operations to delete a prefix """ if recursive: @@ -3093,13 +2891,11 @@ def _db_remove_prefix(self, spec, recursive = False): else: where, params = self._expand_prefix_spec(spec) - sql = "DELETE FROM ip_net_plan AS p WHERE %s" % where + sql = "DELETE FROM ip_net_plan AS p WHERE " + where self._execute(sql, params) - - @requires_rw - def remove_prefix(self, auth, spec, recursive = False): + def remove_prefix(self, auth, spec, recursive=False): """ Remove prefix matching `spec`. * `auth` [BaseAuth] @@ -3115,7 +2911,7 @@ def remove_prefix(self, auth, spec, recursive = False): understanding. """ - self._logger.debug("remove_prefix called; spec: %s" % unicode(spec)) + self._logger.debug("remove_prefix called; spec: %s", spec) # sanity check - do we have all attributes? if 'id' in spec: @@ -3149,35 +2945,33 @@ def remove_prefix(self, auth, spec, recursive = False): 'username': auth.username, 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, - 'authoritative_source': auth.authoritative_source + 'authoritative_source': auth.authoritative_source, } for p in prefixes: audit_params['prefix_id'] = p['id'] audit_params['prefix_prefix'] = p['prefix'] - audit_params['description'] = 'Removed prefix %s' % p['prefix'] + audit_params['description'] = 'Removed prefix {}'.format(p['prefix']) audit_params['vrf_id'] = p['vrf_id'] audit_params['vrf_rt'] = p['vrf_rt'] audit_params['vrf_name'] = p['vrf_name'] sql, params = self._sql_expand_insert(audit_params) - self._execute('INSERT INTO ip_net_log %s' % sql, params) + self._execute('INSERT INTO ip_net_log ' + sql, params) if p['pool_id'] is not None: - pool = self._get_pool(auth, { 'id': p['pool_id'] }) + pool = self._get_pool(auth, {'id': p['pool_id']}) audit_params2 = { 'pool_id': pool['id'], 'pool_name': pool['name'], 'prefix_id': p['id'], 'prefix_prefix': p['prefix'], - 'description': 'Prefix %s removed from pool %s' % (p['prefix'], pool['name']), + 'description': 'Prefix ' + p['prefix'] + ' removed from pool ' + pool['name'], 'username': auth.username, 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, - 'authoritative_source': auth.authoritative_source + 'authoritative_source': auth.authoritative_source, } sql, params = self._sql_expand_insert(audit_params2) - self._execute('INSERT INTO ip_net_log %s' % sql, params) - - + self._execute('INSERT INTO ip_net_log ' + sql, params) def search_prefix(self, auth, query, search_options=None): """ Search prefix list for prefixes matching `query`. 
@@ -3334,16 +3128,17 @@ def search_prefix(self, auth, query, search_options=None): search_options['include_all_parents'] = False else: if search_options['include_all_parents'] not in (True, False): - raise NipapValueError('Invalid value for option ' + - "'include_all_parents'. Only true and false valid. Supplied value :'%s'" % unicode(search_options['include_all_parents'])) + raise NipapValueError( + "Invalid value for option 'include_all_parents'. Only true and false valid. " + "Supplied value :'{}'".format(search_options['include_all_parents'])) # include_children if 'include_all_children' not in search_options: search_options['include_all_children'] = False else: if search_options['include_all_children'] not in (True, False): - raise NipapValueError('Invalid value for option ' + - "'include_all_children'. Only true and false valid. Supplied value: '%s'" % unicode(search_options['include_all_children'])) + raise NipapValueError("Invalid value for option 'include_all_children'. Only true and false valid. " + "Supplied value: '{}'".format(search_options['include_all_children'])) # parents_depth if 'parents_depth' not in search_options: @@ -3352,8 +3147,7 @@ def search_prefix(self, auth, query, search_options=None): try: search_options['parents_depth'] = int(search_options['parents_depth']) except (ValueError, TypeError): - raise NipapValueError('Invalid value for option' + - ''' 'parent_depth'. Only integer values allowed.''') + raise NipapValueError("Invalid value for option 'parent_depth'. Only integer values allowed.") # children_depth if 'children_depth' not in search_options: @@ -3362,16 +3156,15 @@ def search_prefix(self, auth, query, search_options=None): try: search_options['children_depth'] = int(search_options['children_depth']) except (ValueError, TypeError): - raise NipapValueError('Invalid value for option' + - ''' 'children_depth'. Only integer values allowed.''') + raise NipapValueError("Invalid value for option 'children_depth'. Only integer values allowed.") # include_neighbors if 'include_neighbors' not in search_options: search_options['include_neighbors'] = False else: if search_options['include_neighbors'] not in (True, False): - raise NipapValueError('Invalid value for option ' + - "'include_neighbors'. Only true and false valid. Supplied value: '%s'" % unicode(search_options['include_neighbors'])) + raise NipapValueError("Invalid value for option 'include_neighbors'. Only true and false valid. " + "Supplied value: '{}'".format(search_options['include_neighbors'])) # max_result if 'max_result' not in search_options: @@ -3383,8 +3176,7 @@ def search_prefix(self, auth, query, search_options=None): try: search_options['max_result'] = int(search_options['max_result']) except (ValueError, TypeError): - raise NipapValueError('Invalid value for option' + - ''' 'max_result'. Only integer values allowed.''') + raise NipapValueError("Invalid value for option 'max_result'. Only integer values allowed.") # offset if 'offset' not in search_options: @@ -3393,26 +3185,23 @@ def search_prefix(self, auth, query, search_options=None): try: search_options['offset'] = int(search_options['offset']) except (ValueError, TypeError): - raise NipapValueError('Invalid value for option' + - ''' 'offset'. Only integer values allowed.''') + raise NipapValueError("Invalid value for option 'offset'. 
Only integer values allowed.") # parent_prefix - if ('parent_prefix' not in search_options or - search_options['parent_prefix'] is None): + if 'parent_prefix' not in search_options or search_options['parent_prefix'] is None: search_options['parent_prefix'] = None else: try: _ = int(search_options['parent_prefix']) except ValueError: - raise NipapValueError( - "Invalid value '%s' for option 'parent_prefix'. Must be the ID of a prefix." - % search_options['parent_prefix']) + raise NipapValueError("Invalid value '{}' for option 'parent_prefix'. " + "Must be the ID of a prefix.".format(search_options['parent_prefix'])) try: - parent_prefix = self.list_prefix(auth, { 'id': search_options['parent_prefix'] })[0] + parent_prefix = self.list_prefix(auth, {'id': search_options['parent_prefix']})[0] except IndexError: - raise NipapNonExistentError("Parent prefix %s can not be found" % search_options['parent_prefix']) + raise NipapNonExistentError("Parent prefix {} can not be found".format(search_options['parent_prefix'])) - self._logger.debug('search_prefix search_options: %s' % unicode(search_options)) + self._logger.debug('search_prefix search_options: %s', search_options) # translate search options to SQL @@ -3439,7 +3228,8 @@ def search_prefix(self, auth, query, search_options=None): vrf_id = 0 if parent_prefix['vrf_id']: vrf_id = parent_prefix['vrf_id'] - where_parent_prefix = " WHERE (p1.vrf_id = %s AND iprange(p1.prefix) <<= iprange('%s') AND p1.indent <= %s) " % (vrf_id, parent_prefix['prefix'], parent_prefix['indent'] + 1) + where_parent_prefix = " WHERE (p1.vrf_id = %s AND iprange(p1.prefix) <<= iprange('%s') AND " \ + "p1.indent <= %s) " % (vrf_id, parent_prefix['prefix'], parent_prefix['indent'] + 1) left_join = 'LEFT OUTER' else: where_parent_prefix = '' @@ -3450,7 +3240,8 @@ def search_prefix(self, auth, query, search_options=None): else: limit_string = "LIMIT %d" % (search_options['max_result'] + search_options['offset']) - display = '(p1.prefix << p2.display_prefix OR p2.prefix <<= p1.prefix %s) OR (p2.prefix >>= p1.prefix %s)' % (where_parents, where_children) + display = '(p1.prefix << p2.display_prefix OR p2.prefix <<= p1.prefix %s) OR (p2.prefix >>= ' \ + 'p1.prefix %s)' % (where_parents, where_children) where, opt = self._expand_prefix_query(query) sql = """ @@ -3558,8 +3349,8 @@ def search_prefix(self, auth, query, search_options=None): LEFT JOIN ip_net_pool AS pool ON (p1.pool_id = pool.id) -- possible set where conditions, if we are doing a parent_prefix operation """ + where_parent_prefix + """ - ORDER BY vrf_rt_order(vrf.rt) NULLS FIRST, p1.prefix, CASE WHEN p1.prefix = p2.prefix THEN 0 ELSE 1 END OFFSET """ + unicode(search_options['offset']) + ") AS a ORDER BY vrf_rt_order(vrf_rt) NULLS FIRST, prefix" - + ORDER BY vrf_rt_order(vrf.rt) NULLS FIRST, p1.prefix, CASE WHEN p1.prefix = p2.prefix THEN 0 ELSE 1 END + OFFSET """ + str(search_options['offset']) + ") AS a ORDER BY vrf_rt_order(vrf_rt) NULLS FIRST, prefix" self._execute(sql, opt) @@ -3578,9 +3369,7 @@ def search_prefix(self, auth, query, search_options=None): if len(result) >= int(search_options['max_result']): break - return { 'search_options': search_options, 'result': result } - - + return {'search_options': search_options, 'result': result} def smart_search_prefix(self, auth, query_str, search_options=None, extra_query=None): """ Perform a smart search on prefix list. 
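To make the validated options above concrete, a hedged usage sketch for `search_prefix()` (the `nip` backend instance and `auth` object are hypothetical; the operator spelling follows the `_operation_map` handling shown earlier):

    # everything equal to or contained within 10.0.0.0/8, plus covering parents
    query = {
        'operator': 'contained_within_equals',
        'val1': 'prefix',
        'val2': '10.0.0.0/8',
    }
    opts = {'include_all_parents': True, 'max_result': 50, 'offset': 0}

    res = nip.search_prefix(auth, query, opts)
    # res looks like {'search_options': {...}, 'result': [{'prefix': ...}, ...]}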
@@ -3627,7 +3416,7 @@ def smart_search_prefix(self, auth, query_str, search_options=None, extra_query= if search_options is None: search_options = {} - self._logger.debug("smart_search_prefix query string: %s" % query_str) + self._logger.debug("smart_search_prefix query string: %s", query_str) success, query = self._parse_prefix_query(query_str) if not success: @@ -3636,17 +3425,13 @@ def smart_search_prefix(self, auth, query_str, search_options=None, extra_query= 'search_options': search_options, 'result': [], 'error': True, - 'error_message': 'query interpretation failed' + 'error_message': 'query interpretation failed', } if extra_query is not None: - query = { - 'operator': 'and', - 'val1': query, - 'val2': extra_query - } + query = {'operator': 'and', 'val1': query, 'val2': extra_query} - self._logger.debug("smart_search_prefix: query expanded to: %s" % unicode(query)) + self._logger.debug("smart_search_prefix: query expanded to: %s", query) search_result = self.search_prefix(auth, query, search_options) search_result['interpretation'] = query @@ -3654,8 +3439,6 @@ def smart_search_prefix(self, auth, query_str, search_options=None, extra_query= return search_result - - def _parse_prefix_query(self, query_str): """ Parse a smart search query for prefixes @@ -3666,20 +3449,18 @@ def _parse_prefix_query(self, query_str): query = sp.parse(query_str) return query - - # # ASN functions # - def _expand_asn_query(self, query, table_name = None): + def _expand_asn_query(self, query, table_name=None): """ Expand ASN query dict into a WHERE-clause. If you need to prefix each column reference with a table name, that can be supplied via the table_name argument. """ - where = unicode() + where = str() opt = list() # handle table name, can be None @@ -3688,16 +3469,16 @@ def _expand_asn_query(self, query, table_name = None): else: col_prefix = table_name + "." - if type(query['val1']) == dict and type(query['val2']) == dict: + if isinstance(query['val1'], dict) and isinstance(query['val2'], dict): # Sub expression, recurse! 
This is used for boolean operators: AND OR # add parantheses sub_where1, opt1 = self._expand_asn_query(query['val1'], table_name) sub_where2, opt2 = self._expand_asn_query(query['val2'], table_name) try: - where += unicode(" (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) ) + where += " (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) except KeyError: - raise NipapNoSuchOperatorError("No such operator %s" % unicode(query['operator'])) + raise NipapNoSuchOperatorError("No such operator {}".format(query['operator'])) opt += opt1 opt += opt2 @@ -3712,7 +3493,7 @@ def _expand_asn_query(self, query, table_name = None): asn_attr['name'] = 'name' if query['val1'] not in asn_attr: - raise NipapInputError('Search variable \'%s\' unknown' % unicode(query['val1'])) + raise NipapInputError("Search variable '{}' unknown".format(query['val1'])) # workaround for handling equal matches of NULL-values if query['operator'] == 'equals' and query['val2'] is None: @@ -3722,19 +3503,14 @@ def _expand_asn_query(self, query, table_name = None): # build where clause if query['operator'] not in _operation_map: - raise NipapNoSuchOperatorError("No such operator %s" % query['operator']) + raise NipapNoSuchOperatorError("No such operator {}".format(query['operator'])) - where = unicode(" %s%s %s %%s " % - ( col_prefix, asn_attr[query['val1']], - _operation_map[query['operator']] ) - ) + where = " %s%s %s %%s " % (col_prefix, asn_attr[query['val1']], _operation_map[query['operator']]) opt.append(query['val2']) return where, opt - - def _expand_asn_spec(self, spec): """ Expand ASN specification to SQL. @@ -3745,21 +3521,21 @@ def _expand_asn_spec(self, spec): name of ASN """ - if type(spec) is not dict: + if not isinstance(spec, dict): raise NipapInputError("asn specification must be a dict") allowed_values = ['asn', 'name'] for a in spec: if a not in allowed_values: - raise NipapExtraneousInputError("extraneous specification key %s" % a) + raise NipapExtraneousInputError("extraneous specification key {}".format(a)) if 'asn' in spec: - if type(spec['asn']) not in (int, long): + if not isinstance(spec['asn'], int): raise NipapValueError("asn specification key 'asn' must be an integer") if 'name' in spec: raise NipapExtraneousInputError("asn specification contain both 'asn' and 'name', specify asn or name") elif 'name' in spec: - if type(spec['name']) != type(''): + if not isinstance(spec['name'], str): raise NipapValueError("asn specification key 'name' must be a string") if 'asn' in spec: raise NipapExtraneousInputError("asn specification contain both 'asn' and 'name', specify asn or name") @@ -3768,8 +3544,6 @@ def _expand_asn_spec(self, spec): return where, params - - def list_asn(self, auth, asn=None): """ List AS numbers matching `spec`. @@ -3790,7 +3564,7 @@ def list_asn(self, auth, asn=None): if asn is None: asn = {} - self._logger.debug("list_asn called; asn: %s" % unicode(asn)) + self._logger.debug("list_asn called; asn: %s", asn) sql = "SELECT * FROM ip_net_asn" params = list() @@ -3809,8 +3583,6 @@ def list_asn(self, auth, asn=None): return res - - @requires_rw def add_asn(self, auth, attr): """ Add AS number to NIPAP. @@ -3828,18 +3600,18 @@ def add_asn(self, auth, attr): understanding. """ - self._logger.debug("add_asn called; attr: %s" % unicode(attr)) + self._logger.debug("add_asn called; attr: %s", attr) # sanity check - do we have all attributes? 
- req_attr = [ 'asn', ] - allowed_attr = [ 'asn', 'name' ] + req_attr = ['asn',] + allowed_attr = ['asn', 'name'] self._check_attr(attr, req_attr, allowed_attr) insert, params = self._sql_expand_insert(attr) sql = "INSERT INTO ip_net_asn " + insert self._execute(sql, params) - asn = self.list_asn(auth, { 'asn': attr['asn'] })[0] + asn = self.list_asn(auth, {'asn': attr['asn']})[0] # write to audit table audit_params = { @@ -3847,16 +3619,14 @@ def add_asn(self, auth, attr): 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, 'authoritative_source': auth.authoritative_source, - 'description': 'Added ASN %s with attr: %s' % (attr['asn'], unicode(attr)) + 'description': 'Added ASN %s with attr: %s' % (attr['asn'], attr) } sql, params = self._sql_expand_insert(audit_params) - self._execute('INSERT INTO ip_net_log %s' % sql, params) + self._execute('INSERT INTO ip_net_log ' + sql, params) return asn - - @requires_rw def edit_asn(self, auth, asn, attr): """ Edit AS number @@ -3874,19 +3644,18 @@ def edit_asn(self, auth, asn, attr): understanding. """ - self._logger.debug("edit_asn called; asn: %s attr: %s" % - (unicode(asn), unicode(attr))) + self._logger.debug("edit_asn called; asn: %s attr: %s", asn, attr) # sanity check - do we have all attributes? - req_attr = [ ] - allowed_attr = [ 'name', ] + req_attr = [] + allowed_attr = ['name',] self._check_attr(attr, req_attr, allowed_attr) asns = self.list_asn(auth, asn) where, params1 = self._expand_asn_spec(asn) update, params2 = self._sql_expand_update(attr) - params = dict(params2.items() + params1.items()) + params = dict(list(params2.items()) + list(params1.items())) sql = "UPDATE ip_net_asn SET " + update + " WHERE " + where sql += " RETURNING *" @@ -3902,17 +3671,15 @@ def edit_asn(self, auth, asn, attr): 'username': auth.username, 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, - 'authoritative_source': auth.authoritative_source + 'authoritative_source': auth.authoritative_source, + 'description': 'Edited ASN %s attr: %s' % (a['asn'], attr), } - audit_params['description'] = 'Edited ASN %s attr: %s' % (unicode(a['asn']), unicode(attr)) sql, params = self._sql_expand_insert(audit_params) - self._execute('INSERT INTO ip_net_log %s' % sql, params) + self._execute('INSERT INTO ip_net_log ' + sql, params) return updated_asns - - @requires_rw def remove_asn(self, auth, asn): """ Remove an AS number. @@ -3930,7 +3697,7 @@ def remove_asn(self, auth, asn): understanding. 
""" - self._logger.debug("remove_asn called; asn: %s" % unicode(asn)) + self._logger.debug("remove_asn called; asn: %s", asn) # get list of ASNs to remove before removing them asns = self.list_asn(auth, asn) @@ -3944,15 +3711,13 @@ def remove_asn(self, auth, asn): for a in asns: audit_params = { 'username': auth.username, - 'authenticated_as': auth.authenticated_as, + 'authenticated_as': auth.authenticated_ass, 'full_name': auth.full_name, 'authoritative_source': auth.authoritative_source, - 'description': 'Removed ASN %s' % unicode(a['asn']) + 'description': 'Removed ASN %s' % a['asn'] } sql, params = self._sql_expand_insert(audit_params) - self._execute('INSERT INTO ip_net_log %s' % sql, params) - - + self._execute('INSERT INTO ip_net_log ' + sql, params) def search_asn(self, auth, query, search_options=None): """ Search ASNs for entries matching 'query' @@ -4029,10 +3794,9 @@ def search_asn(self, auth, query, search_options=None): try: search_options['offset'] = int(search_options['offset']) except (ValueError, TypeError): - raise NipapValueError('Invalid value for option' + - ''' 'offset'. Only integer values allowed.''') + raise NipapValueError("Invalid value for option'offset'. Only integer values allowed.") - self._logger.debug('search_asn search_options: %s' % unicode(search_options)) + self._logger.debug('search_asn search_options: %s', search_options) opt = None sql = """ SELECT * FROM ip_net_asn """ @@ -4043,16 +3807,14 @@ def search_asn(self, auth, query, search_options=None): where, opt = self._expand_asn_query(query) sql += " WHERE " + where - sql += " ORDER BY asn LIMIT " + unicode(search_options['max_result']) + sql += " ORDER BY asn LIMIT %s" % search_options['max_result'] self._execute(sql, opt) result = list() for row in self._curs_pg: result.append(dict(row)) - return { 'search_options': search_options, 'result': result } - - + return {'search_options': search_options, 'result': result} def smart_search_asn(self, auth, query_str, search_options=None, extra_query=None): """ Perform a smart search operation among AS numbers @@ -4095,26 +3857,22 @@ def smart_search_asn(self, auth, query_str, search_options=None, extra_query=Non if search_options is None: search_options = {} - self._logger.debug("smart_search_asn called; query_str: %s" % query_str) + self._logger.debug("smart_search_asn called; query_str: %s", query_str) success, query = self._parse_asn_query(query_str) if not success: return { - 'interpretation': query, - 'search_options': search_options, - 'result': [], - 'error': True, - 'error_message': 'query interpretaion failed' + 'interpretation': query, + 'search_options': search_options, + 'result': [], + 'error': True, + 'error_message': 'query interpretaion failed', } if extra_query is not None: - query = { - 'operator': 'and', - 'val1': query, - 'val2': extra_query - } + query = {'operator': 'and', 'val1': query, 'val2': extra_query} - self._logger.debug("smart_search_asn; query expanded to: %s" % unicode(query)) + self._logger.debug("smart_search_asn; query expanded to: %s", query) search_result = self.search_asn(auth, query, search_options) search_result['interpretation'] = query @@ -4122,8 +3880,6 @@ def smart_search_asn(self, auth, query_str, search_options=None, extra_query=Non return search_result - - def _parse_asn_query(self, query_str): """ Parse a smart search query for ASNs @@ -4185,25 +3941,23 @@ def _parse_asn_query(self, query_str): }, 'operator': 'and', 'val1': query_part, - 'val2': query + 'val2': query, } return True, query - - # # Tag 
functions # - def _expand_tag_query(self, query, table_name = None): + def _expand_tag_query(self, query, table_name=None): """ Expand Tag query dict into a WHERE-clause. If you need to prefix each column reference with a table name, that can be supplied via the table_name argument. """ - where = unicode() + where = str() opt = list() # handle table name, can be None @@ -4212,16 +3966,16 @@ def _expand_tag_query(self, query, table_name = None): else: col_prefix = table_name + "." - if type(query['val1']) == dict and type(query['val2']) == dict: + if isinstance(query['val1'], dict) and isinstance(query['val2'], dict): # Sub expression, recurse! This is used for boolean operators: AND OR # add parantheses sub_where1, opt1 = self._expand_tag_query(query['val1'], table_name) sub_where2, opt2 = self._expand_tag_query(query['val2'], table_name) try: - where += unicode(" (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) ) + where += " (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) except KeyError: - raise NipapNoSuchOperatorError("No such operator %s" % unicode(query['operator'])) + raise NipapNoSuchOperatorError("No such operator {}".format(query['operator'])) opt += opt1 opt += opt2 @@ -4235,7 +3989,7 @@ def _expand_tag_query(self, query, table_name = None): tag_attr['name'] = 'name' if query['val1'] not in tag_attr: - raise NipapInputError('Search variable \'%s\' unknown' % unicode(query['val1'])) + raise NipapInputError("Search variable '{}' unknown".format(query['val1'])) # workaround for handling equal matches of NULL-values if query['operator'] == 'equals' and query['val2'] is None: @@ -4245,19 +3999,14 @@ def _expand_tag_query(self, query, table_name = None): # build where clause if query['operator'] not in _operation_map: - raise NipapNoSuchOperatorError("No such operator %s" % query['operator']) + raise NipapNoSuchOperatorError("No such operator {}".format(query['operator'])) - where = unicode(" %s%s %s %%s " % - ( col_prefix, tag_attr[query['val1']], - _operation_map[query['operator']] ) - ) + where = " %s%s %s %%s " % (col_prefix, tag_attr[query['val1']], _operation_map[query['operator']]) opt.append(query['val2']) return where, opt - - def search_tag(self, auth, query, search_options=None): """ Search Tags for entries matching 'query' @@ -4323,8 +4072,7 @@ def search_tag(self, auth, query, search_options=None): try: search_options['max_result'] = int(search_options['max_result']) except (ValueError, TypeError): - raise NipapValueError('Invalid value for option' + - ''' 'max_result'. Only integer values allowed.''') + raise NipapValueError("Invalid value for option 'max_result'. Only integer values allowed.") # offset if 'offset' not in search_options: @@ -4333,32 +4081,26 @@ def search_tag(self, auth, query, search_options=None): try: search_options['offset'] = int(search_options['offset']) except (ValueError, TypeError): - raise NipapValueError('Invalid value for option' + - ''' 'offset'. Only integer values allowed.''') + raise NipapValueError("Invalid value for option 'offset'. 
Only integer values allowed.") - self._logger.debug('search_tag search_options: %s' % unicode(search_options)) + self._logger.debug('search_tag search_options: %s', search_options) opt = None - sql = """ SELECT * FROM (SELECT DISTINCT unnest(tags) AS name FROM - ip_net_plan) AS a """ + sql = """ SELECT * FROM (SELECT DISTINCT unnest(tags) AS name FROM ip_net_plan) AS a """ # add where clause if we have any search terms if query != {}: - where, opt = self._expand_tag_query(query) sql += " WHERE " + where - sql += " ORDER BY name LIMIT " + unicode(search_options['max_result']) + sql += " ORDER BY name LIMIT %s" % search_options['max_result'] self._execute(sql, opt) result = list() for row in self._curs_pg: result.append(dict(row)) - return { 'search_options': search_options, 'result': result } - - - + return {'search_options': search_options, 'result': result} # vim: et ts=4 : diff --git a/nipap/nipap/daemon.py b/nipap/nipap/daemon.py index c721d027f..215e9d3b3 100644 --- a/nipap/nipap/daemon.py +++ b/nipap/nipap/daemon.py @@ -22,8 +22,8 @@ __version__ = "0.2" # Standard Python modules. -import os # Miscellaneous OS interfaces. -import sys # System-specific parameters and functions. +import os # Miscellaneous OS interfaces. +import sys # System-specific parameters and functions. # Default daemon parameters. # File mode creation mask of the daemon. @@ -36,161 +36,163 @@ MAXFD = 1024 # The standard I/O file descriptors are redirected to /dev/null by default. -if (hasattr(os, "devnull")): - REDIRECT_TO = os.devnull +if hasattr(os, "devnull"): + REDIRECT_TO = os.devnull else: - REDIRECT_TO = "/dev/null" + REDIRECT_TO = "/dev/null" + def createDaemon(): - """Detach a process from the controlling terminal and run it in the - background as a daemon. - """ - - try: - # Fork a child process so the parent can exit. This returns control to - # the command-line or shell. It also guarantees that the child will not - # be a process group leader, since the child receives a new process ID - # and inherits the parent's process group ID. This step is required - # to insure that the next call to os.setsid is successful. - pid = os.fork() - except OSError as exc: - raise Exception, "%s [%d]" % (exc.strerror, exc.errno) - - if (pid == 0): # The first child. - # To become the session leader of this new session and the process group - # leader of the new process group, we call os.setsid(). The process is - # also guaranteed not to have a controlling terminal. - os.setsid() - - # Is ignoring SIGHUP necessary? - # - # It's often suggested that the SIGHUP signal should be ignored before - # the second fork to avoid premature termination of the process. The - # reason is that when the first child terminates, all processes, e.g. - # the second child, in the orphaned group will be sent a SIGHUP. - # - # "However, as part of the session management system, there are exactly - # two cases where SIGHUP is sent on the death of a process: - # - # 1) When the process that dies is the session leader of a session that - # is attached to a terminal device, SIGHUP is sent to all processes - # in the foreground process group of that terminal device. - # 2) When the death of a process causes a process group to become - # orphaned, and one or more processes in the orphaned group are - # stopped, then SIGHUP and SIGCONT are sent to all members of the - # orphaned group." [2] - # - # The first case can be ignored since the child is guaranteed not to have - # a controlling terminal. The second case isn't so easy to dismiss. 
- # The process group is orphaned when the first child terminates and - # POSIX.1 requires that every STOPPED process in an orphaned process - # group be sent a SIGHUP signal followed by a SIGCONT signal. Since the - # second child is not STOPPED though, we can safely forego ignoring the - # SIGHUP signal. In any case, there are no ill-effects if it is ignored. - # - # import signal # Set handlers for asynchronous events. - # signal.signal(signal.SIGHUP, signal.SIG_IGN) - - try: - # Fork a second child and exit immediately to prevent zombies. This - # causes the second child process to be orphaned, making the init - # process responsible for its cleanup. And, since the first child is - # a session leader without a controlling terminal, it's possible for - # it to acquire one by opening a terminal in the future (System V- - # based systems). This second fork guarantees that the child is no - # longer a session leader, preventing the daemon from ever acquiring - # a controlling terminal. - pid = os.fork() # Fork a second child. - except OSError as exc: - raise Exception, "%s [%d]" % (exc.strerror, exc.errno) - - if (pid == 0): # The second child. - # Since the current working directory may be a mounted filesystem, we - # avoid the issue of not being able to unmount the filesystem at - # shutdown time by changing it to the root directory. - os.chdir(WORKDIR) - # We probably don't want the file mode creation mask inherited from - # the parent, so we give the child complete control over permissions. - os.umask(UMASK) - else: - # exit() or _exit()? See below. - os._exit(0) # Exit parent (the first child) of the second child. - else: - # exit() or _exit()? - # _exit is like exit(), but it doesn't call any functions registered - # with atexit (and on_exit) or any registered signal handlers. It also - # closes any open file descriptors. Using exit() may cause all stdio - # streams to be flushed twice and any temporary files may be unexpectedly - # removed. It's therefore recommended that child branches of a fork() - # and the parent branch(es) of a daemon use _exit(). - os._exit(0) # Exit parent of the first child. - - # Close all open file descriptors. This prevents the child from keeping - # open any file descriptors inherited from the parent. There is a variety - # of methods to accomplish this task. Three are listed below. - # - # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum - # number of open file descriptors to close. If it doesn't exists, use - # the default value (configurable). - # - # try: - # maxfd = os.sysconf("SC_OPEN_MAX") - # except (AttributeError, ValueError): - # maxfd = MAXFD - # - # OR - # - # if (os.sysconf_names.has_key("SC_OPEN_MAX")): - # maxfd = os.sysconf("SC_OPEN_MAX") - # else: - # maxfd = MAXFD - # - # OR - # - # Use the getrlimit method to retrieve the maximum file descriptor number - # that can be opened by this process. If there is not limit on the - # resource, use the default value. - # - import resource # Resource usage information. - maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1] - if (maxfd == resource.RLIM_INFINITY): - maxfd = MAXFD - - # FIXME: this breaks our tpxmld, so it's commented for now //kll - # Iterate through and close all file descriptors. -# for fd in range(0, maxfd): -# try: -# os.close(fd) -# except OSError: # ERROR, fd wasn't open to begin with (ignored) -# pass - - # Redirect the standard I/O file descriptors to the specified file. 
Since - # the daemon has no controlling terminal, most daemons redirect stdin, - # stdout, and stderr to /dev/null. This is done to prevent side-effects - # from reads and writes to the standard I/O file descriptors. - - # This call to open is guaranteed to return the lowest file descriptor, - # which will be 0 (stdin), since it was closed above. - os.open(REDIRECT_TO, os.O_RDWR) # standard input (0) - - # Duplicate standard input to standard output and standard error. - os.dup2(0, 1) # standard output (1) - os.dup2(0, 2) # standard error (2) - - return(0) + """Detach a process from the controlling terminal and run it in the + background as a daemon. + """ + + try: + # Fork a child process so the parent can exit. This returns control to + # the command-line or shell. It also guarantees that the child will not + # be a process group leader, since the child receives a new process ID + # and inherits the parent's process group ID. This step is required + # to ensure that the next call to os.setsid is successful. + pid = os.fork() + except OSError as exc: + raise Exception("{} [{:d}]".format(exc.strerror, exc.errno)) + + if pid == 0: # The first child. + # To become the session leader of this new session and the process group + # leader of the new process group, we call os.setsid(). The process is + # also guaranteed not to have a controlling terminal. + os.setsid() + + # Is ignoring SIGHUP necessary? + # + # It's often suggested that the SIGHUP signal should be ignored before + # the second fork to avoid premature termination of the process. The + # reason is that when the first child terminates, all processes, e.g. + # the second child, in the orphaned group will be sent a SIGHUP. + # + # "However, as part of the session management system, there are exactly + # two cases where SIGHUP is sent on the death of a process: + # + # 1) When the process that dies is the session leader of a session that + # is attached to a terminal device, SIGHUP is sent to all processes + # in the foreground process group of that terminal device. + # 2) When the death of a process causes a process group to become + # orphaned, and one or more processes in the orphaned group are + # stopped, then SIGHUP and SIGCONT are sent to all members of the + # orphaned group." [2] + # + # The first case can be ignored since the child is guaranteed not to have + # a controlling terminal. The second case isn't so easy to dismiss. + # The process group is orphaned when the first child terminates and + # POSIX.1 requires that every STOPPED process in an orphaned process + # group be sent a SIGHUP signal followed by a SIGCONT signal. Since the + # second child is not STOPPED though, we can safely forego ignoring the + # SIGHUP signal. In any case, there are no ill-effects if it is ignored. + # + # import signal # Set handlers for asynchronous events. + # signal.signal(signal.SIGHUP, signal.SIG_IGN) + + try: + # Fork a second child and exit immediately to prevent zombies. This + # causes the second child process to be orphaned, making the init + # process responsible for its cleanup. And, since the first child is + # a session leader without a controlling terminal, it's possible for + # it to acquire one by opening a terminal in the future (System V- + # based systems). This second fork guarantees that the child is no + # longer a session leader, preventing the daemon from ever acquiring + # a controlling terminal. + pid = os.fork() # Fork a second child.
+        except OSError as exc:
+            raise Exception("{} [{:d}]".format(exc.strerror, exc.errno))
+
+        if pid == 0:  # The second child.
+            # Since the current working directory may be a mounted filesystem, we
+            # avoid the issue of not being able to unmount the filesystem at
+            # shutdown time by changing it to the root directory.
+            os.chdir(WORKDIR)
+            # We probably don't want the file mode creation mask inherited from
+            # the parent, so we give the child complete control over permissions.
+            os.umask(UMASK)
+        else:
+            # exit() or _exit()? See below.
+            os._exit(0)  # Exit parent (the first child) of the second child.
+    else:
+        # exit() or _exit()?
+        # _exit is like exit(), but it doesn't call any functions registered
+        # with atexit (and on_exit) or any registered signal handlers. It also
+        # closes any open file descriptors. Using exit() may cause all stdio
+        # streams to be flushed twice and any temporary files may be unexpectedly
+        # removed. It's therefore recommended that child branches of a fork()
+        # and the parent branch(es) of a daemon use _exit().
+        os._exit(0)  # Exit parent of the first child.
+
+    # Close all open file descriptors. This prevents the child from keeping
+    # open any file descriptors inherited from the parent. There is a variety
+    # of methods to accomplish this task. Three are listed below.
+    #
+    # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum
+    # number of open file descriptors to close. If it doesn't exist, use
+    # the default value (configurable).
+    #
+    # try:
+    #     maxfd = os.sysconf("SC_OPEN_MAX")
+    # except (AttributeError, ValueError):
+    #     maxfd = MAXFD
+    #
+    # OR
+    #
+    # if (os.sysconf_names.has_key("SC_OPEN_MAX")):
+    #     maxfd = os.sysconf("SC_OPEN_MAX")
+    # else:
+    #     maxfd = MAXFD
+    #
+    # OR
+    #
+    # Use the getrlimit method to retrieve the maximum file descriptor number
+    # that can be opened by this process. If there is no limit on the
+    # resource, use the default value.
+    #
+    import resource  # Resource usage information.
+
+    maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
+    if maxfd == resource.RLIM_INFINITY:
+        maxfd = MAXFD
+
+    # FIXME: this breaks our tpxmld, so it's commented for now //kll
+    # Iterate through and close all file descriptors.
+    # for fd in range(0, maxfd):
+    #     try:
+    #         os.close(fd)
+    #     except OSError:  # ERROR, fd wasn't open to begin with (ignored)
+    #         pass
+
+    # Redirect the standard I/O file descriptors to the specified file. Since
+    # the daemon has no controlling terminal, most daemons redirect stdin,
+    # stdout, and stderr to /dev/null. This is done to prevent side-effects
+    # from reads and writes to the standard I/O file descriptors.
+
+    # This call to open is guaranteed to return the lowest file descriptor,
+    # which will be 0 (stdin), since it was closed above.
+    os.open(REDIRECT_TO, os.O_RDWR)  # standard input (0)
+
+    # Duplicate standard input to standard output and standard error.
+    os.dup2(0, 1)  # standard output (1)
+    os.dup2(0, 2)  # standard error (2)
+
+    return 0
 
 
-if __name__ == "__main__":
-    retCode = createDaemon()
+if __name__ == "__main__":
+    retCode = createDaemon()
 
-    # The code, as is, will create a new file in the root directory, when
-    # executed with superuser privileges. The file will contain the following
-    # daemon related process parameters: return code, process ID, parent
-    # process group ID, session ID, user ID, effective user ID, real group ID,
-    # and the effective group ID. Notice the relationship between the daemon's
-    # process ID, process group ID, and its parent's process ID.
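Aside, not part of the patch: stripped of its commentary, the double-fork idiom createDaemon() ports above reduces to the following sketch (assumes a POSIX system; the helper name and defaults are illustrative):

    import os

    def daemonize(workdir='/', umask=0):
        if os.fork() > 0:
            os._exit(0)       # parent returns to the shell; child is reparented
        os.setsid()           # become session leader, detach from the tty
        if os.fork() > 0:
            os._exit(0)       # first child exits; grandchild can never regain a tty
        os.chdir(workdir)     # don't pin a mounted filesystem
        os.umask(umask)       # take full control of created file modes
        devnull = os.open(os.devnull, os.O_RDWR)
        for fd in (0, 1, 2):  # point stdio at /dev/null
            os.dup2(devnull, fd)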
+ # The code, as is, will create a new file in the root directory, when + # executed with superuser privileges. The file will contain the following + # daemon related process parameters: return code, process ID, parent + # process group ID, session ID, user ID, effective user ID, real group ID, + # and the effective group ID. Notice the relationship between the daemon's + # process ID, process group ID, and its parent's process ID. - procParams = """ + procParams = """ return code = %s process ID = %s parent process ID = %s @@ -201,9 +203,8 @@ def createDaemon(): real group ID = %s effective group ID = %s """ % (retCode, os.getpid(), os.getppid(), os.getpgrp(), os.getsid(0), - os.getuid(), os.geteuid(), os.getgid(), os.getegid()) - - open("createDaemon.log", "w").write(procParams + "\n") + os.getuid(), os.geteuid(), os.getgid(), os.getegid()) - sys.exit(retCode) + open("createDaemon.log", "w").write(procParams + "\n") + sys.exit(retCode) diff --git a/nipap/nipap/errors.py b/nipap/nipap/errors.py index 6cecf4475..b91514f8b 100644 --- a/nipap/nipap/errors.py +++ b/nipap/nipap/errors.py @@ -1,4 +1,3 @@ - class NipapError(Exception): """ NIPAP base error class. """ diff --git a/nipap/nipap/nipapconfig.py b/nipap/nipap/nipapconfig.py index ca89c7349..00591bf6f 100644 --- a/nipap/nipap/nipapconfig.py +++ b/nipap/nipap/nipapconfig.py @@ -1,12 +1,12 @@ -import ConfigParser +import configparser -class NipapConfig(ConfigParser.SafeConfigParser): +class NipapConfig(configparser.ConfigParser): """ Makes configuration data available. Implemented as a class with a shared state; once an instance has been created, new instances with the same state can be obtained by calling - the custructor again. + the constructor again. """ __shared_state = {} @@ -26,12 +26,10 @@ def __init__(self, cfg_path=None, default=None): # First time - create new instance! 
         self._cfg_path = cfg_path
 
-        ConfigParser.SafeConfigParser.__init__(self, default)
+        configparser.ConfigParser.__init__(self, default, inline_comment_prefixes=";#")
 
         self.read_file()
 
-
-
     def read_file(self):
         """ Read the configuration file
         """
@@ -41,12 +39,10 @@ def read_file(self):
             return
 
         try:
-            cfg_fp = open(self._cfg_path, 'r')
-            self.readfp(cfg_fp)
+            # configparser's read() silently skips missing files, which would
+            # leave the "config file does not exist" error path in nipapd dead,
+            # and readfp()/read_file() would recurse into this override, so call
+            # the parent class method on an explicitly opened file
+            with open(self._cfg_path, 'r') as cfg_fp:
+                configparser.ConfigParser.read_file(self, cfg_fp)
         except IOError as exc:
             raise NipapConfigError(str(exc))
 
-
 class NipapConfigError(Exception):
     pass
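Aside, not part of the patch: the "shared state" arrangement described in the NipapConfig docstring above is the classic Borg pattern -- each new instance rebinds its attribute dict to one class-level dict, so configuration read once is visible through every handle. A minimal sketch, independent of NIPAP:

    class Borg:
        __shared_state = {}

        def __init__(self):
            # every instance shares one attribute namespace
            self.__dict__ = self.__shared_state

    a = Borg()
    a.cfg_path = '/etc/nipap/nipap.conf'
    b = Borg()
    assert b.cfg_path == a.cfg_path  # set via one instance, seen by all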
diff --git a/nipap/nipap/smart_parsing.py b/nipap/nipap/smart_parsing.py
index 01468c744..1bd91e09c 100644
--- a/nipap/nipap/smart_parsing.py
+++ b/nipap/nipap/smart_parsing.py
@@ -1,15 +1,16 @@
-#!/usr/bin/python
+#!/usr/bin/python3
 # -*- coding: utf-8 -*-
 
-from itertools import izip_longest
+from itertools import zip_longest
 
 import logging
 import re
 
 import IPy
 
-from pyparsing import Combine, Forward, Group, Literal, nestedExpr, OneOrMore, ParseResults, quotedString, Regex, QuotedString, Word, ZeroOrMore, alphanums, nums, oneOf
+from pyparsing import Combine, Forward, Group, Literal, nestedExpr, OneOrMore, ParseResults, quotedString, Regex, \
+    QuotedString, Word, ZeroOrMore, alphanums, nums, oneOf
 
-from errors import *
+from .errors import *
 
 
 class SmartParser:
@@ -21,7 +22,6 @@ class SmartParser:
     def __init__(self):
         self._logger = logging.getLogger(self.__class__.__name__)
 
-
     def _is_ipv4(self, ip):
         """ Return true if given arg is a valid IPv4 address
         """
@@ -34,7 +34,6 @@ def _is_ipv4(self, ip):
             return True
         return False
 
-
     def _is_ipv6(self, ip):
         """ Return true if given arg is a valid IPv6 address
         """
@@ -47,12 +46,11 @@ def _is_ipv6(self, ip):
             return True
         return False
 
-
     def _get_afi(self, ip):
         """ Return address-family (4 or 6) for IP or None if invalid address
         """
 
-        parts = unicode(ip).split("/")
+        parts = ip.split("/")
         if len(parts) == 1:
             # just an address
             if self._is_ipv4(ip):
@@ -66,17 +64,17 @@ def _get_afi(self, ip):
             try:
                 pl = int(parts[1])
             except ValueError:
-                # if casting parts[1] to int failes, this is not a prefix..
+                # if casting parts[1] to int fails, this is not a prefix..
                 return None
 
             if self._is_ipv4(parts[0]):
-                if pl >= 0 and pl <= 32:
+                if 0 <= pl <= 32:
                     # prefix mask must be between 0 and 32
                     return 4
                 # otherwise error
                 return None
             elif self._is_ipv6(parts[0]):
-                if pl >= 0 and pl <= 128:
+                if 0 <= pl <= 128:
                     # prefix mask must be between 0 and 128
                     return 6
                 # otherwise error
@@ -87,7 +85,6 @@ def _get_afi(self, ip):
         # more than two parts.. this is neither an address or a prefix
         return None
 
-
    def _string_to_ast(self, input_string):
         """ Parse a smart search string and return it in an AST like form
         """
@@ -98,26 +95,44 @@ def _string_to_ast(self, input_string):
         # we try to be Unicode / internationally friendly we need to match much
         # much more. Trying to expand a word class to catch it all seems futile
         # so we match on everything *except* a few things, like our operators
-        comp_word = Regex("[^*\s=><~!]+")
-        word = Regex("[^*\s=><~!]+").setResultsName('word')
+        comp_word = Regex(r"[^*\s=><~!]+")
+        word = Regex(r"[^*\s=><~!]+").setResultsName('word')
         # numbers
         comp_number = Word(nums)
         number = Word(nums).setResultsName('number')
 
         # IPv4 address
         ipv4_oct = Regex("((2(5[0-5]|[0-4][0-9])|[01]?[0-9][0-9]?))")
-        comp_ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct*3))
-        ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct*3)).setResultsName('ipv4_address')
+        # note: * binds tighter than + in pyparsing expressions, so the
+        # repetition must wrap ('.' + ipv4_oct) to match a dotted quad
+        comp_ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct) * 3)
+        ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct) * 3).setResultsName('ipv4_address')
 
         # IPv6 address
-        ipv6_address = Regex("((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?").setResultsName('ipv6_address')
-        ipv6_prefix = Combine(ipv6_address + Regex("/(12[0-8]|1[01][0-9]|[0-9][0-9]?)")).setResultsName('ipv6_prefix')
+        ipv6_address = Regex(
+            r"((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|"
+            r"(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)"
+            r"(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|"
+            r":((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|"
+            r"(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|"
+            r"((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|"
+            r":))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|"
+            r"((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|"
+            r":))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|"
+            r"((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|"
+            r":))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|"
+            r"((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|"
+            r":))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|"
+            r"((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|"
+            r":)))(%.+)?"
+ ).setResultsName('ipv6_address') + ipv6_prefix = Combine( + ipv6_address + Regex("/(12[0-8]|1[01][0-9]|[0-9][0-9]?)") + ).setResultsName('ipv6_prefix') # VRF RTs of the form number:number vrf_rt = Combine((comp_ipv4_address | comp_number) + Literal(':') + comp_number).setResultsName('vrf_rt') # tags - tags = Combine( Literal('#') + comp_word).setResultsName('tag') + tags = Combine(Literal('#') + comp_word).setResultsName('tag') # operators for matching match_op = oneOf(' '.join(self.match_operators)).setResultsName('operator') @@ -135,26 +150,21 @@ def _string_to_ast(self, input_string): enclosed = Forward() parens = nestedExpr('(', ')', content=enclosed) - enclosed << ( - parens | atom - ).setResultsName('nested') + enclosed << (parens | atom).setResultsName('nested') content = Forward() - content << ( - ZeroOrMore(enclosed) - ) + content << (ZeroOrMore(enclosed)) res = content.parseString(input_string) return res - def _ast_to_dictsql(self, input_ast): """ """ # Add implicit AND operator between expressions if there is no explicit # operator specified. ast = [] - for token, lookahead in izip_longest(input_ast, input_ast[1:]): + for token, lookahead in zip_longest(input_ast, input_ast[1:]): if token.getName() == "boolean": # only add boolean operator if it is NOT the last token if lookahead is not None: @@ -169,27 +179,22 @@ def _ast_to_dictsql(self, input_ast): # if next token is NOT a boolean, add implicit AND ast.append(ParseResults('and', 'boolean')) - # dictSql stack - dss = { - 'operator': None, - 'val1': None, - 'val2': None - } + dss = {'operator': None, 'val1': None, 'val2': None} success = True dse = None - for part, lookahead in izip_longest(ast, ast[1:]): - self._logger.debug("part: %s %s" % (part, part.getName())) + for part, lookahead in zip_longest(ast, ast[1:]): + self._logger.debug("part: {} {}".format(part, part.getName())) # handle operators joining together expressions if part.getName() == 'boolean': op = part[0].lower() dss['operator'] = op dss['interpretation'] = { - 'interpretation': op, - 'operator': op, - 'error': False - } + 'interpretation': op, + 'operator': op, + 'error': False, + } continue # string expr that we expand to dictsql expression @@ -206,25 +211,20 @@ def _ast_to_dictsql(self, input_ast): elif part.getName() in ('ipv6_prefix', 'ipv6_address', 'word', 'tag', 'vrf_rt', 'quoted_string'): # dict sql expression dse = self._string_to_dictsql(part) - self._logger.debug('string part: %s => %s' % (part, dse)) + self._logger.debug('string part: %s => %s', part, dse) else: - raise ParserError("Unhandled part in AST: %s %s" % (part, - part.getName())) + raise ParserError("Unhandled part in AST: {} {}".format(part, part.getName())) if dss['val1'] is None: - self._logger.debug('val1 not set, using dse: %s' % unicode(dse)) + self._logger.debug('val1 not set, using dse: %s', dse) dss['val1'] = dse else: - self._logger.debug("val1 is set, operator is '%s', val2 = dst: %s" % (dss['operator'], unicode(dse))) + self._logger.debug("val1 is set, operator is '%s', val2 = dst: %s", dss['operator'], dse) dss['val2'] = dse if lookahead is not None: if dss['val1'] is not None and dss['val2'] is not None: - dss = { - 'operator': None, - 'val1': dss, - 'val2': None - } + dss = {'operator': None, 'val1': dss, 'val2': None} # special handling when AST is only one expression, then we overwrite # the dss with dse @@ -236,13 +236,11 @@ def _ast_to_dictsql(self, input_ast): # return the final composed stack of dictsql expressions return success, dss - def _string_to_dictsql(self, 
string): """ Do magic matching of single words or quoted string """ raise NotImplementedError() - def _parse_expr(self, part): """ Parse matching expression in form key value @@ -250,25 +248,25 @@ def _parse_expr(self, part): vlan > 1 node = FOO-BAR """ - self._logger.debug("parsing expression: " + unicode(part)) + self._logger.debug("parsing expression: %s", part) key, op, val = part success = True dictsql = { + 'operator': op, + 'val1': key, + 'val2': val, + 'interpretation': { + 'string': key + op + val, + 'interpretation': 'expression', + 'attribute': key, 'operator': op, - 'val1': key, - 'val2': unicode(val), - 'interpretation': { - 'string': key + op + val, - 'interpretation': 'expression', - 'attribute': key, - 'operator': op, - 'error': False - } - } + 'error': False, + }, + } if key in self.attributes: - if type(self.attributes[key]) is list: + if isinstance(self.attributes[key], list): if val not in self.attributes[key]: dictsql['interpretation']['error'] = True dictsql['interpretation']['error_message'] = 'invalid value' @@ -281,8 +279,6 @@ def _parse_expr(self, part): return success, dictsql - - def parse(self, input_string): # check for unclosed quotes/parentheses paired_exprs = nestedExpr('(', ')') | quotedString @@ -298,8 +294,8 @@ def parse(self, input_string): 'attribute': 'text', 'operator': None, 'error': True, - 'error_message': None - } + 'error_message': None, + }, } if '"' in stripped_line or "'" in stripped_line: @@ -313,7 +309,6 @@ def parse(self, input_string): return self._ast_to_dictsql(ast) - class PoolSmartParser(SmartParser): attributes = { 'default_type': True, @@ -336,94 +331,88 @@ class PoolSmartParser(SmartParser): 'used_prefixes_v4': True, 'used_prefixes_v6': True, 'vrf': True, - } - + } def _string_to_dictsql(self, part): """ Do magic matching of single words or quoted string """ - self._logger.debug("parsing string: " + unicode(part[0]) + " of type: " + part.getName()) + self._logger.debug("parsing string: %s of type: %s", part[0], part.getName()) if part.getName() == 'tag': - self._logger.debug("Query part '" + part[0] + "' interpreted as tag") + self._logger.debug("Query part '%s' interpreted as tag", part[0]) dictsql = { - 'interpretation': { - 'string': part[0], - 'interpretation': 'tag', - 'attribute': 'tag', - 'operator': 'equals_any', - 'error': False - }, + 'interpretation': { + 'string': part[0], + 'interpretation': 'tag', + 'attribute': 'tag', 'operator': 'equals_any', - 'val1': 'tags', - 'val2': part[0][1:] - } + 'error': False, + }, + 'operator': 'equals_any', + 'val1': 'tags', + 'val2': part[0][1:], + } elif part.getName() == 'vrf_rt': - self._logger.debug("Query part '" + part.vrf_rt + "' interpreted as VRF RT") + self._logger.debug("Query part '%s' interpreted as VRF RT", part.vrf_rt) # TODO: enable this, our fancy new interpretation - dictsql = { - 'interpretation': { - 'attribute': 'VRF RT', - 'interpretation': 'vrf_rt', - 'operator': 'equals', - 'string': part.vrf_rt, - 'error': False - }, - 'operator': 'equals', - 'val1': 'vrf_rt', - 'val2': part.vrf_rt - } + # dictsql = { + # 'interpretation': { + # 'attribute': 'VRF RT', + # 'interpretation': 'vrf_rt', + # 'operator': 'equals', + # 'string': part.vrf_rt, + # 'error': False, + # }, + # 'operator': 'equals', + # 'val1': 'vrf_rt', + # 'val2': part.vrf_rt, + # } # using old interpretation for the time being to make sure we align # with old smart search interpreter dictsql = { - 'interpretation': { - 'attribute': 'name or description', - 'interpretation': 'text', - 'operator': 'regex', 
- 'string': part.vrf_rt, - 'error': False - }, - 'operator': 'or', - 'val1': { - 'operator': 'regex_match', - 'val1': 'name', - 'val2': part.vrf_rt - }, - 'val2': { - 'operator': 'regex_match', - 'val1': 'description', - 'val2': part.vrf_rt - } - } + 'interpretation': { + 'attribute': 'name or description', + 'interpretation': 'text', + 'operator': 'regex', + 'string': part.vrf_rt, + 'error': False, + }, + 'operator': 'or', + 'val1': { + 'operator': 'regex_match', + 'val1': 'name', + 'val2': part.vrf_rt, + }, + 'val2': { + 'operator': 'regex_match', + 'val1': 'description', + 'val2': part.vrf_rt, + }, + } else: - self._logger.debug("Query part '" + part[0] + "' interpreted as text") + self._logger.debug("Query part '%s' interpreted as text", part[0]) dictsql = { - 'interpretation': { - 'attribute': 'name or description', - 'interpretation': 'text', - 'operator': 'regex', - 'string': part[0], - 'error': False - }, - 'operator': 'or', - 'val1': { - 'operator': 'regex_match', - 'val1': 'name', - 'val2': part[0] - }, - 'val2': { - 'operator': 'regex_match', - 'val1': 'description', - 'val2': part[0] - } - } + 'interpretation': { + 'attribute': 'name or description', + 'interpretation': 'text', + 'operator': 'regex', + 'string': part[0], + 'error': False, + }, + 'operator': 'or', + 'val1': {'operator': 'regex_match', 'val1': 'name', 'val2': part[0]}, + 'val2': { + 'operator': 'regex_match', + 'val1': 'description', + 'val2': part[0], + }, + } return dictsql - class PrefixSmartParser(SmartParser): attributes = { 'added': True, @@ -455,77 +444,73 @@ class PrefixSmartParser(SmartParser): 'used_addreses': True, 'vlan': True, 'vrf': True, - } + } def _string_to_dictsql(self, part): """ Do magic matching of single words or quoted string """ - self._logger.debug("parsing string: " + unicode(part[0]) + " of type: " + part.getName()) + self._logger.debug("parsing string: %s of type: %s", part[0], part.getName()) if part.getName() == 'tag': - self._logger.debug("Query part '" + part[0] + "' interpreted as tag") + self._logger.debug("Query part '%s' interpreted as tag", part[0]) dictsql = { - 'interpretation': { - 'string': part[0], - 'interpretation': '(inherited) tag', - 'attribute': 'tag', - 'operator': 'equals_any', - 'error': False - }, - 'operator': 'or', - 'val1': { - 'operator': 'equals_any', - 'val1': 'tags', - 'val2': part[0][1:] - }, - 'val2': { - 'operator': 'equals_any', - 'val1': 'inherited_tags', - 'val2': part[0][1:] - } + 'interpretation': { + 'string': part[0], + 'interpretation': '(inherited) tag', + 'attribute': 'tag', + 'operator': 'equals_any', + 'error': False, + }, + 'operator': 'or', + 'val1': {'operator': 'equals_any', 'val1': 'tags', 'val2': part[0][1:]}, + 'val2': { + 'operator': 'equals_any', + 'val1': 'inherited_tags', + 'val2': part[0][1:], + }, } elif part.getName() == 'vrf_rt': - self._logger.debug("Query part '" + part.vrf_rt + "' interpreted as VRF RT") + self._logger.debug("Query part '%s' interpreted as VRF RT", part.vrf_rt) dictsql = { - 'interpretation': { - 'attribute': 'VRF RT', - 'interpretation': 'vrf_rt', - 'operator': 'equals', - 'string': part.vrf_rt, - 'error': False - }, + 'interpretation': { + 'attribute': 'VRF RT', + 'interpretation': 'vrf_rt', 'operator': 'equals', - 'val1': 'vrf_rt', - 'val2': part.vrf_rt - } + 'string': part.vrf_rt, + 'error': False, + }, + 'operator': 'equals', + 'val1': 'vrf_rt', + 'val2': part.vrf_rt, + } elif part.getName() == 'ipv6_address': - self._logger.debug("Query part '" + part.ipv6_address + "' interpreted as IPv6 address") 
+            self._logger.debug("Query part '%s' interpreted as IPv6 address", part.ipv6_address)
             dictsql = {
                 'interpretation': {
                     'string': part.ipv6_address,
                     'interpretation': 'IPv6 address',
                     'attribute': 'prefix',
                     'operator': 'contains_equals',
-                    'error': False
+                    'error': False,
                 },
                 'operator': 'contains_equals',
                 'val1': 'prefix',
-                'val2': part.ipv6_address
+                'val2': part.ipv6_address,
             }
 
         elif part.getName() == 'ipv6_prefix':
-            self._logger.debug("Query part '" + part.ipv6_prefix[0] + "' interpreted as IPv6 prefix")
+            self._logger.debug("Query part '%s' interpreted as IPv6 prefix", part.ipv6_prefix[0])
 
-            strict_prefix = unicode(IPy.IP(part.ipv6_prefix[0], make_net=True))
+            strict_prefix = str(IPy.IP(part.ipv6_prefix[0], make_net=True))
 
             interp = {
-                'string': part.ipv6_prefix[0],
-                'interpretation': 'IPv6 prefix',
-                'attribute': 'prefix',
-                'operator': 'contained_within_equals',
-                'error': False
-            }
+                'string': part.ipv6_prefix[0],
+                'interpretation': 'IPv6 prefix',
+                'attribute': 'prefix',
+                'operator': 'contained_within_equals',
+                'error': False,
+            }
 
             if part.ipv6_prefix[0] != strict_prefix:
                 interp['strict_prefix'] = strict_prefix
@@ -533,7 +518,7 @@ def _string_to_dictsql(self, part):
                 'interpretation': interp,
                 'operator': 'contained_within_equals',
                 'val1': 'prefix',
-                'val2': strict_prefix
+                'val2': strict_prefix,
             }
 
         else:
@@ -541,7 +526,7 @@ def _string_to_dictsql(self, part):
             # using pyparsing we do a bit of good ol parsing here
 
             if self._get_afi(part[0]) == 4 and len(part[0].split('/')) == 2:
-                self._logger.debug("Query part '" + part[0] + "' interpreted as prefix")
+                self._logger.debug("Query part '%s' interpreted as prefix", part[0])
                 address, prefix_length = part[0].split('/')
 
                 # complete a prefix to it's fully expanded form
@@ -551,15 +536,15 @@ def _string_to_dictsql(self, part):
                     address += '.0'
 
                 prefix = address + '/' + prefix_length
-                strict_prefix = unicode(IPy.IP(part[0], make_net=True))
+                strict_prefix = str(IPy.IP(part[0], make_net=True))
 
                 interp = {
-                    'string': part[0],
-                    'interpretation': 'IPv4 prefix',
-                    'attribute': 'prefix',
-                    'operator': 'contained_within_equals',
-                    'error': False
-                }
+                    'string': part[0],
+                    'interpretation': 'IPv4 prefix',
+                    'attribute': 'prefix',
+                    'operator': 'contained_within_equals',
+                    'error': False,
+                }
 
                 if prefix != part[0]:
                     interp['expanded'] = prefix
@@ -571,80 +556,79 @@ def _string_to_dictsql(self, part):
                     'interpretation': interp,
                     'operator': 'contained_within_equals',
                     'val1': 'prefix',
-                    'val2': strict_prefix
+                    'val2': strict_prefix,
                 }
 
             # IPv4 address
             # split on dot to make sure we have all four octets before we do a
             # search
             elif self._get_afi(part[0]) == 4 and len(part[0].split('.')) == 4:
-                self._logger.debug("Query part '" + part[0] + "' interpreted as prefix")
-                address = unicode(IPy.IP(part[0]))
+                self._logger.debug("Query part '%s' interpreted as prefix", part[0])
+                address = str(IPy.IP(part[0]))
                 dictsql = {
                     'interpretation': {
                         'string': address,
                         'interpretation': 'IPv4 address',
                         'attribute': 'prefix',
                         'operator': 'contains_equals',
-                        'error': False
+                        'error': False,
                     },
                     'operator': 'contains_equals',
                     'val1': 'prefix',
-                    'val2': address
+                    'val2': address,
                 }
 
             else:
                 # Description or comment
-                self._logger.debug("Query part '" + part[0] + "' interpreted as text")
+                self._logger.debug("Query part '%s' interpreted as text", part[0])
                 dictsql = {
-                    'interpretation': {
-                        'string': part[0],
-                        'interpretation': 'text',
-                        'attribute': 'description or comment or node or order_id or customer_id',
-                        'operator': 'regex',
-                        'error': False
-                    },
-                    'operator': 'or',
-                    'val1': {
-                        'operator': 'or',
-                        'val1': {
-                            'operator': 'or',
-                            'val1': {
-                                'operator': 'or',
-                                'val1': {
-                                    'operator': 'regex_match',
-                                    'val1': 'comment',
-                                    'val2': part[0]
-                                },
-                                'val2': {
-                                    'operator': 'regex_match',
-                                    'val1': 'description',
-                                    'val2': part[0]
-                                }
-                            },
-                            'val2': {
-                                'operator': 'regex_match',
-                                'val1': 'node',
-                                'val2': part[0]
-                            }
-                        },
-                        'val2': {
-                            'operator': 'regex_match',
-                            'val1': 'order_id',
-                            'val2': part[0]
-                        },
-                    },
-                    'val2': {
-                        'operator': 'regex_match',
-                        'val1': 'customer_id',
-                        'val2': part[0]
-                    }
-                }
+                    'interpretation': {
+                        'string': part[0],
+                        'interpretation': 'text',
+                        'attribute': 'description or comment or node or order_id or customer_id',
+                        'operator': 'regex',
+                        'error': False,
+                    },
+                    'operator': 'or',
+                    'val1': {
+                        'operator': 'or',
+                        'val1': {
+                            'operator': 'or',
+                            'val1': {
+                                'operator': 'or',
+                                'val1': {
+                                    'operator': 'regex_match',
+                                    'val1': 'comment',
+                                    'val2': part[0],
+                                },
+                                'val2': {
+                                    'operator': 'regex_match',
+                                    'val1': 'description',
+                                    'val2': part[0],
+                                },
+                            },
+                            'val2': {
+                                'operator': 'regex_match',
+                                'val1': 'node',
+                                'val2': part[0],
+                            },
+                        },
+                        'val2': {
+                            'operator': 'regex_match',
+                            'val1': 'order_id',
+                            'val2': part[0],
+                        },
+                    },
+                    'val2': {
+                        'operator': 'regex_match',
+                        'val1': 'customer_id',
+                        'val2': part[0],
+                    },
+                }
 
         return dictsql
 
-
 class VrfSmartParser(SmartParser):
     attributes = {
         'description': True,
@@ -663,110 +647,100 @@ class VrfSmartParser(SmartParser):
     def _string_to_dictsql(self, part):
         """ Do magic matching of single words or quoted string
         """
-        self._logger.debug("parsing string: " + unicode(part[0]) + " of type: " + part.getName())
+        self._logger.debug("parsing string: %s of type: %s", part[0], part.getName())
 
         if part.getName() == 'tag':
-            self._logger.debug("Query part '" + part[0] + "' interpreted as tag")
+            self._logger.debug("Query part '%s' interpreted as tag", part[0])
             dictsql = {
-                'interpretation': {
-                    'string': part[0],
-                    'interpretation': 'tag',
-                    'attribute': 'tag',
-                    'operator': 'equals_any',
-                    'error': False
-                },
+                'interpretation': {
+                    'string': part[0],
+                    'interpretation': 'tag',
+                    'attribute': 'tag',
+                    'operator': 'equals_any',
+                    'error': False,
+                },
                 'operator': 'equals_any',
-                'val1': 'tags',
-                'val2': part[0][1:]
-            }
+                'val1': 'tags',
+                'val2': part[0][1:],
+            }
 
         elif part.getName() == 'vrf_rt':
-            self._logger.debug("Query part '" + part.vrf_rt + "' interpreted as VRF RT")
+            self._logger.debug("Query part '%s' interpreted as VRF RT", part.vrf_rt)
 
             # TODO: enable this, our fancy new interpretation
-            dictsql = {
-                'interpretation': {
-                    'attribute': 'VRF RT',
-                    'interpretation': 'vrf_rt',
-                    'operator': 'equals',
-                    'string': part.vrf_rt,
-                    'error': False
-                },
-                'operator': 'equals',
-                'val1': 'vrf_rt',
-                'val2': part.vrf_rt
-            }
+            # dictsql = {
+            #     'interpretation': {
+            #         'attribute': 'VRF RT',
+            #         'interpretation': 'vrf_rt',
+            #         'operator': 'equals',
+            #         'string': part.vrf_rt,
+            #         'error': False,
+            #     },
+            #     'operator': 'equals',
+            #     'val1': 'vrf_rt',
+            #     'val2': part.vrf_rt,
+            # }
 
             # using old interpretation for the time being to make sure we align
             # with old smart search interpreter
             dictsql = {
-                'interpretation': {
-                    'string': part.vrf_rt,
-                    'interpretation': 'text',
-                    'attribute': 'vrf or name or description',
-                    'operator': 'regex',
-                    'error': False
-                },
+                'interpretation': {
+                    'string': part.vrf_rt,
+                    'interpretation': 'text',
+                    'attribute': 'vrf or name or description',
+                    'operator': 'regex',
+                    'error': False,
+                },
+                'operator': 'or',
+                'val1': {
                 'operator': 'or',
                 'val1': {
                     'operator': 'or',
                     'val1': {
-                        'operator': 'or',
-                        'val1': {
-                            'operator': 'regex_match',
-                            'val1': 'name',
-                            'val2': part.vrf_rt
-                        },
-                        'val2': {
-                            'operator': 'regex_match',
-                            'val1': 'description',
-                            'val2': part.vrf_rt
-                        }
+                        'operator': 'regex_match',
+                        'val1': 'name',
+                        'val2': part.vrf_rt,
                     },
                     'val2':
{ 'operator': 'regex_match', - 'val1': 'rt', - 'val2': part.vrf_rt - } - } + 'val1': 'description', + 'val2': part.vrf_rt, + }, + }, + 'val2': {'operator': 'regex_match', 'val1': 'rt', 'val2': part.vrf_rt}, + } else: - self._logger.debug("Query part '" + part[0] + "' interpreted as text") + self._logger.debug("Query part '%s' interpreted as text", part[0]) dictsql = { - 'interpretation': { - 'string': part[0], - 'interpretation': 'text', - 'attribute': 'vrf or name or description', - 'operator': 'regex', - 'error': False - }, + 'interpretation': { + 'string': part[0], + 'interpretation': 'text', + 'attribute': 'vrf or name or description', + 'operator': 'regex', + 'error': False, + }, + 'operator': 'or', + 'val1': { 'operator': 'or', 'val1': { - 'operator': 'or', - 'val1': { - 'operator': 'regex_match', - 'val1': 'name', - 'val2': part[0] - }, - 'val2': { - 'operator': 'regex_match', - 'val1': 'description', - 'val2': part[0] - } + 'operator': 'regex_match', + 'val1': 'name', + 'val2': part[0], }, 'val2': { 'operator': 'regex_match', - 'val1': 'rt', - 'val2': part[0] - } - } + 'val1': 'description', + 'val2': part[0], + }, + }, + 'val2': {'operator': 'regex_match', 'val1': 'rt', 'val2': part[0]}, + } return dictsql - class ParserError(Exception): """ General parser error """ - if __name__ == '__main__': # set logging format LOG_FORMAT = "%(asctime)s: %(module)-10s %(levelname)-8s %(message)s" @@ -776,12 +750,14 @@ class ParserError(Exception): logger.setLevel(logging.DEBUG) p = VrfSmartParser() - #dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar")') - #dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar"))') + # dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar")') + # dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar"))') import sys + dictsql = p.parse(' '.join(sys.argv[1:])) import pprint - print "----------" - pp = pprint.PrettyPrinter(indent = 4) + + print("----------") + pp = pprint.PrettyPrinter(indent=4) pp.pprint(dictsql) - print "----------" + print("----------") diff --git a/nipap/nipap/xmlrpc.py b/nipap/nipap/xmlrpc.py index 86ff65f25..b4c11fcd8 100755 --- a/nipap/nipap/xmlrpc.py +++ b/nipap/nipap/xmlrpc.py @@ -10,14 +10,15 @@ import time import pytz from functools import wraps -from flask import Flask +from flask import Flask, current_app from flask import request, Response -from flaskext.xmlrpc import XMLRPCHandler, Fault +from flask_xmlrpcre.xmlrpcre import XMLRPCHandler, Fault +from flask_compress import Compress -from nipapconfig import NipapConfig -from backend import Nipap, NipapError +from .nipapconfig import NipapConfig +from .backend import Nipap, NipapError import nipap -from authlib import AuthFactory, AuthError +from .authlib import AuthFactory, AuthError def setup(app): @@ -29,14 +30,13 @@ def setup(app): return app - def _mangle_prefix(res): """ Mangle prefix result """ # fugly cast from large numbers to string to deal with XML-RPC - res['total_addresses'] = unicode(res['total_addresses']) - res['used_addresses'] = unicode(res['used_addresses']) - res['free_addresses'] = unicode(res['free_addresses']) + res['total_addresses'] = str(res['total_addresses']) + res['used_addresses'] = str(res['used_addresses']) + res['free_addresses'] = str(res['free_addresses']) # postgres has notion of infinite while datetime hasn't, if expires # is equal to the max datetime we assume it is infinity and instead @@ -62,11 
+62,11 @@ def requires_auth(f):
     """ Class decorator for XML-RPC functions that requires auth
     """
     @wraps(f)
-
     def decorated(self, *args, **kwargs):
         """
        """
+        # NOTE: don't log the raw args here - they carry the auth struct,
+        # including any password, so a debug log would leak credentials
+        self.logger.debug("authenticating call with %d args", len(args))
         # Fetch auth options from args
         auth_options = {}
         nipap_args = {}
@@ -75,18 +75,18 @@ def decorated(self, *args, **kwargs):
         if len(args) == 1:
             nipap_args = args[0]
         else:
-            self.logger.debug("Malformed request: got %d parameters" % len(args))
-            raise Fault(1000, ("NIPAP API functions take exactly 1 argument (%d given)") % len(args))
+            self.logger.debug("Malformed request: got %s parameters", len(args))
+            raise Fault(1000, "NIPAP API functions take exactly 1 argument ({} given)".format(len(args)))
 
-        if type(nipap_args) != dict:
+        if not isinstance(nipap_args, dict):
             self.logger.debug("Function argument is not struct")
-            raise Fault(1000, ("Function argument must be XML-RPC struct/Python dict (Python %s given)." %
-                type(nipap_args).__name__ ))
+            raise Fault(1000, "Function argument must be XML-RPC struct/Python dict (Python {} given).".format(
+                type(nipap_args).__name__))
 
         # fetch auth options
         try:
             auth_options = nipap_args['auth']
-            if type(auth_options) is not dict:
+            if not isinstance(auth_options, dict):
                 raise ValueError()
         except (KeyError, ValueError):
             self.logger.debug("Missing/invalid authentication options in request.")
@@ -134,11 +134,48 @@ def decorated(self, *args, **kwargs):
         new_args = dict(args[0])
         new_args['auth'] = auth
 
+        self.logger.debug('Call authenticated - calling.. with new_args: %s', new_args)
         return f(self, *(new_args,), **kwargs)
 
     return decorated
 
 
+def xmlrpc_bignum2str(res, keys=['num_prefixes', 'total_addresses', 'used_addresses', 'free_addresses']):
+    """
+    Cast large numbers to strings to deal with XML-RPC, which cannot marshal
+    integers beyond 32 bits. Performance is on par with the per-function
+    casting blocks this helper replaces, and it reads better. Since the
+    targeted keys all start with 'num_', 'total_', 'used_' or 'free_', a
+    variant using .startswith() was also tried, but it benchmarked slower.
+ + :param dict[str, dict] res: psql result to cast + :param list[str] keys: list of keys to cast to string if required + :rtype: dict[str, dict] + """ + if isinstance(res, dict): + if 'result' in res: + for entry in res['result']: + for v in ['_v4', '_v6']: + for key in [k+v for k in keys]: + if entry[key] is not None and not isinstance(entry[key], str): + entry[key] = str(entry[key]) + elif 'id' in res: + for v in ['_v4', '_v6']: + for key in [k + v for k in keys]: + if res[key] is not None and not isinstance(res[key], str): + res[key] = str(res[key]) + else: + raise ValueError('Illegal result: {}'.format(res)) + + elif isinstance(res, list): + for entry in res: + for v in ['_v4', '_v6']: + for key in [k+v for k in keys]: + if entry[key] is not None and not isinstance(entry[key], str): + entry[key] = str(entry[key]) + else: + raise ValueError('Illegal result: {}'.format(res)) + return res + class NipapXMLRPC: """ NIPAP XML-RPC API @@ -146,8 +183,7 @@ class NipapXMLRPC: def __init__(self): self.nip = Nipap() self.logger = logging.getLogger() - - + self.logger.setLevel(logging.DEBUG) @requires_auth def echo(self, args): @@ -172,8 +208,6 @@ def echo(self, args): if args.get('message') is not None: return args.get('message') - - @requires_auth def version(self, args): """ Returns nipapd version @@ -182,8 +216,6 @@ def version(self, args): """ return nipap.__version__ - - @requires_auth def db_version(self, args): """ Returns schema version of nipap psql db @@ -192,8 +224,6 @@ def db_version(self, args): """ return self.nip._get_db_version() - - # # VRF FUNCTIONS # @@ -212,20 +242,12 @@ def add_vrf(self, args): """ try: res = self.nip.add_vrf(args.get('auth'), args.get('attr')) - # fugly cast from large numbers to string to deal with XML-RPC - for val in ( 'num_prefixes_v4', 'num_prefixes_v6', - 'total_addresses_v4', 'total_addresses_v6', - 'used_addresses_v4', 'used_addresses_v6', 'free_addresses_v4', - 'free_addresses_v6'): - res[val] = unicode(res[val]) - + res = xmlrpc_bignum2str(res, ['num_prefixes', 'total_addresses', 'used_addresses', 'free_addresses']) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def remove_vrf(self, args): @@ -241,14 +263,12 @@ def remove_vrf(self, args): try: self.nip.remove_vrf(args.get('auth'), args.get('vrf')) except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def list_vrf(self, args): - """ List VRFs. + """List VRFs. 
Valid keys in the `args`-struct: @@ -263,19 +283,12 @@ def list_vrf(self, args): res = self.nip.list_vrf(args.get('auth'), args.get('vrf')) # fugly cast from large numbers to string to deal with XML-RPC - for vrf in res: - for val in ( 'num_prefixes_v4', 'num_prefixes_v6', - 'total_addresses_v4', 'total_addresses_v6', - 'used_addresses_v4', 'used_addresses_v6', 'free_addresses_v4', - 'free_addresses_v6'): - vrf[val] = unicode(vrf[val]) + res = xmlrpc_bignum2str(res, ['num_prefixes', 'total_addresses', 'used_addresses', 'free_addresses']) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def edit_vrf(self, args): @@ -294,19 +307,12 @@ def edit_vrf(self, args): res = self.nip.edit_vrf(args.get('auth'), args.get('vrf'), args.get('attr')) # fugly cast from large numbers to string to deal with XML-RPC - for vrf in res: - for val in ( 'num_prefixes_v4', 'num_prefixes_v6', - 'total_addresses_v4', 'total_addresses_v6', - 'used_addresses_v4', 'used_addresses_v6', 'free_addresses_v4', - 'free_addresses_v6'): - vrf[val] = unicode(vrf[val]) + res = xmlrpc_bignum2str(res, ['num_prefixes', 'total_addresses', 'used_addresses', 'free_addresses']) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def search_vrf(self, args): @@ -329,19 +335,12 @@ def search_vrf(self, args): res = self.nip.search_vrf(args.get('auth'), args.get('query'), args.get('search_options') or {}) # fugly cast from large numbers to string to deal with XML-RPC - for vrf in res['result']: - for val in ( 'num_prefixes_v4', 'num_prefixes_v6', - 'total_addresses_v4', 'total_addresses_v6', - 'used_addresses_v4', 'used_addresses_v6', 'free_addresses_v4', - 'free_addresses_v6'): - vrf[val] = unicode(vrf[val]) + res = xmlrpc_bignum2str(res, ['num_prefixes', 'total_addresses', 'used_addresses', 'free_addresses']) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def smart_search_vrf(self, args): @@ -361,24 +360,20 @@ def smart_search_vrf(self, args): search string and the search options used. 
""" try: - res = self.nip.smart_search_vrf(args.get('auth'), - args.get('query_string'), args.get('search_options', {}), - args.get('extra_query')) + res = self.nip.smart_search_vrf( + args.get('auth'), + args.get('query_string'), + args.get('search_options', {}), + args.get('extra_query'), + ) # fugly cast from large numbers to string to deal with XML-RPC - for vrf in res['result']: - for val in ( 'num_prefixes_v4', 'num_prefixes_v6', - 'total_addresses_v4', 'total_addresses_v6', - 'used_addresses_v4', 'used_addresses_v6', 'free_addresses_v4', - 'free_addresses_v6'): - vrf[val] = unicode(vrf[val]) + res = xmlrpc_bignum2str(res, ['num_prefixes', 'total_addresses', 'used_addresses', 'free_addresses']) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) # # POOL FUNCTIONS @@ -400,22 +395,13 @@ def add_pool(self, args): res = self.nip.add_pool(args.get('auth'), args.get('attr')) # fugly cast from large numbers to string to deal with XML-RPC - for val in ( 'member_prefixes_v4', 'member_prefixes_v6', - 'used_prefixes_v4', 'used_prefixes_v6', 'free_prefixes_v4', - 'free_prefixes_v6', 'total_prefixes_v4', - 'total_prefixes_v6', 'total_addresses_v4', - 'total_addresses_v6', 'used_addresses_v4', - 'used_addresses_v6', 'free_addresses_v4', - 'free_addresses_v6'): - if res[val] is not None: - res[val] = unicode(res[val]) + res = xmlrpc_bignum2str(res, ['member_prefixes', 'used_prefixes', 'free_prefixes', 'total_prefixes', + 'total_addresses', 'used_addresses', 'free_addresses']) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def remove_pool(self, args): @@ -431,10 +417,8 @@ def remove_pool(self, args): try: self.nip.remove_pool(args.get('auth'), args.get('pool')) except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def list_pool(self, args): @@ -453,23 +437,13 @@ def list_pool(self, args): res = self.nip.list_pool(args.get('auth'), args.get('pool')) # fugly cast from large numbers to string to deal with XML-RPC - for pool in res: - for val in ( 'member_prefixes_v4', 'member_prefixes_v6', - 'used_prefixes_v4', 'used_prefixes_v6', - 'free_prefixes_v4', 'free_prefixes_v6', - 'total_prefixes_v4', 'total_prefixes_v6', - 'total_addresses_v4', 'total_addresses_v6', - 'used_addresses_v4', 'used_addresses_v6', - 'free_addresses_v4', 'free_addresses_v6'): - if pool[val] is not None: - pool[val] = unicode(pool[val]) + res = xmlrpc_bignum2str(res, ['member_prefixes', 'used_prefixes', 'free_prefixes', 'total_prefixes', + 'total_addresses', 'used_addresses', 'free_addresses']) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def edit_pool(self, args): @@ -488,23 +462,13 @@ def edit_pool(self, args): res = self.nip.edit_pool(args.get('auth'), args.get('pool'), args.get('attr')) # fugly cast from large numbers to string to deal with XML-RPC - for pool in res: - for val in ( 'member_prefixes_v4', 'member_prefixes_v6', - 'used_prefixes_v4', 'used_prefixes_v6', 'free_prefixes_v4', - 
'free_prefixes_v6', 'total_prefixes_v4', - 'total_prefixes_v6', 'total_addresses_v4', - 'total_addresses_v6', 'used_addresses_v4', - 'used_addresses_v6', 'free_addresses_v4', - 'free_addresses_v6'): - if pool[val] is not None: - pool[val] = unicode(pool[val]) + res = xmlrpc_bignum2str(res, ['member_prefixes', 'used_prefixes', 'free_prefixes', 'total_prefixes', + 'total_addresses', 'used_addresses', 'free_addresses']) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def search_pool(self, args): @@ -527,23 +491,13 @@ def search_pool(self, args): res = self.nip.search_pool(args.get('auth'), args.get('query'), args.get('search_options') or {}) # fugly cast from large numbers to string to deal with XML-RPC - for pool in res['result']: - for val in ( 'member_prefixes_v4', 'member_prefixes_v6', - 'used_prefixes_v4', 'used_prefixes_v6', - 'free_prefixes_v4', 'free_prefixes_v6', - 'total_prefixes_v4', 'total_prefixes_v6', - 'total_addresses_v4', 'total_addresses_v6', - 'used_addresses_v4', 'used_addresses_v6', - 'free_addresses_v4', 'free_addresses_v6'): - if pool[val] is not None: - pool[val] = unicode(pool[val]) + res = xmlrpc_bignum2str(res, ['member_prefixes', 'used_prefixes', 'free_prefixes', 'total_prefixes', + 'total_addresses', 'used_addresses', 'free_addresses']) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def smart_search_pool(self, args): @@ -563,28 +517,21 @@ def smart_search_pool(self, args): query string and the search options used. 
""" try: - res = self.nip.smart_search_pool(args.get('auth'), - args.get('query_string'), args.get('search_options') or {}, - args.get('extra_query')) + res = self.nip.smart_search_pool( + args.get('auth'), + args.get('query_string'), + args.get('search_options') or {}, + args.get('extra_query'), + ) # fugly cast from large numbers to string to deal with XML-RPC - for pool in res['result']: - for val in ( 'member_prefixes_v4', 'member_prefixes_v6', - 'used_prefixes_v4', 'used_prefixes_v6', - 'free_prefixes_v4', 'free_prefixes_v6', - 'total_prefixes_v4', 'total_prefixes_v6', - 'total_addresses_v4', 'total_addresses_v6', - 'used_addresses_v4', 'used_addresses_v6', - 'free_addresses_v4', 'free_addresses_v6'): - if pool[val] is not None: - pool[val] = unicode(pool[val]) + res = xmlrpc_bignum2str(res, ['member_prefixes', 'used_prefixes', 'free_prefixes', 'total_prefixes', + 'total_addresses', 'used_addresses', 'free_addresses']) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) # # PREFIX FUNCTIONS @@ -611,10 +558,8 @@ def add_prefix(self, args): res = _mangle_prefix(res) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def list_prefix(self, args): @@ -639,14 +584,12 @@ def list_prefix(self, args): prefix = _mangle_prefix(prefix) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def edit_prefix(self, args): - """ Edit prefix. + """Edit prefix. Valid keys in the `args`-struct: @@ -655,7 +598,7 @@ def edit_prefix(self, args): * `prefix` [struct] Prefix attributes which describes what prefix(es) to edit. * `attr` [struct] - Attribuets to set on the new prefix. + Attributes to set on the new prefix. 
""" try: res = self.nip.edit_prefix(args.get('auth'), args.get('prefix'), args.get('attr')) @@ -664,10 +607,8 @@ def edit_prefix(self, args): prefix = _mangle_prefix(prefix) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def remove_prefix(self, args): @@ -685,10 +626,8 @@ def remove_prefix(self, args): try: return self.nip.remove_prefix(args.get('auth'), args.get('prefix'), args.get('recursive')) except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def search_prefix(self, args): @@ -717,10 +656,8 @@ def search_prefix(self, args): prefix = _mangle_prefix(prefix) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def smart_search_prefix(self, args): @@ -747,17 +684,23 @@ def smart_search_prefix(self, args): """ try: - res = self.nip.smart_search_prefix(args.get('auth'), - args.get('query_string'), args.get('search_options') or {}, - args.get('extra_query')) + self.logger.debug('Entering ssp') + res = self.nip.smart_search_prefix( + args.get('auth'), + args.get('query_string'), + args.get('search_options') or {}, + args.get('extra_query'), + ) # mangle result for prefix in res['result']: prefix = _mangle_prefix(prefix) return res except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - + self.logger.debug(str(exc)) + self.logger.exception('unhandled..', exc) + raise Fault(exc.error_code, str(exc)) + except Exception as e: + self.logger.exception('unhandled..', e) @requires_auth @@ -776,10 +719,8 @@ def find_free_prefix(self, args): try: return self.nip.find_free_prefix(args.get('auth'), args.get('vrf'), args.get('args')) except NipapError as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) # # ASN FUNCTIONS @@ -801,10 +742,8 @@ def add_asn(self, args): try: return self.nip.add_asn(args.get('auth'), args.get('attr')) except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def remove_asn(self, args): @@ -821,10 +760,8 @@ def remove_asn(self, args): try: self.nip.remove_asn(args.get('auth'), args.get('asn')) except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def list_asn(self, args): @@ -843,10 +780,8 @@ def list_asn(self, args): try: return self.nip.list_asn(args.get('auth'), args.get('asn') or {}) except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise Fault(exc.error_code, unicode(exc)) - - + self.logger.debug(str(exc)) + raise Fault(exc.error_code, str(exc)) @requires_auth def edit_asn(self, args): @@ -865,10 +800,8 @@ def edit_asn(self, args): try: return self.nip.edit_asn(args.get('auth'), args.get('asn'), args.get('attr')) except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) - raise 
 
 
     @requires_auth
@@ -776,10 +719,8 @@ def find_free_prefix(self, args):
         try:
             return self.nip.find_free_prefix(args.get('auth'), args.get('vrf'), args.get('args'))
         except NipapError as exc:
-            self.logger.debug(unicode(exc))
-            raise Fault(exc.error_code, unicode(exc))
-
-
+            self.logger.debug(str(exc))
+            raise Fault(exc.error_code, str(exc))
 
     #
     # ASN FUNCTIONS
     #
@@ -801,10 +742,8 @@ def add_asn(self, args):
         try:
             return self.nip.add_asn(args.get('auth'), args.get('attr'))
         except (AuthError, NipapError) as exc:
-            self.logger.debug(unicode(exc))
-            raise Fault(exc.error_code, unicode(exc))
-
-
+            self.logger.debug(str(exc))
+            raise Fault(exc.error_code, str(exc))
 
     @requires_auth
     def remove_asn(self, args):
@@ -821,10 +760,8 @@ def remove_asn(self, args):
         try:
             self.nip.remove_asn(args.get('auth'), args.get('asn'))
         except (AuthError, NipapError) as exc:
-            self.logger.debug(unicode(exc))
-            raise Fault(exc.error_code, unicode(exc))
-
-
+            self.logger.debug(str(exc))
+            raise Fault(exc.error_code, str(exc))
 
     @requires_auth
     def list_asn(self, args):
@@ -843,10 +780,8 @@ def list_asn(self, args):
         try:
             return self.nip.list_asn(args.get('auth'), args.get('asn') or {})
         except (AuthError, NipapError) as exc:
-            self.logger.debug(unicode(exc))
-            raise Fault(exc.error_code, unicode(exc))
-
-
+            self.logger.debug(str(exc))
+            raise Fault(exc.error_code, str(exc))
 
     @requires_auth
     def edit_asn(self, args):
@@ -865,10 +800,8 @@ def edit_asn(self, args):
         try:
             return self.nip.edit_asn(args.get('auth'), args.get('asn'), args.get('attr'))
         except (AuthError, NipapError) as exc:
-            self.logger.debug(unicode(exc))
-            raise Fault(exc.error_code, unicode(exc))
-
-
+            self.logger.debug(str(exc))
+            raise Fault(exc.error_code, str(exc))
 
     @requires_auth
     def search_asn(self, args):
@@ -891,10 +824,8 @@ def search_asn(self, args):
         try:
             return self.nip.search_asn(args.get('auth'), args.get('query'), args.get('search_options') or {})
         except (AuthError, NipapError) as exc:
-            self.logger.debug(unicode(exc))
-            raise Fault(exc.error_code, unicode(exc))
-
-
+            self.logger.debug(str(exc))
+            raise Fault(exc.error_code, str(exc))
 
     @requires_auth
     def smart_search_asn(self, args):
@@ -915,14 +846,18 @@ def smart_search_asn(self, args):
         """
         try:
-            return self.nip.smart_search_asn(args.get('auth'),
-                args.get('query_string'), args.get('search_options') or {},
-                args.get('extra_query'))
+            return self.nip.smart_search_asn(
+                args.get('auth'),
+                args.get('query_string'),
+                args.get('search_options') or {},
+                args.get('extra_query'),
+            )
         except (AuthError, NipapError) as exc:
-            self.logger.debug(unicode(exc))
-            raise Fault(exc.error_code, unicode(exc))
-
+            self.logger.debug(str(exc))
+            raise Fault(exc.error_code, str(exc))
 
 if __name__ == '__main__':
+    # for ad-hoc runs, build and wire up an app ourselves; flask's current_app
+    # proxy only works inside an application context, so it cannot be called here
+    if 'app' not in locals() and 'app' not in globals():
+        app = setup(Flask(__name__))
     app.run()
diff --git a/nipap/nipapd b/nipap/nipapd
index b0a1c6ae4..d52b26b22 100755
--- a/nipap/nipapd
+++ b/nipap/nipapd
@@ -1,13 +1,13 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # vim: et sw=4 sts=4 :
 
 import fcntl
-import logging
 import logging.handlers
+import logging
 import argparse
 import os
 import sys
-import ConfigParser
+import configparser
 import ssl
 
 from tornado.netutil import bind_sockets
@@ -41,11 +41,7 @@ def exit_cleanup():
     except psutil.NoSuchProcess:
         return
 
-    # Handle API change in psutil 2.0.0
-    if int(psutil.__version__[0]) <= 2:
-        children = p.get_children
-    else:
-        children = p.children
+    children = p.children
 
     for pid in children(recursive=True):
         os.kill(pid.pid, signal.SIGTERM)
@@ -61,18 +57,19 @@ def handle_sigterm(sig, frame):
     """
     exit_cleanup()
     # and make a clean exit ourselves
-    #sys.exit(0)
+    # sys.exit(0)
+
 
 # register signal handler for SIGTERM
 signal.signal(signal.SIGTERM, handle_sigterm)
 
-
 def drop_privileges(uid_name='nobody', gid_name='nogroup'):
     if os.getuid() != 0:
         raise NipapError("non-root user cannot drop privileges")
 
-    import pwd, grp
+    import pwd
+    import grp
     # Get the uid/gid from the name
     uid = pwd.getpwnam(uid_name).pw_uid
     gid = grp.getgrnam(gid_name).gr_gid
@@ -93,23 +90,29 @@ if __name__ == '__main__':
     parser.add_argument('--auto-install-db', action='store_true', help='automatically install db schema')
     parser.add_argument('--auto-upgrade-db', action='store_true', help='automatically upgrade db schema')
     parser.add_argument('-d', '--debug', action='store_true', default=None, dest='debug', help='enable debugging')
-    parser.add_argument('-f', '--foreground', action='store_true', default=None, dest='foreground', help='run in foreground and log to stdout')
+    parser.add_argument('-f', '--foreground', action='store_true', default=None, dest='foreground',
+                        help='run in foreground and log to stdout')
     parser.add_argument('-l', '--listen', type=str, metavar='ADDRESS', help='listen to IPv4/6 ADDRESS')
     parser.add_argument('-p', '--port', dest='port', type=int, help='listen on TCP port PORT')
-    parser.add_argument('-s', '--ssl-port', dest='ssl_port', type=int, help='listen with SSL on TCP port PORT')
+    parser.add_argument('-s',
'--ssl-port', dest='ssl_port', type=int, + help='listen with SSL on TCP port PORT') + parser.add_argument('-c', '--config', dest='config_file', type=str, default='/etc/nipap/nipap.conf', + help='read configuration from file CONFIG_FILE') parser.add_argument('-P', '--pid-file', type=str, help='write a PID file to PID_FILE') - parser.add_argument('--no-pid-file', action='store_true', default=False, help='turn off writing PID file (overrides config file)') + parser.add_argument('--no-pid-file', action='store_true', default=False, + help='turn off writing PID file (overrides config file)') parser.add_argument('--version', action='store_true', help='display version information and exit') - parser.add_argument("--db-version", dest="dbversion", action="store_true", help="display database schema version information and exit") + parser.add_argument("--db-version", dest="dbversion", action="store_true", + help="display database schema version information and exit") # Arguments overwriting config settings - cfg_args = [ 'debug', 'foreground', 'port', 'config_file' ] + cfg_args = ['debug', 'foreground', 'port', 'config_file'] args = parser.parse_args() if args.version: import nipap - print "nipapd version:", nipap.__version__ + + print("nipapd version:", nipap.__version__) sys.exit(0) # set logging format @@ -142,9 +145,9 @@ if __name__ == '__main__': try: cfg = NipapConfig(args.config_file, default) - except NipapConfigError, exc: + except NipapConfigError as exc: if args.config_file: - print >> sys.stderr, "The specified configuration file ('" + args.config_file + "') does not exist" + print("The specified configuration file ('" + args.config_file + "') does not exist", file=sys.stderr) sys.exit(1) # if no config file is specified, we'll live with our defaults @@ -154,9 +157,9 @@ if __name__ == '__main__': for arg_dest in cfg_args: if arg_dest in args_dict and args_dict[arg_dest] is not None: try: - cfg.set('nipapd', arg_dest, unicode(args_dict[arg_dest])) - except ConfigParser.NoSectionError as exc: - print >> sys.stderr, "The configuration file contains errors:", exc + cfg.set('nipapd', arg_dest, str(args_dict[arg_dest])) + except configparser.NoSectionError as exc: + print("The configuration file contains errors:", exc, file=sys.stderr) sys.exit(1) # Validate configuration before forking, to be able to print error message to user @@ -179,52 +182,57 @@ if __name__ == '__main__': try: drop_privileges(run_user, run_group) except NipapError: - print >> sys.stderr, ("nipapd is configured to drop privileges and run as user '%s' and group '%s', \n" - "but was not started as root and can therefore not drop privileges") % (run_user, run_group) + print(("nipapd is configured to drop privileges and run as user '%s' and group '%s', \n" + "but was not started as root and can therefore not drop privileges") % (run_user, run_group), + file=sys.stderr) sys.exit(1) except KeyError: - print >> sys.stderr, "Could not drop privileges to user '%s' and group '%s'" % (run_user, run_group) + print("Could not drop privileges to user '%s' and group '%s'" % (run_user, run_group), file=sys.stderr) sys.exit(1) from nipap.backend import Nipap + try: nip = Nipap(args.auto_install_db, args.auto_upgrade_db) except NipapDatabaseSchemaError as exc: - print >> sys.stderr, "ERROR:", str(exc) - print >> sys.stderr, "HINT: You can automatically install required extensions and the nipap schema with --auto-install-db" + print("ERROR:", str(exc), file=sys.stderr) + print("HINT: You can automatically install required extensions and the 
nipap schema with --auto-install-db", + file=sys.stderr) sys.exit(1) except NipapError as exc: - print >> sys.stderr, "ERROR:", str(exc) + print("ERROR:", str(exc), file=sys.stderr) sys.exit(1) if args.dbversion: - print "nipap db schema:", nip._get_db_version() + print("nipap db schema:", nip._get_db_version()) sys.exit(0) # check local auth db version from nipap import authlib + a = authlib.SqliteAuth('local', 'a', 'b', 'c') try: latest = a._latest_db_version() if not latest: - print >> sys.stderr, "It seems your Sqlite database for local auth is out of date" - print >> sys.stderr, "Please run 'nipap-passwd --upgrade-database' to upgrade your database." + print("It seems your Sqlite database for local auth is out of date", file=sys.stderr) + print("Please run 'nipap-passwd --upgrade-database' to upgrade your database.", file=sys.stderr) sys.exit(2) - except authlib.AuthSqliteError, e: - print >> sys.stderr, "Error checking version of Sqlite database for local auth: %s" % e + except authlib.AuthSqliteError as e: + print("Error checking version of Sqlite database for local auth: %s" % e, file=sys.stderr) sys.exit(1) del a if not cfg.getboolean('nipapd', 'foreground'): import nipap.daemon + ret = nipap.daemon.createDaemon() # pid file handling if cfg.get('nipapd', 'pid_file') and not args.no_pid_file: # need a+ to be able to read PID from file try: - lf = open(cfg.get('nipapd', 'pid_file'), 'a+', 0) - except IOError, exc: + lf = open(cfg.get('nipapd', 'pid_file'), 'r+', 300) + except IOError as exc: logger.error("Unable to open PID file '" + str(exc.filename) + "': " + str(exc.strerror)) sys.exit(1) try: @@ -245,6 +253,7 @@ if __name__ == '__main__': rest = nipap.rest.setup(app) import nipap.xmlrpc + nipapxml = nipap.xmlrpc.setup(app) if not cfg.getboolean('nipapd', 'foreground'): @@ -261,7 +270,7 @@ if __name__ == '__main__': rest.logger.setLevel(logging.DEBUG) if cfg.getboolean('nipapd', 'syslog'): - log_syslog = logging.handlers.SysLogHandler(address = '/dev/log') + log_syslog = logging.handlers.SysLogHandler(address='/dev/log') log_syslog.setFormatter(logging.Formatter("%(levelname)-8s %(message)s")) logger.addHandler(log_syslog) nipapxml.logger.addHandler(log_syslog) diff --git a/nipap/requirements.txt b/nipap/requirements.txt index 5ab4e0b23..dfba709b9 100644 --- a/nipap/requirements.txt +++ b/nipap/requirements.txt @@ -2,7 +2,7 @@ zipp==1.2.0 importlib_metadata==2.1.3 Flask==1.1.2 Flask-Compress==1.9.0 -Flask-XML-RPC==0.1.2 +flask-xml-rpc-re==0.1.4 Flask-RESTful==0.3.8 requests==2.25.1 IPy==1.01 @@ -13,14 +13,15 @@ backports.ssl-match-hostname==3.7.0.1 certifi==2020.12.5 itsdangerous==1.1.0 parsedatetime==2.6 -psutil==1.2.1 # rq.filter: >= 1.0,<2.0 +psutil==5.7.2 psycopg2==2.8.6 --no-binary psycopg2 pyparsing==2.4.7 python-dateutil==2.8.1 # optional dependency on ldap -#python-ldap==2.4.19 +python-ldap==3.3.1 pytz==2021.1 pysqlite==2.8.3 -tornado==5.1 +tornado==6.0.4 wsgiref==0.1.2 -pyjwt==1.5.3 \ No newline at end of file +pyjwt==1.5.3 +docutils==0.16 diff --git a/nipap/setup.py b/nipap/setup.py index 57847c850..1e2e64741 100644 --- a/nipap/setup.py +++ b/nipap/setup.py @@ -1,9 +1,10 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 -from distutils.core import setup -import subprocess +from setuptools import setup +from docutils.core import publish_cmdline +from docutils.writers import manpage import sys - +import re import nipap @@ -11,50 +12,55 @@ def get_data_files(): # generate man pages using rst2man try: - subprocess.call(["rst2man", "nipapd.man.rst", "nipapd.8"]) - 
subprocess.call(["rst2man", "nipap-passwd.man.rst", "nipap-passwd.1"]) + publish_cmdline(writer=manpage.Writer(), argv=["nipapd.man.rst", "nipapd.8"]) + publish_cmdline(writer=manpage.Writer(), argv=["nipap-passwd.man.rst", "nipap-passwd.1"]) except OSError as exc: - print >> sys.stderr, "rst2man failed to run:", str(exc) + print("rst2man failed to run: %s" % str(exc), file=sys.stderr) sys.exit(1) files = [ - ('/etc/nipap/', ['nipap.conf.dist']), - ('/usr/sbin/', ['nipapd', 'nipap-passwd']), - ('/usr/share/nipap/sql/', [ - 'sql/upgrade-1-2.plsql', - 'sql/upgrade-2-3.plsql', - 'sql/upgrade-3-4.plsql', - 'sql/upgrade-4-5.plsql', - 'sql/upgrade-5-6.plsql', - 'sql/upgrade-6-7.plsql', - 'sql/functions.plsql', - 'sql/triggers.plsql', - 'sql/ip_net.plsql' - ]), - ('/usr/share/man/man8/', ['nipapd.8']), - ('/usr/share/man/man1/', ['nipap-passwd.1']) - ] + ('/etc/nipap/', ['nipap.conf.dist']), + ('/usr/sbin/', ['nipapd', 'nipap-passwd']), + ('/usr/share/nipap/sql/', [ + 'sql/upgrade-1-2.plsql', + 'sql/upgrade-2-3.plsql', + 'sql/upgrade-3-4.plsql', + 'sql/upgrade-4-5.plsql', + 'sql/upgrade-5-6.plsql', + 'sql/upgrade-6-7.plsql', + 'sql/functions.plsql', + 'sql/triggers.plsql', + 'sql/ip_net.plsql', + ], + ), + ('/usr/share/man/man8/', ['nipapd.8']), + ('/usr/share/man/man1/', ['nipap-passwd.1']), + ] return files long_desc = open('README.rst').read() short_desc = long_desc.split('\n')[0].split(' - ')[1].strip() +with open('requirements.txt', 'r') as f: + + requires = [re.sub(r'\s*([\w_\-\.\d]+([<>=]+\S+|)).*', r'\1', x.strip()) for x in f if + x.strip() and re.match(r'^\s*\w+', x.strip())] setup( - name = 'nipap', - version = nipap.__version__, - description = short_desc, - long_description = long_desc, - author = nipap.__author__, - author_email = nipap.__author_email__, - license = nipap.__license__, - url = nipap.__url__, - packages = ['nipap'], - keywords = ['nipap'], - requires = ['ldap', 'sqlite3', 'IPy', 'psycopg2', 'parsedatetime'], - data_files = get_data_files(), - classifiers = [ + name='nipap', + version=nipap.__version__, + description=short_desc, + long_description=long_desc, + author=nipap.__author__, + author_email=nipap.__author_email__, + license=nipap.__license__, + url=nipap.__url__, + packages=['nipap'], + keywords=['nipap'], + install_requires=requires, + data_files=get_data_files(), + classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', @@ -62,7 +68,7 @@ def get_data_files(): 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: POSIX :: Linux', - 'Programming Language :: Python :: 2.6', - 'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware' - ] + 'Programming Language :: Python :: 3.6', + 'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware', + ], ) diff --git a/nipap/wait-for-it.sh b/nipap/wait-for-it.sh new file mode 100755 index 000000000..071c2bee3 --- /dev/null +++ b/nipap/wait-for-it.sh @@ -0,0 +1,178 @@ +#!/usr/bin/env bash +# Use this script to test if a given TCP host/port are available + +WAITFORIT_cmdname=${0##*/} + +echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } + +usage() +{ + cat << USAGE >&2 +Usage: + $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args] + -h HOST | --host=HOST Host or IP under test + -p PORT | --port=PORT TCP port under test + Alternatively, you specify the host and port as host:port + -s | --strict Only execute subcommand if the test succeeds + -q | --quiet Don't output any status messages + -t 
TIMEOUT | --timeout=TIMEOUT + Timeout in seconds, zero for no timeout + -- COMMAND ARGS Execute command with args after the test finishes +USAGE + exit 1 +} + +wait_for() +{ + if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then + echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" + else + echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout" + fi + WAITFORIT_start_ts=$(date +%s) + while : + do + if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then + nc -z $WAITFORIT_HOST $WAITFORIT_PORT + WAITFORIT_result=$? + else + (echo > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1 + WAITFORIT_result=$? + fi + if [[ $WAITFORIT_result -eq 0 ]]; then + WAITFORIT_end_ts=$(date +%s) + echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds" + break + fi + sleep 1 + done + return $WAITFORIT_result +} + +wait_for_wrapper() +{ + # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 + if [[ $WAITFORIT_QUIET -eq 1 ]]; then + timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & + else + timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & + fi + WAITFORIT_PID=$! + trap "kill -INT -$WAITFORIT_PID" INT + wait $WAITFORIT_PID + WAITFORIT_RESULT=$? + if [[ $WAITFORIT_RESULT -ne 0 ]]; then + echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" + fi + return $WAITFORIT_RESULT +} + +# process arguments +while [[ $# -gt 0 ]] +do + case "$1" in + *:* ) + WAITFORIT_hostport=(${1//:/ }) + WAITFORIT_HOST=${WAITFORIT_hostport[0]} + WAITFORIT_PORT=${WAITFORIT_hostport[1]} + shift 1 + ;; + --child) + WAITFORIT_CHILD=1 + shift 1 + ;; + -q | --quiet) + WAITFORIT_QUIET=1 + shift 1 + ;; + -s | --strict) + WAITFORIT_STRICT=1 + shift 1 + ;; + -h) + WAITFORIT_HOST="$2" + if [[ $WAITFORIT_HOST == "" ]]; then break; fi + shift 2 + ;; + --host=*) + WAITFORIT_HOST="${1#*=}" + shift 1 + ;; + -p) + WAITFORIT_PORT="$2" + if [[ $WAITFORIT_PORT == "" ]]; then break; fi + shift 2 + ;; + --port=*) + WAITFORIT_PORT="${1#*=}" + shift 1 + ;; + -t) + WAITFORIT_TIMEOUT="$2" + if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi + shift 2 + ;; + --timeout=*) + WAITFORIT_TIMEOUT="${1#*=}" + shift 1 + ;; + --) + shift + WAITFORIT_CLI=("$@") + break + ;; + --help) + usage + ;; + *) + echoerr "Unknown argument: $1" + usage + ;; + esac +done + +if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then + echoerr "Error: you need to provide a host and port to test." + usage +fi + +WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15} +WAITFORIT_STRICT=${WAITFORIT_STRICT:-0} +WAITFORIT_CHILD=${WAITFORIT_CHILD:-0} +WAITFORIT_QUIET=${WAITFORIT_QUIET:-0} + +# check to see if timeout is from busybox? +WAITFORIT_TIMEOUT_PATH=$(type -p timeout) +WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH) +if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then + WAITFORIT_ISBUSY=1 + WAITFORIT_BUSYTIMEFLAG="-t" + +else + WAITFORIT_ISBUSY=0 + WAITFORIT_BUSYTIMEFLAG="" +fi + +if [[ $WAITFORIT_CHILD -gt 0 ]]; then + wait_for + WAITFORIT_RESULT=$? + exit $WAITFORIT_RESULT +else + if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then + wait_for_wrapper + WAITFORIT_RESULT=$? + else + wait_for + WAITFORIT_RESULT=$? 
+ fi +fi + +if [[ $WAITFORIT_CLI != "" ]]; then + if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then + echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess" + exit $WAITFORIT_RESULT + fi + exec "${WAITFORIT_CLI[@]}" +else + exit $WAITFORIT_RESULT +fi diff --git a/nipap/xml-test.py b/nipap/xml-test.py index a3078e7f6..249049eb6 100755 --- a/nipap/xml-test.py +++ b/nipap/xml-test.py @@ -1,14 +1,14 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # coding: utf-8 -import xmlrpclib +import xmlrpc.client import argparse import time import sys parser = argparse.ArgumentParser() -parser.add_argument('-p', '--port', dest='port', type='int', default='1337', help="TCP port") +parser.add_argument('-p', '--port', dest='port', type=int, default='1337', help="TCP port") parser.add_argument('-U', '--user') parser.add_argument('-P', '--password') @@ -16,52 +16,44 @@ cred = '' if args.user and args.password: - cred = args.user + ':' + args.password + '@' - -server_url = 'http://%(cred)s127.0.0.1:%(port)d/XMLRPC' % { 'port': args.port, 'cred': cred } -server = xmlrpclib.Server(server_url, allow_none=1); - -ad = { 'authoritative_source': 'nipap' } -query = { - 'val1': 'name', - 'operator': 'regex_match', - 'val2': '(foo|b.*)' - } - -res = server.list_vrf({ 'auth': ad, 'spec': {} }) -print res -#res = server.smart_search_prefix({ 'auth': ad, 'query_string': '', 'search_options': { 'include_all_parents': True } }) -#res = server.smart_search_prefix({ 'auth': ad, 'query_string': 'foo', 'search_options': { 'include_all_parents': True } }) -#res = server.smart_search_prefix({ 'auth': ad, 'query_string': 'foo', 'search_options': { 'include_all_parents': True } }) -#res = server.add_prefix({ 'spec': { 'prefix': '2.0.0.0/8' } }) -#print res -#res = server.smart_search_prefix({ 'auth': ad, 'query_string': 'test1', 'search_options': { 'include_all_parents': True, 'root_prefix': '1.0.4.0/24' } }) -#res = server.smart_search_prefix({ 'auth': ad, 'query_string': 'THISWILLNEVERMATCH', 'search_options': { 'include_all_parents': True, 'parent_prefix': 11963 } }) -#res = server.smart_search_prefix({ 'auth': ad, 'query_string': 'test1', 'search_options': { 'include_all_parents': True, 'parent_prefix': 'bajs' } }) + cred = args.user + ':' + args.password + '@' + +server_url = 'http://%(cred)s127.0.0.1:%(port)d/XMLRPC' % { + 'port': args.port, + 'cred': cred, +} +server = xmlrpc.client.Server(server_url, allow_none=1) + +ad = {'authoritative_source': 'nipap'} +query = {'val1': 'name', 'operator': 'regex_match', 'val2': '(foo|b.*)'} + +res = server.list_vrf({'auth': ad, 'spec': {}}) +print(res) +# res = server.smart_search_prefix({ 'auth': ad, 'query_string': '', 'search_options': { 'include_all_parents': True } }) +# res = server.smart_search_prefix({ 'auth': ad, 'query_string': 'foo', 'search_options': { 'include_all_parents': True } }) +# res = server.smart_search_prefix({ 'auth': ad, 'query_string': 'foo', 'search_options': { 'include_all_parents': True } }) +# res = server.add_prefix({ 'spec': { 'prefix': '2.0.0.0/8' } }) +# print res +# res = server.smart_search_prefix({ 'auth': ad, 'query_string': 'test1', 'search_options': { 'include_all_parents': True, 'root_prefix': '1.0.4.0/24' } }) +# res = server.smart_search_prefix({ 'auth': ad, 'query_string': 'THISWILLNEVERMATCH', 'search_options': { 'include_all_parents': True, 'parent_prefix': 11963 } }) +# res = server.smart_search_prefix({ 'auth': ad, 'query_string': 'test1', 'search_options': { 'include_all_parents': True, 'parent_prefix': 
'bajs' } }) for p in res['result']: - print p -#for p in res: + print(p) +# for p in res: # print res[p] - #print "".join(" " for i in xrange(p['indent'])), p['prefix'], p['match'] +# print "".join(" " for i in xrange(p['indent'])), p['prefix'], p['match'] -#res = server.list_pool({ 'auth': ad, 'pool': { 'id': 1003 } }) -#res = server.version() +# res = server.list_pool({ 'auth': ad, 'pool': { 'id': 1003 } }) +# res = server.version() sys.exit(0) -remove_query = { - 'auth': { - 'authoritative_source': 'kll' - }, - 'schema': { - 'id': 1 - } - } -#server.remove_schema(remove_query) -#print server.list_vrf({ 'auth': ad }) -#sys.exit(0) -#print server.add_vrf({ 'auth': { 'authoritative_source': 'kll' }, +remove_query = {'auth': {'authoritative_source': 'kll'}, 'schema': {'id': 1}} +# server.remove_schema(remove_query) +# print server.list_vrf({ 'auth': ad }) +# sys.exit(0) +# print server.add_vrf({ 'auth': { 'authoritative_source': 'kll' }, # 'attr': { # 'vrf': '1257:124', # 'name': 'test2', @@ -69,77 +61,69 @@ # } # } # ) -#print server.list_vrf({ 'auth': ad, 'vrf': {} }) -#print server.add_prefix({ 'auth': ad, 'attr': { +# print server.list_vrf({ 'auth': ad, 'vrf': {} }) +# print server.add_prefix({ 'auth': ad, 'attr': { # 'prefix': '1.0.0.0/24', # 'type': 'assignment', # 'description': 'test' # } # }) # -#print "All VRFs:" -#res = server.list_prefix({ 'auth': ad }) -#for p in res: +# print "All VRFs:" +# res = server.list_prefix({ 'auth': ad }) +# for p in res: # print "%10s %s" % (p['vrf_name'], p['prefix']) # -#print "VRF: test2" -#res = server.list_prefix({ 'auth': ad, +# print "VRF: test2" +# res = server.list_prefix({ 'auth': ad, # 'prefix': { # 'vrf': '1257:124' # } # }) -#for p in res: +# for p in res: # print "%10s %s" % (p['vrf_name'], p['prefix']) -#t0 = time.time() -#import sys -#ss = u'ballong' -#print "Type of search string:", type(ss) -#print ss -#res = server.search_schema({ 'operator': 'regex_match', 'val1': 'name', 'val2': 'test' }, { 'max_result': 500 }) +# t0 = time.time() +# import sys +# ss = u'ballong' +# print "Type of search string:", type(ss) +# print ss +# res = server.search_schema({ 'operator': 'regex_match', 'val1': 'name', 'val2': 'test' }, { 'max_result': 500 }) a = { - 'auth': { - 'authoritative_source': 'kll' - }, - 'query_string': 'test', - 'search_options': { - 'include_all_parents': True, - 'root_prefix': '1.3.0.0/16' - } - } + 'auth': {'authoritative_source': 'kll'}, + 'query_string': 'test', + 'search_options': {'include_all_parents': True, 'root_prefix': '1.3.0.0/16'}, +} res = server.smart_search_prefix(a) for p in res['result']: - print p['vrf_rt'], p['display_prefix'], p['description'], p['match'] -#res = server.smart_search_prefix('test', { 'root_prefix': '1.3.0.0/8', 'max_result': 500 }) -#t1 = time.time() -#d1 = t1-t0 -#print "Timing:", d1 -#print res + print((p['vrf_rt'], p['display_prefix'], p['description'], p['match'])) +# res = server.smart_search_prefix('test', { 'root_prefix': '1.3.0.0/8', 'max_result': 500 }) +# t1 = time.time() +# d1 = t1-t0 +# print "Timing:", d1 +# print res # # echo test # -#print "try the echo function without args" -#args = {} -#print "ARGS:", args -#print "RESULT:", server.echo() -#print "" +# print "try the echo function without args" +# args = {} +# print "ARGS:", args +# print "RESULT:", server.echo() +# print "" # -#print "try the echo function with a message argument" -#args = { 'message': 'Please reply to me, Obi-Wan Kenobi, you are my only hope!' 
} -#print "ARGS:", args -#print "RESULT:", server.echo( args ) -#print "" +# print "try the echo function with a message argument" +# args = { 'message': 'Please reply to me, Obi-Wan Kenobi, you are my only hope!' } +# print "ARGS:", args +# print "RESULT:", server.echo( args ) +# print "" # # try list function # -#print "try the list prefix function with a node argument" -#args = { 'node': 'kst5-core-3' } -#print "ARGS:", args -#print "RESULT:", server.list_prefix( args ) -#print "" - - - +# print "try the list prefix function with a node argument" +# args = { 'node': 'kst5-core-3' } +# print "ARGS:", args +# print "RESULT:", server.list_prefix( args ) +# print "" diff --git a/nipap/xmlbench.py b/nipap/xmlbench.py index 274ce883f..1b9fd9983 100755 --- a/nipap/xmlbench.py +++ b/nipap/xmlbench.py @@ -1,65 +1,68 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 from twisted.web.xmlrpc import Proxy from twisted.internet import reactor import sys import datetime -class Request(): + + +class Request: def __init__(self, url, method, params): self.url = url self.method = method self.params = params self.start_time = 0 self.end_time = 0 - self.value = "" + self.value = "" self.error = "" self.finished = False - self.error_file = open('errors.csv','w+') - + self.error_file = open('errors.csv', 'w+') def addCallback(self, callback): self.callback = callback - def addErrback(self, errback): self.errback = errback - def makeRequest(self): proxy = Proxy(self.url) - proxy.callRemote(self.method,*self.params).addCallbacks(self.retSuccess, self.retFail) + proxy.callRemote(self.method, *self.params).addCallbacks( + self.retSuccess, self.retFail + ) self.start_time = datetime.datetime.now() - def __returned(self): self.end_time = datetime.datetime.now() - def retSuccess(self, value): self.__returned() self.finished = True self.value = value - self.callback(self,value) - + self.callback(self, value) def retFail(self, error): self.__returned() self.finished = True self.error = error self.error_file.write("Error: %s" % error) - self.callback(self,error) - + self.callback(self, error) def isFinished(self): return self.finished - def getTime(self): - return (self.end_time - self.start_time) # this should be a timedelta - - -class Benchmark(): - def __init__(self, concurrent = 10, total = 100, url = 'http://localhost:7080/XMLRPC', method = 'date', params=None): + return self.end_time - self.start_time # this should be a timedelta + + +class Benchmark: + def __init__( + self, + concurrent=10, + total=100, + url='http://localhost:7080/XMLRPC', + method='date', + params=None, + ): if params is None: params = {} self.url = url @@ -69,13 +72,11 @@ def __init__(self, concurrent = 10, total = 100, url = 'http://localhost:7080/XM self.total_reqs = total self.open_reqs = 0 self.current_reqs = 0 - self.error_file = open('errors.csv','w+') - self.req_times_file = open('times.csv','w+') - + self.error_file = open('errors.csv', 'w+') + self.req_times_file = open('times.csv', 'w+') def makeLog(self, filename): - self.log_file = open(filename,'w+') - + self.log_file = open(filename, 'w+') def makeRequest(self): req = Request(self.url, self.method, self.params) @@ -84,47 +85,49 @@ def makeRequest(self): req.makeRequest() self.open_reqs = self.open_reqs + 1 - def printReqDetail(self, req): - #print "Request time: %d ms" % req.getTime().microseconds + # print "Request time: %d ms" % req.getTime().microseconds delta = req.getTime() - print delta - + print(delta) def reqFinished(self, req): self.printReqDetail(req) self.open_reqs = 
self.open_reqs - 1 - self.current_reqs = self.current_reqs + 1 # completed requests - if ((self.current_reqs + self.open_reqs) < self.total_reqs): + self.current_reqs = self.current_reqs + 1 # completed requests + if (self.current_reqs + self.open_reqs) < self.total_reqs: self.makeRequest() else: if self.open_reqs == 0: - reactor.stop() # made as many requests as we wanted to + reactor.stop() # made as many requests as we wanted to - - def reqSuccess(self,req,value): + def reqSuccess(self, req, value): self.reqFinished(req) - print repr(value) - + print(repr(value)) - def reqError(self,req, error): + def reqError(self, req, error): self.reqFinished(req) - #print 'error', error - + # print 'error', error def setupReqs(self): - for i in range(0,self.concurrent_reqs): # make the initial pool of requests + for i in range(0, self.concurrent_reqs): # make the initial pool of requests self.makeRequest() if __name__ == '__main__': import argparse + parser = argparse.ArgumentParser() - parser.add_argument('-p', '--port', dest='port', type='int', default='1337', help="TCP port") + parser.add_argument( + '-p', '--port', dest='port', type='int', default='1337', help="TCP port" + ) parser.add_argument('-U', '--user') parser.add_argument('-P', '--password') - parser.add_argument('--concurrent', type='int', default=10, help="Concurrent requests") - parser.add_argument('--total', type='int', default=100, help="Total number of requests") + parser.add_argument( + '--concurrent', type='int', default=10, help="Concurrent requests" + ) + parser.add_argument( + '--total', type='int', default=100, help="Total number of requests" + ) parser.add_argument('--method', help="XML-RPC method to benchmark") parser.add_argument('--args', help="Args to XML-RPC method") @@ -133,12 +136,20 @@ def setupReqs(self): cred = '' if args.user and args.password: cred = args.user + ':' + args.password + '@' - server_url = 'http://%(cred)s127.0.0.1:%(port)d/XMLRPC' % { 'port': args.port, 'cred': cred } - - ad = { 'authoritative_source': 'nipap' } - args = [{ 'auth': ad, 'message': 'test', 'sleep': 0.1 }] - args = [{ 'auth': ad, 'query_string': 'foo' }] - b = Benchmark(concurrent = args.concurrent, total = args.total, url = - server_url, method = args.method, params = args) + server_url = 'http://%(cred)s127.0.0.1:%(port)d/XMLRPC' % { + 'port': args.port, + 'cred': cred, + } + + ad = {'authoritative_source': 'nipap'} + args = [{'auth': ad, 'message': 'test', 'sleep': 0.1}] + args = [{'auth': ad, 'query_string': 'foo'}] + b = Benchmark( + concurrent=args.concurrent, + total=args.total, + url=server_url, + method=args.method, + params=args, + ) b.setupReqs() reactor.run() diff --git a/tests/nipapbase.py b/tests/nipapbase.py index 1cdfb917e..363d4eaf7 100755 --- a/tests/nipapbase.py +++ b/tests/nipapbase.py @@ -10,6 +10,7 @@ from nipap.authlib import SqliteAuth from nipap.nipapconfig import NipapConfig + class NipapTest(unittest.TestCase): """ Tests the NIPAP class """ From 5f42c961b84771491e626fd2e1b6c3098d36560b Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Sat, 9 Apr 2022 08:03:44 +0200 Subject: [PATCH 02/37] Misc fixes; style and typos --- nipap/nipap/authlib.py | 4 +- nipap/nipap/backend.py | 53 +++++++++---------- nipap/nipap/daemon.py | 4 +- nipap/nipap/rest.py | 4 +- nipap/nipap/smart_parsing.py | 100 +++++++++++++++++------------------ 5 files changed, 82 insertions(+), 83 deletions(-) diff --git a/nipap/nipap/authlib.py b/nipap/nipap/authlib.py index a3dc61c1d..aa273b6c7 100644 --- a/nipap/nipap/authlib.py +++ 
b/nipap/nipap/authlib.py @@ -214,7 +214,7 @@ def get_auth(self, username, password, authoritative_source, auth_options=None): # save auth object to cache self._auth_cache[auth_str] = { 'valid_until': datetime.utcnow() + timedelta(seconds=self._config.getint('auth', 'auth_cache_timeout')), - 'auth_object': auth, + 'auth_object': auth } return auth @@ -541,7 +541,7 @@ def authenticate(self): ['cn', 'memberOf'], ) if res[0][1]['cn'][0] is not None: - self.full_name = res[0][1]['cn'][0].decode('utf-8') + self.full_name = res[0][1]['cn'][0] # check for ro_group membership if ro_group is configured if self._ldap_ro_group: if self._ldap_ro_group in res[0][1].get('memberOf', []): diff --git a/nipap/nipap/backend.py b/nipap/nipap/backend.py index 7b3d193c0..c2e57e10d 100644 --- a/nipap/nipap/backend.py +++ b/nipap/nipap/backend.py @@ -547,7 +547,7 @@ '<<': '<<', 'contained_within': '<<', '<<=': '<<=', - 'contained_within_equals': '<<=', + 'contained_within_equals': '<<=' } """ Maps operators in a prefix query to SQL operators. """ @@ -850,7 +850,7 @@ def _execute(self, sql, opt=None, callno=0): m = re.search('invalid cidr value: "([^"]+)"', exc.pgerror) if m is not None: - strict_prefix = IPy.IP(m.group(1, make_net=True)) + strict_prefix = IPy.IP(m.group(1), make_net=True) estr = "Invalid prefix ({}); bits set to right of mask. Network address for current mask: {}" raise NipapValueError(estr.format(m.group(1), strict_prefix)) @@ -999,8 +999,8 @@ def _get_query_parts(self, query_str, search_options=None): # find query parts query_str_parts = [] try: - for part in shlex.split(query_str.encode('utf-8')): - query_str_parts.append({'string': part.decode('utf-8')}) + for part in shlex.split(query_str): + query_str_parts.append({'string': part}) except ValueError as exc: if str(exc) == 'No closing quotation': raise NipapValueError(str(exc)) @@ -1021,7 +1021,7 @@ def _get_db_version(self): dbname = self._cfg.get('nipapd', 'db_name') self._execute("SELECT description FROM pg_shdescription JOIN pg_database ON objoid = pg_database.oid " - "WHERE datname = '" + dbname + "'") + "WHERE datname = %(dbname)s", { "dbname": dbname }) comment = self._curs_pg.fetchone() if comment is None: raise NipapDatabaseNoVersionError("Could not find comment of psql database {}".format(dbname)) @@ -1103,7 +1103,7 @@ def _expand_vrf_query(self, query, table_name=None): name, that can be supplied via the table_name argument. """ - where = str() + where = "" opt = list() # handle table name, can be None @@ -1119,7 +1119,7 @@ def _expand_vrf_query(self, query, table_name=None): sub_where1, opt1 = self._expand_vrf_query(query['val1'], table_name) sub_where2, opt2 = self._expand_vrf_query(query['val2'], table_name) try: - where += " (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) + where += " ({} {} {}) ".format(sub_where1, _operation_map[query['operator']], sub_where2) except KeyError: raise NipapNoSuchOperatorError("No such operator {}".format(query['operator'])) @@ -1562,7 +1562,7 @@ def smart_search_vrf(self, auth, query_str, search_options=None, extra_query=Non 'search_options': search_options, 'result': [], 'error': True, - 'error_message': 'query interpretation failed', + 'error_message': 'query interpretation failed' } if extra_query is not None: @@ -1623,7 +1623,7 @@ def _expand_pool_query(self, query, table_name=None): name, that can be supplied via the table_name argument. 
""" - where = '' + where = "" opt = list() # handle table name, can be None @@ -1639,7 +1639,7 @@ def _expand_pool_query(self, query, table_name=None): sub_where1, opt1 = self._expand_pool_query(query['val1'], table_name) sub_where2, opt2 = self._expand_pool_query(query['val2'], table_name) try: - where += " (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) + where += " ({} {} {}) ".format(sub_where1, _operation_map[query['operator']], sub_where2) except KeyError: raise NipapNoSuchOperatorError("No such operator {}".format(query['operator'])) @@ -1921,7 +1921,7 @@ def edit_pool(self, auth, spec, attr): 'username': auth.username, 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, - 'authoritative_source': auth.authoritative_source, + 'authoritative_source': auth.authoritative_source } for p in pools: audit_params['pool_id'] = p['id'] @@ -2138,7 +2138,7 @@ def smart_search_pool(self, auth, query_str, search_options=None, extra_query=No 'search_options': search_options, 'result': [], 'error': True, - 'error_message': 'query interpretation failed', + 'error_message': 'query interpretation failed' } if extra_query is not None: @@ -2235,7 +2235,7 @@ def _expand_prefix_query(self, query, table_name=None): name, that can be supplied via the table_name argument. """ - where = '' + where = "" opt = list() # handle table name, can be None @@ -2244,9 +2244,9 @@ def _expand_prefix_query(self, query, table_name=None): else: col_prefix = table_name + "." - if 'val1' not in query or query['val1'] is None: + if 'val1' not in query: raise NipapMissingInputError("'val1' must be specified") - if 'val2' not in query or query['val2'] is None: + if 'val2' not in query: raise NipapMissingInputError("Value (val2 in API) for '{}' must be specified".format(query['val1'])) if isinstance(query['val1'], dict) and isinstance(query['val2'], dict): @@ -2256,7 +2256,7 @@ def _expand_prefix_query(self, query, table_name=None): sub_where1, opt1 = self._expand_prefix_query(query['val1'], table_name) sub_where2, opt2 = self._expand_prefix_query(query['val2'], table_name) try: - where += " (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) + where += " ({} {} {}) ".format(sub_where1, _operation_map[query['operator']], sub_where2) except KeyError: raise NipapNoSuchOperatorError("No such operator {}".format(query['operator'])) @@ -2291,7 +2291,6 @@ def _expand_prefix_query(self, query, table_name=None): 'contained_within', 'contained_within_equals', ): - # NOTE: removed col_prefix since it wasn't used where = " iprange(prefix) " + _operation_map[query['operator']] + " %s " elif query['operator'] in ('equals_any',): @@ -2501,7 +2500,7 @@ def add_prefix(self, auth, attr, args=None): audit_params['pool_id'] = pool['id'] audit_params['pool_name'] = pool['name'] audit_params['description'] = 'Pool ' + pool['name'] + ' expanded with prefix ' + prefix[ - 'prefix'] + ' in VRF ' + prefix['vrf_rt'] + 'prefix'] + ' in VRF ' + str(prefix['vrf_rt']) sql, params = self._sql_expand_insert(audit_params) self._execute('INSERT INTO ip_net_log ' + sql, params) @@ -2585,7 +2584,7 @@ def edit_prefix(self, auth, spec, attr): 'username': auth.username, 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, - 'authoritative_source': auth.authoritative_source, + 'authoritative_source': auth.authoritative_source } for p in prefixes: @@ -2945,12 +2944,12 @@ def remove_prefix(self, auth, spec, recursive=False): 'username': auth.username, 'authenticated_as': 
auth.authenticated_as, 'full_name': auth.full_name, - 'authoritative_source': auth.authoritative_source, + 'authoritative_source': auth.authoritative_source } for p in prefixes: audit_params['prefix_id'] = p['id'] audit_params['prefix_prefix'] = p['prefix'] - audit_params['description'] = 'Removed prefix {}'.format(p['prefix']) + audit_params['description'] = 'Removed prefix ' + p['prefix'] audit_params['vrf_id'] = p['vrf_id'] audit_params['vrf_rt'] = p['vrf_rt'] audit_params['vrf_name'] = p['vrf_name'] @@ -2968,7 +2967,7 @@ def remove_prefix(self, auth, spec, recursive=False): 'username': auth.username, 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, - 'authoritative_source': auth.authoritative_source, + 'authoritative_source': auth.authoritative_source } sql, params = self._sql_expand_insert(audit_params2) self._execute('INSERT INTO ip_net_log ' + sql, params) @@ -3192,7 +3191,7 @@ def search_prefix(self, auth, query, search_options=None): search_options['parent_prefix'] = None else: try: - _ = int(search_options['parent_prefix']) + int(search_options['parent_prefix']) except ValueError: raise NipapValueError("Invalid value '{}' for option 'parent_prefix'. " "Must be the ID of a prefix.".format(search_options['parent_prefix'])) @@ -3425,7 +3424,7 @@ def smart_search_prefix(self, auth, query_str, search_options=None, extra_query= 'search_options': search_options, 'result': [], 'error': True, - 'error_message': 'query interpretation failed', + 'error_message': 'query interpretation failed' } if extra_query is not None: @@ -3711,7 +3710,7 @@ def remove_asn(self, auth, asn): for a in asns: audit_params = { 'username': auth.username, - 'authenticated_as': auth.authenticated_ass, + 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, 'authoritative_source': auth.authoritative_source, 'description': 'Removed ASN %s' % a['asn'] @@ -3794,7 +3793,7 @@ def search_asn(self, auth, query, search_options=None): try: search_options['offset'] = int(search_options['offset']) except (ValueError, TypeError): - raise NipapValueError("Invalid value for option'offset'. Only integer values allowed.") + raise NipapValueError("Invalid value for option 'offset'. Only integer values allowed.") self._logger.debug('search_asn search_options: %s', search_options) @@ -3941,7 +3940,7 @@ def _parse_asn_query(self, query_str): }, 'operator': 'and', 'val1': query_part, - 'val2': query, + 'val2': query } return True, query diff --git a/nipap/nipap/daemon.py b/nipap/nipap/daemon.py index 215e9d3b3..90b57fdee 100644 --- a/nipap/nipap/daemon.py +++ b/nipap/nipap/daemon.py @@ -55,7 +55,7 @@ def createDaemon(): # to insure that the next call to os.setsid is successful. pid = os.fork() except OSError as exc: - raise Exception("{} [{0:d}]".format(exc.strerror, exc.errno)) + raise Exception("{} [{}]".format(exc.strerror, exc.errno)) if pid == 0: # The first child. # To become the session leader of this new session and the process group @@ -103,7 +103,7 @@ def createDaemon(): # a controlling terminal. pid = os.fork() # Fork a second child. except OSError as exc: - raise Exception("{} [{0:d}]".format(exc.strerror, exc.errno)) + raise Exception("{} [{}]".format(exc.strerror, exc.errno)) if pid == 0: # The second child. 
# Since the current working directory may be a mounted filesystem, we diff --git a/nipap/nipap/rest.py b/nipap/nipap/rest.py index b546bebb2..d0e626dae 100644 --- a/nipap/nipap/rest.py +++ b/nipap/nipap/rest.py @@ -14,9 +14,9 @@ from flask import Flask, request, Response, got_request_exception, jsonify from flask_restful import Resource, Api, abort -from backend import Nipap, NipapError +from .backend import Nipap, NipapError import nipap -from authlib import AuthFactory, AuthError +from .authlib import AuthFactory, AuthError def setup(app): api = Api(app, prefix="/rest/v1") diff --git a/nipap/nipap/smart_parsing.py b/nipap/nipap/smart_parsing.py index 1bd91e09c..7959c6553 100644 --- a/nipap/nipap/smart_parsing.py +++ b/nipap/nipap/smart_parsing.py @@ -193,7 +193,7 @@ def _ast_to_dictsql(self, input_ast): dss['interpretation'] = { 'interpretation': op, 'operator': op, - 'error': False, + 'error': False } continue @@ -294,8 +294,8 @@ def parse(self, input_string): 'attribute': 'text', 'operator': None, 'error': True, - 'error_message': None, - }, + 'error_message': None + } } if '"' in stripped_line or "'" in stripped_line: @@ -346,11 +346,11 @@ def _string_to_dictsql(self, part): 'interpretation': 'tag', 'attribute': 'tag', 'operator': 'equals_any', - 'error': False, + 'error': False }, 'operator': 'equals_any', 'val1': 'tags', - 'val2': part[0][1:], + 'val2': part[0][1:] } elif part.getName() == 'vrf_rt': @@ -376,19 +376,19 @@ def _string_to_dictsql(self, part): 'interpretation': 'text', 'operator': 'regex', 'string': part.vrf_rt, - 'error': False, + 'error': False }, 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'name', - 'val2': part.vrf_rt, + 'val2': part.vrf_rt }, 'val2': { 'operator': 'regex_match', 'val1': 'description', - 'val2': part.vrf_rt, - }, + 'val2': part.vrf_rt + } } else: @@ -399,15 +399,15 @@ def _string_to_dictsql(self, part): 'interpretation': 'text', 'operator': 'regex', 'string': part[0], - 'error': False, + 'error': False }, 'operator': 'or', 'val1': {'operator': 'regex_match', 'val1': 'name', 'val2': part[0]}, 'val2': { 'operator': 'regex_match', 'val1': 'description', - 'val2': part[0], - }, + 'val2': part[0] + } } return dictsql @@ -443,7 +443,7 @@ class PrefixSmartParser(SmartParser): 'type': ['assignment', 'host', 'reservation'], 'used_addreses': True, 'vlan': True, - 'vrf': True, + 'vrf': True } def _string_to_dictsql(self, part): @@ -459,15 +459,15 @@ def _string_to_dictsql(self, part): 'interpretation': '(inherited) tag', 'attribute': 'tag', 'operator': 'equals_any', - 'error': False, + 'error': False }, 'operator': 'or', 'val1': {'operator': 'equals_any', 'val1': 'tags', 'val2': part[0][1:]}, 'val2': { 'operator': 'equals_any', 'val1': 'inherited_tags', - 'val2': part[0][1:], - }, + 'val2': part[0][1:] + } } elif part.getName() == 'vrf_rt': @@ -478,11 +478,11 @@ def _string_to_dictsql(self, part): 'interpretation': 'vrf_rt', 'operator': 'equals', 'string': part.vrf_rt, - 'error': False, + 'error': False }, 'operator': 'equals', 'val1': 'vrf_rt', - 'val2': part.vrf_rt, + 'val2': part.vrf_rt } elif part.getName() == 'ipv6_address': @@ -493,11 +493,11 @@ def _string_to_dictsql(self, part): 'interpretation': 'IPv6 address', 'attribute': 'prefix', 'operator': 'contains_equals', - 'error': False, + 'error': False }, 'operator': 'contains_equals', 'val1': 'prefix', - 'val2': part.ipv6_address, + 'val2': part.ipv6_address } elif part.getName() == 'ipv6_prefix': @@ -509,7 +509,7 @@ def _string_to_dictsql(self, part): 'interpretation': 'IPv6 prefix', 
'attribute': 'prefix', 'operator': 'contained_within_equals', - 'error': False, + 'error': False } if part.ipv6_prefix[0] != strict_prefix: interp['strict_prefix'] = strict_prefix @@ -518,7 +518,7 @@ def _string_to_dictsql(self, part): 'interpretation': interp, 'operator': 'contained_within_equals', 'val1': 'prefix', - 'val2': strict_prefix, + 'val2': strict_prefix } else: @@ -543,7 +543,7 @@ def _string_to_dictsql(self, part): 'interpretation': 'IPv4 prefix', 'attribute': 'prefix', 'operator': 'contained_within_equals', - 'error': False, + 'error': False } if prefix != part[0]: @@ -556,7 +556,7 @@ def _string_to_dictsql(self, part): 'interpretation': interp, 'operator': 'contained_within_equals', 'val1': 'prefix', - 'val2': strict_prefix, + 'val2': strict_prefix } # IPv4 address @@ -571,11 +571,11 @@ def _string_to_dictsql(self, part): 'interpretation': 'IPv4 address', 'attribute': 'prefix', 'operator': 'contains_equals', - 'error': False, + 'error': False }, 'operator': 'contains_equals', 'val1': 'prefix', - 'val2': address, + 'val2': address } else: @@ -587,7 +587,7 @@ def _string_to_dictsql(self, part): 'interpretation': 'text', 'attribute': 'description or comment or node or order_id or customer_id', 'operator': 'regex', - 'error': False, + 'error': False }, 'operator': 'or', 'val1': { @@ -599,31 +599,31 @@ def _string_to_dictsql(self, part): 'val1': { 'operator': 'regex_match', 'val1': 'comment', - 'val2': part[0], + 'val2': part[0] }, 'val2': { 'operator': 'regex_match', 'val1': 'description', - 'val2': part[0], - }, + 'val2': part[0] + } }, 'val2': { 'operator': 'regex_match', 'val1': 'node', - 'val2': part[0], - }, + 'val2': part[0] + } }, 'val2': { 'operator': 'regex_match', 'val1': 'order_id', - 'val2': part[0], - }, + 'val2': part[0] + } }, 'val2': { 'operator': 'regex_match', 'val1': 'customer_id', - 'val2': part[0], - }, + 'val2': part[0] + } } return dictsql @@ -641,7 +641,7 @@ class VrfSmartParser(SmartParser): 'total_addresses_v4': True, 'total_addresses_v6': True, 'used_addresses_v4': True, - 'used_addresses_v6': True, + 'used_addresses_v6': True } def _string_to_dictsql(self, part): @@ -657,11 +657,11 @@ def _string_to_dictsql(self, part): 'interpretation': 'tag', 'attribute': 'tag', 'operator': 'equals_any', - 'error': False, + 'error': False }, 'operator': 'equals_any', 'val1': 'tags', - 'val2': part[0][1:], + 'val2': part[0][1:] } elif part.getName() == 'vrf_rt': @@ -687,7 +687,7 @@ def _string_to_dictsql(self, part): 'interpretation': 'text', 'attribute': 'vrf or name or description', 'operator': 'regex', - 'error': False, + 'error': False }, 'operator': 'or', 'val1': { @@ -695,15 +695,15 @@ def _string_to_dictsql(self, part): 'val1': { 'operator': 'regex_match', 'val1': 'name', - 'val2': part.vrf_rt, + 'val2': part.vrf_rt }, 'val2': { 'operator': 'regex_match', 'val1': 'description', - 'val2': part.vrf_rt, + 'val2': part.vrf_rt }, }, - 'val2': {'operator': 'regex_match', 'val1': 'rt', 'val2': part.vrf_rt}, + 'val2': {'operator': 'regex_match', 'val1': 'rt', 'val2': part.vrf_rt} } else: @@ -714,7 +714,7 @@ def _string_to_dictsql(self, part): 'interpretation': 'text', 'attribute': 'vrf or name or description', 'operator': 'regex', - 'error': False, + 'error': False }, 'operator': 'or', 'val1': { @@ -722,15 +722,15 @@ def _string_to_dictsql(self, part): 'val1': { 'operator': 'regex_match', 'val1': 'name', - 'val2': part[0], + 'val2': part[0] }, 'val2': { 'operator': 'regex_match', 'val1': 'description', - 'val2': part[0], - }, + 'val2': part[0] + } }, - 'val2': 
{'operator': 'regex_match', 'val1': 'rt', 'val2': part[0]},
+            'val2': {'operator': 'regex_match', 'val1': 'rt', 'val2': part[0]}
         }

         return dictsql

@@ -750,8 +750,8 @@ class ParserError(Exception):
     logger.setLevel(logging.DEBUG)

     p = VrfSmartParser()
-    # dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar")')
-    # dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar"))')
+    #dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar")')
+    #dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar"))')

     import sys
     dictsql = p.parse(' '.join(sys.argv[1:]))

From 8b49a146382930234b38f559b2e382d4cbfc4da6 Mon Sep 17 00:00:00 2001
From: Lukas Garberg
Date: Thu, 10 Nov 2022 08:08:26 +0100
Subject: [PATCH 03/37] nipapd: Moved config defaults

Moved config defaults from nipapd to nipapconfig module. Also updated
handling of missing values in the config file, as None is no longer
permitted by the configparser module.
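A minimal sketch of the configparser behaviour this works around (an
illustration, not part of the diff below; under Python 3 default values
must be strings, where Python 2's ConfigParser also accepted None):

    import configparser

    configparser.ConfigParser({'pid_file': ''})        # accepted: value is a string
    try:
        configparser.ConfigParser({'pid_file': None})  # Python 2 allowed this
    except TypeError as exc:
        print(exc)                                     # "option values must be strings"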
---
 nipap/nipap/nipapconfig.py | 30 +++++++++++++++++++++++++-----
 nipap/nipapd               | 30 ++++--------------------------
 2 files changed, 29 insertions(+), 31 deletions(-)

diff --git a/nipap/nipap/nipapconfig.py b/nipap/nipap/nipapconfig.py
index 00591bf6f..916ba569d 100644
--- a/nipap/nipap/nipapconfig.py
+++ b/nipap/nipap/nipapconfig.py
@@ -1,5 +1,28 @@
 import configparser

+DEFAULT = {
+    'syslog': 'false',
+    'debug': 'false',
+    'foreground': 'false',
+    'forks': 0,
+    'pid_file': '',
+    'listen': '127.0.0.1',
+    'port': '1337',
+    'ssl_port': '',
+    'ssl_cert_file': '',
+    'ssl_key_file': '',
+    'db_host': 'localhost',
+    'db_name': 'nipap',
+    'db_port': '',
+    'db_user': 'nipap',
+    'db_pass': 'papin',
+    'db_sslmode': 'require',
+    'auth_cache_timeout': '3600',
+    'user': '',
+    'group': ''
+}
+
+
 class NipapConfig(configparser.ConfigParser):
     """ Makes configuration data available.
@@ -13,20 +36,17 @@ class NipapConfig(configparser.ConfigParser):
     _config = None
     _cfg_path = None

-    def __init__(self, cfg_path=None, default=None):
+    def __init__(self, cfg_path=None):
         """ Takes config file path and command line arguments.
         """

         self.__dict__ = self.__shared_state

-        if default is None:
-            default = {}
-
         if len(self.__shared_state) == 0:
             # First time - create new instance!
             self._cfg_path = cfg_path

-            configparser.ConfigParser.__init__(self, default, inline_comment_prefixes=";#")
+            configparser.ConfigParser.__init__(self, DEFAULT, inline_comment_prefixes=";#")

             self.read_file()

diff --git a/nipap/nipapd b/nipap/nipapd
index d52b26b22..73a878f98 100755
--- a/nipap/nipapd
+++ b/nipap/nipapd
@@ -121,30 +121,8 @@ if __name__ == '__main__':
     logging.basicConfig(format=LOG_FORMAT)
     logger = logging.getLogger()

-    default = {
-        'syslog': 'false',
-        'debug': 'false',
-        'foreground': 'false',
-        'forks': 0,
-        'pid_file': None,
-        'listen': '127.0.0.1',
-        'port': '1337',
-        'ssl_port': None,
-        'ssl_cert_file': None,
-        'ssl_key_file': None,
-        'db_host': 'localhost',
-        'db_name': 'nipap',
-        'db_port': None,
-        'db_user': 'nipap',
-        'db_pass': 'papin',
-        'db_sslmode': 'require',
-        'auth_cache_timeout': '3600',
-        'user': None,
-        'group': None
-    }
-
     try:
-        cfg = NipapConfig(args.config_file, default)
+        cfg = NipapConfig(args.config_file)
     except NipapConfigError as exc:
         if args.config_file:
             print("The specified configuration file ('" + args.config_file + "') does not exist", file=sys.stderr)
             sys.exit(1)
@@ -164,7 +142,7 @@ if __name__ == '__main__':

     # Validate configuration before forking, to be able to print error message to user
     setup_plaintext = cfg.get('nipapd', 'port') != ''
-    setup_ssl = cfg.get('nipapd', 'ssl_port') is not None and cfg.get('nipapd', 'ssl_port') != ''
+    setup_ssl = cfg.get('nipapd', 'ssl_port') != ''
     if not setup_plaintext and not setup_ssl:
         print >> sys.stderr, "ERROR: Configured to listen to neither plaintext nor SSL"
         sys.exit(1)
@@ -173,9 +151,9 @@ if __name__ == '__main__':
         sys.exit(1)

     # drop privileges
-    if cfg.get('nipapd', 'user') is not None:
+    if cfg.get('nipapd', 'user') != '':
         run_user = cfg.get('nipapd', 'user')
-        if cfg.get('nipapd', 'group') is not None:
+        if cfg.get('nipapd', 'group') != '':
             run_group = cfg.get('nipapd', 'group')
         else:
             run_group = cfg.get('nipapd', 'user')

From 95a6fea50703ecf78546403ec26951630da5c2ce Mon Sep 17 00:00:00 2001
From: Lukas Garberg
Date: Thu, 10 Nov 2022 16:06:16 +0100
Subject: [PATCH 04/37] nipapd: Handle xmlrpc.client.DateTime

xmlrpc.client provides a DateTime object for datetimes sent via
XML-RPC. Under Python 3 this breaks the current handling, which has
been adapted accordingly.
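As a short illustration (not part of the diff below), the wrapper
carries the timestamp as an ISO 8601 string, which has to be unwrapped
via .value before it can be handled as before:

    import xmlrpc.client

    dt = xmlrpc.client.DateTime('20221110T16:06:16')
    print(dt.value)   # '20221110T16:06:16' -- a str, not a datetime.datetime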
""" try: - res = self.nip.edit_prefix(args.get('auth'), args.get('prefix'), args.get('attr')) + attr = args.get('attr') + if attr is not None and 'expires' in attr and isinstance(attr['expires'], xmlrpc.client.DateTime): + attr['expires'] = attr['expires'].value + res = self.nip.edit_prefix(args.get('auth'), args.get('prefix'), attr) # mangle result for prefix in res: prefix = _mangle_prefix(prefix) From 3b6b8bb32b59aac4052748b2ea0c85542a12261d Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Thu, 10 Nov 2022 23:19:57 +0100 Subject: [PATCH 05/37] whoisd: Updated for Python 3 compatiiblity --- whoisd/nipap-whoisd | 55 ++++++++++++++++++++---------------------- whoisd/nipap_whoisd.py | 10 ++++---- 2 files changed, 31 insertions(+), 34 deletions(-) diff --git a/whoisd/nipap-whoisd b/whoisd/nipap-whoisd index e3ba10a4b..9fd6e38a4 100755 --- a/whoisd/nipap-whoisd +++ b/whoisd/nipap-whoisd @@ -1,6 +1,4 @@ -#!/usr/bin/env python - -from __future__ import unicode_literals +#!/usr/bin/env python3 import fcntl import os @@ -8,9 +6,9 @@ import re import socket import sys -import ConfigParser -import SocketServer -from SocketServer import ForkingMixIn, TCPServer +import configparser +import socketserver +from socketserver import ForkingMixIn, TCPServer import nipap_whoisd @@ -63,10 +61,10 @@ class ForkingTCPServer(ForkingMixIn, TCPServer): def server_bind(self): self.socket = socket.socket(socket.AF_INET6, self.socket_type) self.socket.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, False) - SocketServer.TCPServer.server_bind(self) + socketserver.TCPServer.server_bind(self) -class WhoisServer(SocketServer.StreamRequestHandler): +class WhoisServer(socketserver.StreamRequestHandler): def handle(self): """ Called for every connection """ @@ -147,20 +145,19 @@ if __name__ == '__main__': args = parser.parse_args() if args.version: - print "nipap-whoisd version:", nipap_whoisd.__version__ + print("nipap-whoisd version:", nipap_whoisd.__version__) sys.exit(0) default = { - 'pid_file': None, + 'pid_file': '', 'listen': '0.0.0.0', 'port': '43', 'nipapd_host': 'localhost', 'nipapd_port': '1337', - 'nipapd_username': None, - 'user': None, - 'group': None + 'user': '', + 'group': '' } - cfg = ConfigParser.ConfigParser(default) + cfg = configparser.ConfigParser(default) cfg.read(args.config_file) # override config with command line args @@ -171,29 +168,29 @@ if __name__ == '__main__': if args.pid_file: cfg.set('whoisd', 'pid_file', args.pid_file) - SocketServer.TCPServer.allow_reuse_address = True + socketserver.TCPServer.allow_reuse_address = True try: server = ForkingTCPServer((cfg.get('whoisd', 'listen'), int(cfg.get('whoisd', 'port'))), WhoisServer) - except socket.error, exc: - print >> sys.stderr, "Unable to bind to socket", str(exc) + except socket.error as exc: + print("Unable to bind to socket", str(exc), file=sys.stderr) sys.exit(1) # drop privileges - if cfg.get('whoisd', 'user') is not None: + if cfg.get('whoisd', 'user') != '': run_user = cfg.get('whoisd', 'user') - if cfg.get('whoisd', 'group') is not None: + if cfg.get('whoisd', 'group') != '': run_group = cfg.get('whoisd', 'group') else: run_group = cfg.get('whoisd', 'user') try: nipap_whoisd.drop_privileges(run_user, run_group) - except Exception, e: - print >> sys.stderr, ("nipap-whoisd is configured to drop privileges and run as user '%s' and group '%s', \n" - "but was not started as root and can therefore not drop privileges") % (run_user, run_group) + except Exception as e: + print(("nipap-whoisd is configured to drop privileges and run 
as user '%s' and group '%s', \n" + "but was not started as root and can therefore not drop privileges") % (run_user, run_group), file=sys.stderr) sys.exit(1) except KeyError: - print >> sys.stderr, "Could not drop privileges to user '%s' and group '%s'" % (run_user, run_group) + print("Could not drop privileges to user '%s' and group '%s'" % (run_user, run_group), file=sys.stderr) sys.exit(1) # daemonize @@ -201,20 +198,20 @@ if __name__ == '__main__': nipap_whoisd.createDaemon() # pid file handling - if cfg.get('whoisd', 'pid_file') and not args.no_pid_file: + if cfg.get('whoisd', 'pid_file') != '' and not args.no_pid_file: # need a+ to be able to read PID from file try: lf = open(cfg.get('whoisd', 'pid_file'), 'a+', 0) - except IOError, exc: - print >> sys.stderr, "Unable to open PID file '%s': %s" % ( - str(exc.filename), str(exc.strerror)) + except IOError as exc: + print("Unable to open PID file '%s': %s" % ( + str(exc.filename), str(exc.strerror)), file=sys.stderr) sys.exit(1) try: fcntl.flock(lf, fcntl.LOCK_EX | fcntl.LOCK_NB) except IOError: - print >> sys.stderr, 'nipap-whoisd already running (pid: %s)' % lf.read().strip() + print('nipap-whoisd already running (pid: %s)' % lf.read().strip(), file=sys.stderr) sys.exit(1) - print >> sys.stderr, 'Writing PID to file: %s' % cfg.get('whoisd', 'pid_file') + print('Writing PID to file: %s' % cfg.get('whoisd', 'pid_file'), file=sys.stderr) lf.truncate() lf.write('%d\n' % os.getpid()) lf.flush() diff --git a/whoisd/nipap_whoisd.py b/whoisd/nipap_whoisd.py index 05071dfa9..4aeb6773c 100644 --- a/whoisd/nipap_whoisd.py +++ b/whoisd/nipap_whoisd.py @@ -37,8 +37,8 @@ def createDaemon(): # and inherits the parent's process group ID. This step is required # to insure that the next call to os.setsid is successful. pid = os.fork() - except OSError, e: - raise Exception, "%s [%d]" % (e.strerror, e.errno) + except OSError as e: + raise Exception("%s [%d]" % (e.strerror, e.errno)) if (pid == 0): # The first child. # To become the session leader of this new session and the process group @@ -85,8 +85,8 @@ def createDaemon(): # longer a session leader, preventing the daemon from ever acquiring # a controlling terminal. pid = os.fork() # Fork a second child. - except OSError, e: - raise Exception, "%s [%d]" % (e.strerror, e.errno) + except OSError as e: + raise Exception("%s [%d]" % (e.strerror, e.errno)) if (pid == 0): # The second child. # Since the current working directory may be a mounted filesystem, we @@ -182,4 +182,4 @@ def drop_privileges(uid_name='nobody', gid_name='nogroup'): os.setuid(uid) # Ensure a very conservative umask - old_umask = os.umask(077) + old_umask = os.umask(0o77) From b45f304f0c263c09ff7048f34f8f1366770fda65 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Thu, 10 Nov 2022 08:11:54 +0100 Subject: [PATCH 06/37] tests: Updated XML-RPC tests for Python3 compat Updated the XML-RPC tests to run on Python 3. The file needed to be renamed as the old name caused a collision with the built-in xmlrpc module. 
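To make the collision concrete (an illustration, not part of the diff
below; it assumes the old tests/xmlrpc.py is still in place and that the
test runner puts the tests directory on sys.path):

    import sys
    sys.path.insert(0, 'tests')   # roughly what the test runner does

    import xmlrpc.client
    # ModuleNotFoundError: No module named 'xmlrpc.client';
    # 'xmlrpc' is not a package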
--- .github/workflows/ci.yml | 2 +- tests/{xmlrpc.py => test_xmlrpc.py} | 92 ++++++++++++++--------------- 2 files changed, 47 insertions(+), 47 deletions(-) rename tests/{xmlrpc.py => test_xmlrpc.py} (91%) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 16c629c05..2293fca24 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -150,7 +150,7 @@ jobs: env: REQUESTS_CA_BUNDLE: /etc/ssl/certs/ca-certificates.crt run: | - nosetests tests/xmlrpc.py + nosetests tests/test_xmlrpc.py nosetests tests/nipaptest.py nosetests tests/test_cli.py nosetests tests/nipap-ro.py diff --git a/tests/xmlrpc.py b/tests/test_xmlrpc.py similarity index 91% rename from tests/xmlrpc.py rename to tests/test_xmlrpc.py index ca51070ec..6d893a543 100755 --- a/tests/xmlrpc.py +++ b/tests/test_xmlrpc.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # vim: et : # @@ -25,10 +25,10 @@ logger.setLevel(logging.DEBUG) log_format = "%(levelname)-8s %(message)s" -import xmlrpclib +import xmlrpc.client server_url = "http://unittest:gottatest@127.0.0.1:1337/XMLRPC" -s = xmlrpclib.Server(server_url, allow_none=1); +s = xmlrpc.client.Server(server_url, allow_none=1); ad = { 'authoritative_source': 'nipap' } @@ -333,11 +333,11 @@ def test_vrf_add_list(self): """ Add a VRF and verify result in database """ attr = {} - with self.assertRaisesRegexp(xmlrpclib.Fault, 'missing attribute rt'): + with self.assertRaisesRegex(xmlrpc.client.Fault, 'missing attribute rt'): s.add_vrf({ 'auth': ad, 'attr': attr }) attr['rt'] = '123:456' - with self.assertRaisesRegexp(xmlrpclib.Fault, 'missing attribute name'): + with self.assertRaisesRegex(xmlrpc.client.Fault, 'missing attribute name'): s.add_vrf({ 'auth': ad, 'attr': attr }) attr['name'] = 'test' attr['tags'] = [] @@ -354,7 +354,7 @@ def test_vrf_add_list(self): self.assertEqual(self._mangle_vrf_result(s.list_vrf({ 'auth': ad, 'vrf': { 'id': vrf['id'] } })), [ ref, ]) attr['rt'] = '123:abc' - with self.assertRaisesRegexp(xmlrpclib.Fault, '.'): # TODO: specify exception string + with self.assertRaisesRegex(xmlrpc.client.Fault, '.'): # TODO: specify exception string s.add_vrf({ 'auth': ad, 'attr': attr }) @@ -363,7 +363,7 @@ def test_vrf_edit_default(self): """ Edit the default VRF and verify the change """ # try to set an RT, which should fail on the default VRF - with self.assertRaisesRegexp(xmlrpclib.Fault, 'Invalid input for column rt, must be NULL for VRF id 0'): + with self.assertRaisesRegex(xmlrpc.client.Fault, 'Invalid input for column rt, must be NULL for VRF id 0'): s.edit_vrf({ 'auth': ad, 'vrf': { 'id': 0 }, 'attr': { 'rt': '123:456a' }}) res_edit = s.edit_vrf({ 'auth': ad, 'vrf': { 'id': 0 }, 'attr': { @@ -389,26 +389,26 @@ def test_vrf_edit(self): vrf = s.add_vrf({ 'auth': ad, 'attr': attr }) # omitting VRF spec - with self.assertRaisesRegexp(xmlrpclib.Fault, 'vrf specification must be a dict'): + with self.assertRaisesRegex(xmlrpc.client.Fault, 'vrf specification must be a dict'): s.edit_vrf({ 'auth': ad, 'attr': { 'name': 'test_vrf_edit' } }) # omitting VRF attributes - with self.assertRaisesRegexp(xmlrpclib.Fault, 'invalid input type, must be dict'): + with self.assertRaisesRegex(xmlrpc.client.Fault, 'invalid input type, must be dict'): s.edit_vrf({ 'auth': ad, 'vrf': spec }) # specifying too many attributes in spec - with self.assertRaisesRegexp(xmlrpclib.Fault, 'specification contains too many keys'): + with self.assertRaisesRegex(xmlrpc.client.Fault, 'specification contains too many keys'): s.edit_vrf({ 'auth': ad, 'vrf': { 
'rt': '65000:123', 'name': '65k:123' }, 'attr': {} }) # test changing ID - with self.assertRaisesRegexp(xmlrpclib.Fault, 'extraneous attribute'): + with self.assertRaisesRegex(xmlrpc.client.Fault, 'extraneous attribute'): s.edit_vrf({ 'auth': ad, 'vrf': spec, 'attr': { 'id': 1337 } }) # empty attribute list - with self.assertRaisesRegexp(xmlrpclib.Fault, "'attr' must not be empty."): + with self.assertRaisesRegex(xmlrpc.client.Fault, "'attr' must not be empty."): s.edit_vrf({ 'auth': ad, 'vrf': spec, 'attr': {} }) res = s.list_vrf({ 'auth': ad, 'vrf': spec }) - self.assertEquals(len(res), 1, 'wrong number of VRFs returned') + self.assertEqual(len(res), 1, 'wrong number of VRFs returned') res = res[0] del(res['id']) self.assertEqual(self._mangle_vrf_result(res), attr) @@ -422,7 +422,7 @@ def test_vrf_edit(self): # verify result of valid change res = s.list_vrf({ 'auth': ad, 'vrf': { 'rt': attr['rt'] } }) - self.assertEquals(len(res), 1, 'wrong number of VRFs returned') + self.assertEqual(len(res), 1, 'wrong number of VRFs returned') res = res[0] # ignore the ID del(res['id']) @@ -452,7 +452,7 @@ def test_vrf_add_search(self): 'val2': attr['rt'] } res = self._mangle_vrf_result(s.search_vrf({ 'auth': ad, 'query': q })) - self.assertEquals(res['result'], [ attr, ], 'Search result from equal match did not match') + self.assertEqual(res['result'], [ attr, ], 'Search result from equal match did not match') # regex match q = { @@ -461,11 +461,11 @@ def test_vrf_add_search(self): 'val2': 'instance 65000' } res = self._mangle_vrf_result(s.search_vrf({ 'auth': ad, 'query': q })) - self.assertEquals(res['result'], [ attr, ], 'Search result from regex match did not match') + self.assertEqual(res['result'], [ attr, ], 'Search result from regex match did not match') # smart search res = self._mangle_vrf_result(s.smart_search_vrf({ 'auth': ad, 'query_string': 'forwarding instance' })) - self.assertEquals(res['result'], [ attr, ], 'Smart search result did not match') + self.assertEqual(res['result'], [ attr, ], 'Smart search result did not match') @@ -474,15 +474,15 @@ def test_prefix_add(self): """ # check that some error / sanity checking is there attr = {} - with self.assertRaisesRegexp(xmlrpclib.Fault, "specify 'prefix' or 'from-prefix' or 'from-pool'"): + with self.assertRaisesRegex(xmlrpc.client.Fault, "specify 'prefix' or 'from-prefix' or 'from-pool'"): s.add_prefix({ 'auth': ad, 'attr': attr }) attr['prefix'] = '1.3.3.0/24' - with self.assertRaisesRegexp(xmlrpclib.Fault, "Either description or node must be specified."): + with self.assertRaisesRegex(xmlrpc.client.Fault, "Either description or node must be specified."): s.add_prefix({ 'auth': ad, 'attr': attr }) attr['description'] = 'test prefix' - with self.assertRaisesRegexp(xmlrpclib.Fault, "Unknown prefix type"): + with self.assertRaisesRegex(xmlrpc.client.Fault, "Unknown prefix type"): s.add_prefix({ 'auth': ad, 'attr': attr }) attr['type'] = 'assignment' @@ -846,13 +846,13 @@ def test_prefix_node(self): # node value is not allowed at all for prefixes of type reservation attr['type'] = 'reservation' - with self.assertRaisesRegexp(xmlrpclib.Fault, "Not allowed to set 'node' value for prefixes of type 'reservation'."): + with self.assertRaisesRegex(xmlrpc.client.Fault, "Not allowed to set 'node' value for prefixes of type 'reservation'."): s.add_prefix({ 'auth': ad, 'attr': attr }) # node value is only allowed for assignments when prefix-length is max # (/24 for IPv4 or /128 for IPv6). 
attr['type'] = 'assignment' - with self.assertRaisesRegexp(xmlrpclib.Fault, "Not allowed to set 'node' value for prefixes of type 'assignment' which do not have all bits set in netmask."): + with self.assertRaisesRegex(xmlrpc.client.Fault, "Not allowed to set 'node' value for prefixes of type 'assignment' which do not have all bits set in netmask."): s.add_prefix({ 'auth': ad, 'attr': attr }) # correct prefix length @@ -936,7 +936,7 @@ def test_prefix_add_to_pool(self): s.edit_prefix({ 'auth': ad, 'prefix': { 'id': prefix['id'] }, 'attr': { 'pool_name': 'pool_1' } }) res = s.list_pool({ 'auth': ad, 'pool': { 'id': pool['id'] } }) - self.assertEquals(res[0]['prefixes'], ['1.3.0.0/16', '1.4.0.0/16', + self.assertEqual(res[0]['prefixes'], ['1.3.0.0/16', '1.4.0.0/16', '1.5.0.0/16', '1.6.0.0/16']) @@ -993,7 +993,7 @@ def test_prefix_from_pool(self): child = s.add_prefix({ 'auth': ad, 'attr': prefix_attr, 'args': args }) #expected['id'] = child['id'] #p = s.list_prefix({ 'auth': ad, 'attr': { 'id': child['id'] } })[1] - #self.assertEquals(p, expected) + #self.assertEqual(p, expected) @@ -1065,7 +1065,7 @@ def test_prefix_from_pool_vrf(self): expected['id'] = child['id'] p = s.list_prefix({ 'auth': ad, 'attr': { 'id': child['id'] } })[1] p = self._mangle_prefix_result(p) - self.assertEquals(p, expected) + self.assertEqual(p, expected) @@ -1229,10 +1229,10 @@ def test_asn_add_list(self): asn = s.list_asn({ 'auth': ad, 'asn': { 'asn': 1 } }) self.assertEqual(len(asn), 1, "Wrong number of ASNs returned.") asn = asn[0] - self.assertEquals(attr, asn, "ASN in database not equal to what was added.") + self.assertEqual(attr, asn, "ASN in database not equal to what was added.") # adding the same ASN again should result in duplicate key error - with self.assertRaisesRegexp(xmlrpclib.Fault, 'Duplicate value for'): + with self.assertRaisesRegex(xmlrpc.client.Fault, 'Duplicate value for'): s.add_asn({ 'auth': ad, 'attr': attr }) @@ -1248,7 +1248,7 @@ def test_remove_asn(self): asn = s.add_asn({ 'auth': ad, 'attr': attr }) s.remove_asn({ 'auth': ad, 'asn': { 'asn': asn['asn'] } }) - self.assertEquals(0, len(s.list_asn({ 'auth': ad, 'asn': { 'asn': 2 } })), "Removed ASN still in database") + self.assertEqual(0, len(s.list_asn({ 'auth': ad, 'asn': { 'asn': 2 } })), "Removed ASN still in database") @@ -1263,8 +1263,8 @@ def test_edit_asn(self): asn = s.add_asn({ 'auth': ad, 'attr': attr }) s.edit_asn({ 'auth': ad, 'asn': { 'asn': attr['asn'] }, 'attr': { 'name': 'b0rk' } }) - self.assertEquals(s.list_asn({ 'auth': ad, 'asn': { 'asn': 3 } })[0]['name'], 'b0rk', "Edited ASN still has it's old name.") - with self.assertRaisesRegexp(xmlrpclib.Fault, 'extraneous attribute'): + self.assertEqual(s.list_asn({ 'auth': ad, 'asn': { 'asn': 3 } })[0]['name'], 'b0rk', "Edited ASN still has it's old name.") + with self.assertRaisesRegex(xmlrpc.client.Fault, 'extraneous attribute'): s.edit_asn({ 'auth': ad, 'asn': { 'asn': 3 }, 'attr': {'asn': 4, 'name': 'Test ASN #4'} }) @@ -1287,8 +1287,8 @@ def test_search_asn(self): 'val2': attr['asn'] } res = s.search_asn({ 'auth': ad, 'query': q }) - self.assertEquals(len(res['result']), 1, "equal search resulted in wrong number of hits") - self.assertEquals(res['result'][0]['name'], attr['name'], "search hit got wrong name") + self.assertEqual(len(res['result']), 1, "equal search resulted in wrong number of hits") + self.assertEqual(res['result'][0]['name'], attr['name'], "search hit got wrong name") # regexp match q = { @@ -1297,8 +1297,8 @@ def test_search_asn(self): 'val2': 'number' } 
res = s.search_asn({ 'auth': ad, 'query': q }) - self.assertEquals(len(res['result']), 1, "regex search resulted in wrong number of hits") - self.assertEquals(res['result'][0]['asn'], attr['asn'], "search hit got wrong asn") + self.assertEqual(len(res['result']), 1, "regex search resulted in wrong number of hits") + self.assertEqual(res['result'][0]['asn'], attr['asn'], "search hit got wrong asn") @@ -1313,14 +1313,14 @@ def test_smart_search_asn(self): asn = s.add_asn({ 'auth': ad, 'attr': attr }) res = s.smart_search_asn({ 'auth': ad, 'query_string': "Autonomous" }) - self.assertEquals(len(res['result']), 1, "search resulted in wrong number of hits") - self.assertEquals(res['result'][0]['asn'], attr['asn'], "search hit got wrong asn") - self.assertEquals(res['interpretation']['interpretation']['attribute'], 'name', 'search term interpreted as wrong type') + self.assertEqual(len(res['result']), 1, "search resulted in wrong number of hits") + self.assertEqual(res['result'][0]['asn'], attr['asn'], "search hit got wrong asn") + self.assertEqual(res['interpretation']['interpretation']['attribute'], 'name', 'search term interpreted as wrong type') res = s.smart_search_asn({ 'auth': ad, 'query_string': "5" }) - self.assertEquals(len(res['result']), 1, "search resulted in wrong number of hits") - self.assertEquals(res['result'][0]['asn'], attr['asn'], "search hit got wrong asn") - self.assertEquals(res['interpretation']['interpretation']['attribute'], 'asn', "search term interpretated as wrong type") + self.assertEqual(len(res['result']), 1, "search resulted in wrong number of hits") + self.assertEqual(res['result'][0]['asn'], attr['asn'], "search hit got wrong asn") + self.assertEqual(res['interpretation']['interpretation']['attribute'], 'asn', "search term interpretated as wrong type") @@ -1336,17 +1336,17 @@ def test_pool_add_list(self): 'ipv6_default_prefix_length': 112 } - with self.assertRaisesRegexp(xmlrpclib.Fault, 'missing attribute name'): + with self.assertRaisesRegex(xmlrpc.client.Fault, 'missing attribute name'): s.add_pool({ 'auth': ad, 'attr': attr }) attr['name'] = 'pool_1' attr['ipv4_default_prefix_length'] = 50 - with self.assertRaisesRegexp(xmlrpclib.Fault, '1200: \'Default IPv4 prefix length must be an integer between 1 and 32.'): + with self.assertRaisesRegex(xmlrpc.client.Fault, '1200: \'Default IPv4 prefix length must be an integer between 1 and 32.'): s.add_pool({ 'auth': ad, 'attr': attr }) attr['ipv4_default_prefix_length'] = 31 attr['ipv6_default_prefix_length'] = 'over 9000' - with self.assertRaisesRegexp(xmlrpclib.Fault, '1200: \'Default IPv6 prefix length must be an integer between 1 and 128.'): + with self.assertRaisesRegex(xmlrpc.client.Fault, '1200: \'Default IPv6 prefix length must be an integer between 1 and 128.'): s.add_pool({ 'auth': ad, 'attr': attr }) attr['ipv6_default_prefix_length'] = 112 @@ -1363,10 +1363,10 @@ def test_pool_add_list(self): # list pool and verify data in NIPAP p = s.list_pool({ 'auth': ad, 'pool': { 'id': expected['id'] } }) - self.assertEquals(1, len(p), 'Wrong number of pools returned') + self.assertEqual(1, len(p), 'Wrong number of pools returned') p = p[0] - self.assertEquals(self._mangle_pool_result(p), expected, 'Received pool differs from added pool') + self.assertEqual(self._mangle_pool_result(p), expected, 'Received pool differs from added pool') def test_edit_pool(self): @@ -1402,7 +1402,7 @@ def test_edit_pool(self): expected['tags'] = [] expected['avps'] = {} - self.assertEquals(self._mangle_pool_result(s.list_pool({ 
'auth': ad, + self.assertEqual(self._mangle_pool_result(s.list_pool({ 'auth': ad, 'pool': { 'id': res['id'] } })[0]), expected) From ff9ddf2ce1a418775c32dbdbbcff5a0bad120a22 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Thu, 10 Nov 2022 16:08:38 +0100 Subject: [PATCH 07/37] tests: Updated nipaptest.py for Python 3 compat Updated the nipaptest.py test suite for Python3 compatibility. * dict.keys() -> list(dict.keys()) * Updated assert functions to new names --- tests/nipaptest.py | 72 +++++++++++++++++++++++----------------------- 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/tests/nipaptest.py b/tests/nipaptest.py index d83069d17..c608d8954 100755 --- a/tests/nipaptest.py +++ b/tests/nipaptest.py @@ -290,7 +290,7 @@ def test_prefix_edit(self): # p3 should have inherited_tags = ['a'] from p1 res = Prefix.smart_search('1.0.0.0/24', {}) - self.assertEqual(['a'], res['result'][0].inherited_tags.keys()) + self.assertEqual(['a'], list(res['result'][0].inherited_tags.keys())) # edit p3 to become subnet of p2 p3.prefix = '2.0.0.0/24' @@ -298,7 +298,7 @@ def test_prefix_edit(self): # p3 should have inherited_tags = ['b'] from p2 res = Prefix.smart_search('2.0.0.0/24', {}) - self.assertEqual(['b'], res['result'][0].inherited_tags.keys()) + self.assertEqual(['b'], list(res['result'][0].inherited_tags.keys())) def test_tags1(self): @@ -313,7 +313,7 @@ def test_tags1(self): # p3 should have inherited_tags = ['a'] from p1 res = Prefix.smart_search('1.0.0.0/10', {}) - self.assertEqual(['a'], res['result'][0].inherited_tags.keys()) + self.assertEqual(['a'], list(res['result'][0].inherited_tags.keys())) p4 = th.add_prefix('1.0.0.0/24', 'reservation', 'test') p5 = th.add_prefix('1.0.0.0/23', 'reservation', 'test') @@ -321,7 +321,7 @@ def test_tags1(self): # p4 should have inherited_tags = ['a'] from p1 res = Prefix.smart_search('1.0.0.0/24', {}) - self.assertEqual(['a'], res['result'][0].inherited_tags.keys()) + self.assertEqual(['a'], list(res['result'][0].inherited_tags.keys())) # change tags on top level prefix p1.tags = ['b'] @@ -329,12 +329,12 @@ def test_tags1(self): # p4 should have inherited_tags = ['a'] from p1 res = Prefix.smart_search('1.0.0.0/8', {}) - self.assertEqual([], res['result'][0].inherited_tags.keys()) - self.assertEqual(['b'], res['result'][1].inherited_tags.keys()) - self.assertEqual(['b'], res['result'][2].inherited_tags.keys()) - self.assertEqual(['b'], res['result'][3].inherited_tags.keys()) - self.assertEqual(['b'], res['result'][4].inherited_tags.keys()) - self.assertEqual(['b'], res['result'][5].inherited_tags.keys()) + self.assertEqual([], list(res['result'][0].inherited_tags.keys())) + self.assertEqual(['b'], list(res['result'][1].inherited_tags.keys())) + self.assertEqual(['b'], list(res['result'][2].inherited_tags.keys())) + self.assertEqual(['b'], list(res['result'][3].inherited_tags.keys())) + self.assertEqual(['b'], list(res['result'][4].inherited_tags.keys())) + self.assertEqual(['b'], list(res['result'][5].inherited_tags.keys())) @@ -650,22 +650,22 @@ def test_country_code_length(self): p.status = 'assigned' # try to input one character - should fail - this will be a INSERT operation p.country = 'a' - with self.assertRaisesRegexp(NipapValueError, 'Please enter a two letter country code according to ISO 3166-1 alpha-2'): + with self.assertRaisesRegex(NipapValueError, 'Please enter a two letter country code according to ISO 3166-1 alpha-2'): p.save() # try to input one character - should fail - this will be an UPDATE operation p.country = 'a' - with 
self.assertRaisesRegexp(NipapValueError, 'Please enter a two letter country code according to ISO 3166-1 alpha-2'): + with self.assertRaisesRegex(NipapValueError, 'Please enter a two letter country code according to ISO 3166-1 alpha-2'): p.save() # try to input three character - should fail p.country = 'aaa' - with self.assertRaisesRegexp(NipapValueError, 'Please enter a two letter country code according to ISO 3166-1 alpha-2'): + with self.assertRaisesRegex(NipapValueError, 'Please enter a two letter country code according to ISO 3166-1 alpha-2'): p.save() # try to input a number character - should fail p.country = 'a1' - with self.assertRaisesRegexp(NipapValueError, 'Please enter a two letter country code according to ISO 3166-1 alpha-2'): + with self.assertRaisesRegex(NipapValueError, 'Please enter a two letter country code according to ISO 3166-1 alpha-2'): p.save() # try to input two character - should succeed @@ -1323,7 +1323,7 @@ def test_vrf1(self): ] for bv in broken_values: - with self.assertRaisesRegexp(pynipap.NipapValueError, 'Invalid input for column rt'): + with self.assertRaisesRegex(pynipap.NipapValueError, 'Invalid input for column rt'): v.rt = bv v.save() @@ -1624,7 +1624,7 @@ def test_extra_args(self): from pynipap import NipapError # 'FOO' should not be there and should raise an exception - with self.assertRaisesRegexp(InvalidCommand, 'Invalid argument:'): + with self.assertRaisesRegex(InvalidCommand, 'Invalid argument:'): cmd = Command(nipap_cli.cmds, ['address', 'modify', '1.3.3.1/32', 'vrf_rt', 'none', 'set', 'FOO' ]) @@ -1638,8 +1638,8 @@ def setUp(self): def mock_cfg(self): - import ConfigParser - cfg = ConfigParser.ConfigParser() + import configparser + cfg = configparser.ConfigParser() cfg.add_section('global') cfg.set('global', 'default_vrf_rt', '-') cfg.set('global', 'default_list_vrf_rt', 'all') @@ -1815,7 +1815,7 @@ def test_auto_type7(self): 'description': 'host' } - with self.assertRaisesRegexp(SystemExit, "^1$"): + with self.assertRaisesRegex(SystemExit, "^1$"): nipap_cli.add_prefix({}, opts, {}) @@ -2015,12 +2015,12 @@ def test_prefix4(self): success, query = n._parse_vrf_query('"') expected['interpretation']['string'] = '"' self.assertEqual(success, False) - self.assertEquals(query, expected) + self.assertEqual(query, expected) success, query = n._parse_prefix_query('\'') expected['interpretation']['string'] = '\'' self.assertEqual(success, False) - self.assertEquals(query, expected) + self.assertEqual(query, expected) @@ -2047,12 +2047,12 @@ def test_prefix5(self): success, query = n._parse_prefix_query('(') expected['interpretation']['string'] = '(' self.assertEqual(success, False) - self.assertEquals(query, expected) + self.assertEqual(query, expected) success, query = n._parse_prefix_query(')') expected['interpretation']['string'] = ')' self.assertEqual(success, False) - self.assertEquals(query, expected) + self.assertEqual(query, expected) @@ -2311,7 +2311,7 @@ def test_prefix14(self): success, query = n._parse_vrf_query('foo=bar') self.assertEqual(success, False) - self.assertEquals(expected, query) + self.assertEqual(expected, query) @@ -2338,7 +2338,7 @@ def test_prefix15(self): success, query = n._parse_prefix_query('type=foo') self.assertEqual(success, False) - self.assertEquals(expected, query) + self.assertEqual(expected, query) @@ -3126,7 +3126,7 @@ def test_pool_add_avp(self): p = Pool() p.name = 'test AVP with empty name' p.avps = { '': '1337' } - with self.assertRaisesRegexp(NipapValueError, "AVP with empty name is not allowed"): + with 
self.assertRaisesRegex(NipapValueError, "AVP with empty name is not allowed"): p.save() @@ -3137,7 +3137,7 @@ def test_pool_edit_avp(self): p = th.add_pool('test', 'assignment', 31, 112) p.avps = { '': '1337' } - with self.assertRaisesRegexp(NipapValueError, "AVP with empty name is not allowed"): + with self.assertRaisesRegex(NipapValueError, "AVP with empty name is not allowed"): p.save() @@ -3148,7 +3148,7 @@ def test_prefix_add_avp(self): p.status = 'assigned' p.description = 'test AVP with empty name' p.avps = { '': '1337' } - with self.assertRaisesRegexp(NipapValueError, "AVP with empty name is not allowed"): + with self.assertRaisesRegex(NipapValueError, "AVP with empty name is not allowed"): p.save() @@ -3157,7 +3157,7 @@ def test_prefix_edit_avp(self): p = th.add_prefix('192.0.2.0/24', 'assignment', 'test AVP with empty name') p.avps = { '': '1337' } - with self.assertRaisesRegexp(NipapValueError, "AVP with empty name is not allowed"): + with self.assertRaisesRegex(NipapValueError, "AVP with empty name is not allowed"): p.save() @@ -3166,7 +3166,7 @@ def test_vrf_add_avp(self): v.rt = '123:456' v.name = 'test AVP with empty name' v.avps = { '': '1337' } - with self.assertRaisesRegexp(NipapValueError, "AVP with empty name is not allowed"): + with self.assertRaisesRegex(NipapValueError, "AVP with empty name is not allowed"): v.save() @@ -3177,7 +3177,7 @@ def test_vrf_edit_avp(self): v.save() v.avps = { '': '1337' } - with self.assertRaisesRegexp(NipapValueError, "AVP with empty name is not allowed"): + with self.assertRaisesRegex(NipapValueError, "AVP with empty name is not allowed"): v.save() @@ -3198,23 +3198,23 @@ def test_constraints(self): th = TestHelper() d = "test description" th.add_prefix('1.3.0.0/16', 'reservation', d) - with self.assertRaisesRegexp(NipapDuplicateError, "Duplicate"): + with self.assertRaisesRegex(NipapDuplicateError, "Duplicate"): # exact duplicate th.add_prefix('1.3.0.0/16', 'reservation', d) p2 = th.add_prefix('1.3.3.0/24', 'reservation', d) p3 = th.add_prefix('1.3.3.0/27', 'assignment', d) th.add_prefix('1.3.3.0/32', 'host', d) th.add_prefix('1.3.3.1/32', 'host', d) - with self.assertRaisesRegexp(NipapValueError, "Prefix of type host must have all bits set in netmask"): + with self.assertRaisesRegex(NipapValueError, "Prefix of type host must have all bits set in netmask"): # do not allow /31 as type 'host' th.add_prefix('1.3.3.2/31', 'host', d) - with self.assertRaisesRegexp(NipapValueError, "Parent prefix .* is of type assignment"): + with self.assertRaisesRegex(NipapValueError, "Parent prefix .* is of type assignment"): # unable to create assignment within assignment th.add_prefix('1.3.3.3/32', 'assignment', d) - with self.assertRaisesRegexp(NipapValueError, "contains hosts"): + with self.assertRaisesRegex(NipapValueError, "contains hosts"): # unable to remove assignment containing hosts p3.remove() - with self.assertRaisesRegexp(NipapValueError, "'assignment' must not have any subnets other than of type 'host'"): + with self.assertRaisesRegex(NipapValueError, "'assignment' must not have any subnets other than of type 'host'"): p2.type = 'assignment' p2.save() From bdb82ed884a3907c32aae72fa2ed6456b6ff70fe Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Thu, 10 Nov 2022 16:24:39 +0100 Subject: [PATCH 08/37] tests: Updated read-only tests for Python3 compat --- .github/workflows/ci.yml | 2 +- tests/{nipap-ro.py => test_nipap_ro.py} | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) rename tests/{nipap-ro.py => test_nipap_ro.py} (99%) diff --git 
a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2293fca24..3cba698b6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -153,6 +153,6 @@ jobs: nosetests tests/test_xmlrpc.py nosetests tests/nipaptest.py nosetests tests/test_cli.py - nosetests tests/nipap-ro.py + nosetests tests/test_nipap_ro.py nosetests tests/test_rest.py make -C jnipap test diff --git a/tests/nipap-ro.py b/tests/test_nipap_ro.py similarity index 99% rename from tests/nipap-ro.py rename to tests/test_nipap_ro.py index 829c8b9fb..6bb449352 100755 --- a/tests/nipap-ro.py +++ b/tests/test_nipap_ro.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 import logging import unittest From 53b276512a35b6e7bfb7c9ca683b50dfae199b49 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Thu, 10 Nov 2022 16:28:14 +0100 Subject: [PATCH 09/37] tests: Updated CLI tests for Python 3 compat --- tests/test_cli.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_cli.py b/tests/test_cli.py index 34a552b76..3c4d5a88f 100755 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -17,10 +17,10 @@ logger.setLevel(logging.DEBUG) log_format = "%(levelname)-8s %(message)s" -import xmlrpclib +import xmlrpc.client server_url = "http://unittest:gottatest@127.0.0.1:1337/XMLRPC" -s = xmlrpclib.Server(server_url, allow_none=1); +s = xmlrpc.client.Server(server_url, allow_none=1); ad = { 'authoritative_source': 'nipap' } From 53797db0e0be6486bfa7b5354fe8af061d8f3b76 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Thu, 10 Nov 2022 16:29:54 +0100 Subject: [PATCH 10/37] tests: Updated before/after-tests for Python3 --- tests/upgrade-after.py | 6 +++--- tests/upgrade-before.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/upgrade-after.py b/tests/upgrade-after.py index 16e77c92a..a64a880f8 100644 --- a/tests/upgrade-after.py +++ b/tests/upgrade-after.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # vim: et : # # This is run by Travis-CI after an upgrade to verify that the data loaded by @@ -24,10 +24,10 @@ logger.setLevel(logging.DEBUG) log_format = "%(levelname)-8s %(message)s" -import xmlrpclib +import xmlrpc.client server_url = "http://unittest:gottatest@127.0.0.1:1337/XMLRPC" -s = xmlrpclib.Server(server_url, allow_none=1); +s = xmlrpc.client.Server(server_url, allow_none=1); ad = { 'authoritative_source': 'nipap' } diff --git a/tests/upgrade-before.py b/tests/upgrade-before.py index 6ab3bb85f..76e534762 100644 --- a/tests/upgrade-before.py +++ b/tests/upgrade-before.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # # This is run by Travis-CI before an upgrade to load some data into the # database. After the upgrade is complete, the data is verified by From ca48dab71831c7f6620d4418b84e96e76c960ff1 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Thu, 10 Nov 2022 23:24:36 +0100 Subject: [PATCH 11/37] build: Updated build system for Python 3 Updated build system (Debian package config files, Makefiles etc) for Python 3. 
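A pattern that repeats across the build changes below is moving every Python invocation to an explicit python3 and converting Python 2-only syntax in the setup scripts. As a minimal sketch of the setup.py side of that pattern (the error string here is made up for illustration), the old print-to-stderr statement becomes a function call:

    import sys

    # Python 2 wrote to stderr with stream-redirect syntax, which is a
    # SyntaxError under Python 3:
    #   print >> sys.stderr, "rst2man failed to run:", str(exc)
    # Python 3 passes the target stream as a keyword argument instead:
    print("rst2man failed to run: {}".format("example error"), file=sys.stderr)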
--- nipap-cli/Makefile | 2 +- nipap-www/Makefile | 6 +++--- nipap-www/debian/control | 5 ++--- nipap-www/debian/rules | 2 +- nipap/Makefile | 4 ++-- nipap/debian/control | 8 ++++---- nipap/debian/rules | 2 +- nipap/debian/source/options | 2 +- pynipap/Makefile | 6 +++--- pynipap/debian/control | 4 ++-- whoisd/Makefile | 4 ++-- whoisd/debian/control | 4 ++-- whoisd/debian/rules | 2 +- whoisd/setup.py | 4 ++-- 14 files changed, 27 insertions(+), 28 deletions(-) diff --git a/nipap-cli/Makefile b/nipap-cli/Makefile index 12b68b435..3b1d81708 100644 --- a/nipap-cli/Makefile +++ b/nipap-cli/Makefile @@ -38,7 +38,7 @@ test: check: upload: - python setup.py sdist upload + $(PYTHON) setup.py sdist upload clean: $(PYTHON) setup.py clean diff --git a/nipap-www/Makefile b/nipap-www/Makefile index a4e8450f5..22d5c33d7 100644 --- a/nipap-www/Makefile +++ b/nipap-www/Makefile @@ -1,7 +1,7 @@ # $Id: Makefile,v 1.6 2011/04/18 17:14:00 lukagarb Exp $ # -PYTHON=`which python` +PYTHON=`which python3` DESTDIR=/ BUILDIR=$(CURDIR)/debian/nipap-www-build PROJECT=nipap-www @@ -27,7 +27,7 @@ buildrpm: builddeb: # build the source package in the parent directory # then rename it to project_version.orig.tar.gz - $(PYTHON) setup.py sdist --dist-dir=../ # --prune + $(PYTHON) setup.py sdist --dist-dir=../ #--prune rename -f 's/$(PROJECT)-(\d.*)\.tar\.gz/$(PROJECT)_$$1\.orig\.tar\.gz/' ../* # build the package debuild -us -uc -sa @@ -38,7 +38,7 @@ test: check: upload: - python setup.py sdist upload + $(PYTHON) setup.py sdist upload clean: $(PYTHON) setup.py clean diff --git a/nipap-www/debian/control b/nipap-www/debian/control index 828fdff65..5f020ffae 100644 --- a/nipap-www/debian/control +++ b/nipap-www/debian/control @@ -2,13 +2,12 @@ Source: nipap-www Section: web Priority: optional Maintainer: Lukas Garberg -Build-Depends: debhelper (>= 5.0.38), debhelper (>= 7), - python (>= 2.7), python-setuptools (>= 0.6b3) +Build-Depends: debhelper (>= 5.0.38), debhelper (>= 7), python3 (>= 3.6), python-setuptools (>= 0.6b3), dh-python Standards-Version: 4.4.0 Package: nipap-www Architecture: all -Depends: debconf, python (>= 2.7), ${misc:Depends}, python-flask, python-pynipap, nipap-common, python-jinja2 +Depends: debconf, python3 (>= 3.6), ${misc:Depends}, python3-flask, python3-pynipap, nipap-common, python3-jinja2 XB-Python-Version: ${python:Versions} Description: web frontend for NIPAP A web UI for the NIPAP IP address planning service. 
diff --git a/nipap-www/debian/rules b/nipap-www/debian/rules index bcebae9fb..613576055 100755 --- a/nipap-www/debian/rules +++ b/nipap-www/debian/rules @@ -1,6 +1,6 @@ #!/usr/bin/make -f %: - dh $@ --with python2 --buildsystem=python_distutils + dh $@ --with python3 --buildsystem=python_distutils diff --git a/nipap/Makefile b/nipap/Makefile index b3584fa88..872ae794e 100644 --- a/nipap/Makefile +++ b/nipap/Makefile @@ -27,7 +27,7 @@ buildrpm: sql2py builddeb: sql2py # build the source package in the parent directory # then rename it to project_version.orig.tar.gz - $(PYTHON) setup.py sdist --dist-dir=../ --prune + $(PYTHON) setup.py sdist --dist-dir=../ #--prune rename -f 's/$(PROJECT)-(\d.*)\.tar\.gz/$(PROJECT)_$$1\.orig\.tar\.gz/' ../* # build the package debuild -us -uc @@ -38,7 +38,7 @@ test: check: upload: - python3 setup.py sdist upload + $(PYTHON) setup.py sdist upload clean: $(PYTHON) setup.py clean diff --git a/nipap/debian/control b/nipap/debian/control index 72dd0ddc4..8a415c35d 100644 --- a/nipap/debian/control +++ b/nipap/debian/control @@ -2,14 +2,14 @@ Source: nipap Maintainer: Lukas Garberg Section: python Priority: optional -Build-Depends: python (>= 3.6), debhelper (>= 7.4.3) +Build-Depends: python3 (>= 3.6), debhelper (>= 7.4.3), dh-python Standards-Version: 4.4.0 Package: nipap-common Architecture: all -Depends: python (>= 3.6), ${misc:Depends}, python-pysqlite2, python-ipy -Suggests: python-ldap +Depends: python3 (>= 3.6), ${misc:Depends}, python3-ipy +Suggests: python3-ldap Description: Neat IP Address Planner The Neat IP Address Planner, NIPAP, is a system built for efficiently managing large amounts of IP addresses. This is the common libraries. @@ -17,7 +17,7 @@ Description: Neat IP Address Planner Package: nipapd Architecture: all -Depends: debconf, nipap-common, python (>= 3.6), ${misc:Depends}, python-psycopg2, python-flask, python-flask-xml-rpc, python-flask-restful, python-flask-compress, python-tornado, python-parsedatetime, python-tz, python-dateutil, python-psutil, python-pyparsing, python-jwt, python-requests +Depends: debconf, nipap-common, python3 (>= 3.6), ${misc:Depends}, python3-psycopg2, python3-flask, python3-flask-xml-rpc-re, python3-flask-restful, python3-flask-compress, python3-tornado, python3-parsedatetime, python3-tz, python3-dateutil, python3-psutil, python3-pyparsing, python3-jwt, python3-requests Description: Neat IP Address Planner XML-RPC daemon The Neat IP Address Planner, NIPAP, is a system built for efficiently managing large amounts of IP addresses. This is the XML-RPC daemon. 
diff --git a/nipap/debian/rules b/nipap/debian/rules index 3a248f409..fbb18bf44 100755 --- a/nipap/debian/rules +++ b/nipap/debian/rules @@ -4,6 +4,6 @@ # Tue, 18 Oct 2011 21:55:07 +0200 %: - dh $@ --with python2 --buildsystem=python_distutils + dh $@ --with python3 --buildsystem=python_distutils diff --git a/nipap/debian/source/options b/nipap/debian/source/options index e265fa9f9..1c26ef023 100644 --- a/nipap/debian/source/options +++ b/nipap/debian/source/options @@ -1,2 +1,2 @@ # Don't store "changes" made to Makefile and some other files -extend-diff-ignore = "(^|/)(entrypoint.sh|Makefile|xmlbench.py|xml-test.py|MANIFEST|nipapd.man.rst|nipap-passwd.man.rst|clean.plsql|requirements.txt)$" +extend-diff-ignore = "(^|/)(entrypoint.sh|Makefile|xmlbench.py|xml-test.py|MANIFEST|nipapd.man.rst|nipap-passwd.man.rst|clean.plsql|requirements.txt|wait-for-it.sh)$" diff --git a/pynipap/Makefile b/pynipap/Makefile index ea0b0899c..c885e0d2e 100644 --- a/pynipap/Makefile +++ b/pynipap/Makefile @@ -1,7 +1,7 @@ # $Id: Makefile,v 1.6 2011/04/18 17:14:00 lukagarb Exp $ # -PYTHON=`which python` +PYTHON=`which python3` DESTDIR=/ BUILDIR=$(CURDIR)/debian/python-pynipap-build PROJECT=pynipap @@ -26,7 +26,7 @@ buildrpm: builddeb: # build the source package in the parent directory # then rename it to project_version.orig.tar.gz - $(PYTHON) setup.py sdist --dist-dir=../ #--prune + $(PYTHON) setup.py sdist --dist-dir=../ --prune rename -f 's/$(PROJECT)-(\d.*)\.tar\.gz/$(PROJECT)_$$1\.orig\.tar\.gz/' ../* # build the package debuild -us -uc @@ -37,7 +37,7 @@ test: check: upload: - python setup.py sdist upload + $(PYTHON) setup.py sdist upload clean: $(PYTHON) setup.py clean diff --git a/pynipap/debian/control b/pynipap/debian/control index 95a68abe4..889dea362 100644 --- a/pynipap/debian/control +++ b/pynipap/debian/control @@ -3,7 +3,7 @@ Maintainer: Lukas Garberg Section: python Priority: optional Build-Depends: debhelper (>= 8), dh-python, - python (>= 2.7), + python (>= 2.7) | python2 (>= 2.7), python3 (>= 3.1) X-Python-Version: >= 2.7 X-Python3-Version: >= 3.1 @@ -11,7 +11,7 @@ Standards-Version: 4.4.0 Package: python-pynipap Architecture: all -Depends: ${misc:Depends}, python (>= 2.7) +Depends: ${misc:Depends}, python (>= 2.7) | python2 (>= 2.7) Breaks: ${python:Breaks} Description: Python module for accessing NIPAP This package contains a client library for NIPAP. 
It's function is similar to diff --git a/whoisd/Makefile b/whoisd/Makefile index dfface729..9086f475c 100644 --- a/whoisd/Makefile +++ b/whoisd/Makefile @@ -1,7 +1,7 @@ # $Id: Makefile,v 1.6 2012/02/29 08:55:00 lukagarb Exp $ # -PYTHON=`which python` +PYTHON=`which python3` DESTDIR=/ BUILDIR=$(CURDIR)/debian/python-nipap-build PROJECT=nipap-whoisd @@ -37,7 +37,7 @@ test: check: upload: - python setup.py sdist upload + $(PYTHON) setup.py sdist upload clean: $(PYTHON) setup.py clean diff --git a/whoisd/debian/control b/whoisd/debian/control index a85e2a7a1..324d4a1c8 100644 --- a/whoisd/debian/control +++ b/whoisd/debian/control @@ -2,12 +2,12 @@ Source: nipap-whoisd Maintainer: Kristian Larsson Section: python Priority: optional -Build-Depends: python (>= 2.7), debhelper (>= 7) +Build-Depends: python3 (>= 3.6), debhelper (>= 7), dh-python Standards-Version: 4.4.0 Package: nipap-whoisd Architecture: all -Depends: ${misc:Depends}, python (>= 2.7), python-pynipap +Depends: ${misc:Depends}, python3 (>= 3.6), python3-pynipap Description: Neat IP Address Planner The NIPAP whois daemon provides a whois-style interface for querying data in the NIPAP backend. It receives whois queries, translates these into search diff --git a/whoisd/debian/rules b/whoisd/debian/rules index 4e61b699d..e843f49b5 100755 --- a/whoisd/debian/rules +++ b/whoisd/debian/rules @@ -4,6 +4,6 @@ # Wed, 29 Feb 2012 09:03:22 +0100 %: - dh $@ --with python2 --buildsystem=python_distutils + dh $@ --with python3 --buildsystem=python_distutils diff --git a/whoisd/setup.py b/whoisd/setup.py index c04f72600..d1b73c8aa 100644 --- a/whoisd/setup.py +++ b/whoisd/setup.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 from distutils.core import setup import subprocess @@ -12,7 +12,7 @@ def get_data_files(): try: subprocess.call(["rst2man", "nipap-whoisd.man.rst", "nipap-whoisd.8"]) except OSError as exc: - print >> sys.stderr, "rst2man failed to run:", str(exc) + print("rst2man failed to run: {}".format(str(exc)), file=sys.stderr) sys.exit(1) files = [ From 521ccfff69a1cb535f6dddf510c5d33e78514358 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Thu, 10 Nov 2022 23:50:53 +0100 Subject: [PATCH 12/37] nipap: Updated requirements.txt --- nipap/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nipap/requirements.txt b/nipap/requirements.txt index dfba709b9..8007e763f 100644 --- a/nipap/requirements.txt +++ b/nipap/requirements.txt @@ -21,7 +21,7 @@ python-dateutil==2.8.1 python-ldap==3.3.1 pytz==2021.1 pysqlite==2.8.3 -tornado==6.0.4 -wsgiref==0.1.2 pyjwt==1.5.3 +tornado==6.1 +wsgiref==0.1.2 docutils==0.16 From a289c59c60aeaa1fe75fb38a7f4b3e760c833baa Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Thu, 10 Nov 2022 23:54:25 +0100 Subject: [PATCH 13/37] ci: Updated build dependencies --- .github/workflows/ci.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3cba698b6..72a963d7e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,9 +37,8 @@ jobs: debhelper \ dh-python \ junit4 \ - python-docutils \ - python-nose \ - python-requests \ + python3-docutils \ + python3-nose \ python3-requests \ python-setuptools \ python3-setuptools \ From 326a182377cbbb3010c5afaca33433782563447a Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Fri, 11 Nov 2022 00:01:45 +0100 Subject: [PATCH 14/37] ci: Install using pip3 --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 72a963d7e..79e0878d2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -73,7 +73,7 @@ jobs: if: ${{ matrix.install == 'pip' }} run: | # install nipap dependencies - sudo -H pip install -r nipap/requirements.txt + sudo -H pip3 install -r nipap/requirements.txt # SQL sudo su -c "cd nipap/sql; PGPASSWORD=papin make install" postgres # move configuration file into place From 9c935e70bcbf33759657868a3792203dd2e5cfb3 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Fri, 11 Nov 2022 08:59:21 +0100 Subject: [PATCH 15/37] ci: Run nosetests3, add libldap-dev * Run nosetests3 to force Python 3 * Add libldap-dev which is required to install python-ldap from pip --- .github/workflows/ci.yml | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 79e0878d2..4c7c5dd40 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,6 +37,7 @@ jobs: debhelper \ dh-python \ junit4 \ + libldap-dev \ python3-docutils \ python3-nose \ python3-requests \ @@ -113,7 +114,7 @@ sudo nipap-passwd add -u unittest -p gottatest -f /etc/nipap/local_auth.db -n unittest sudo /etc/init.d/nipapd restart # if upgrade, add some data to the database that we can verify later - nosetests tests/upgrade-before.py + nosetests3 tests/upgrade-before.py - name: "Build and install Debian packages" if: ${{ matrix.install == 'apt' }} @@ -143,15 +144,15 @@ - name: "Verify pre-upgrade data" if: ${{ matrix.upgrade == true }} - run: nosetests tests/upgrade-after.py + run: nosetests3 tests/upgrade-after.py - name: "Run test suite" env: REQUESTS_CA_BUNDLE: /etc/ssl/certs/ca-certificates.crt run: | - nosetests tests/test_xmlrpc.py - nosetests tests/nipaptest.py - nosetests tests/test_cli.py - nosetests tests/test_nipap_ro.py - nosetests tests/test_rest.py + nosetests3 tests/test_xmlrpc.py + nosetests3 tests/nipaptest.py + nosetests3 tests/test_cli.py + nosetests3 tests/test_nipap_ro.py + nosetests3 tests/test_rest.py make -C jnipap test From a7080751928c778f2f688c945a660fddcb242725 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Fri, 11 Nov 2022 09:12:42 +0100 Subject: [PATCH 16/37] ci: Run tests on Ubuntu 20.04, install from testing repo --- .github/workflows/ci.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4c7c5dd40..db67c0404 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,7 +9,7 @@ jobs: test: - runs-on: ubuntu-18.04 + runs-on: ubuntu-20.04 strategy: matrix: install: [ pip, apt ] @@ -26,7 +26,7 @@ - name: "Install dependencies and prepare NIPAP" run: | # Set up NIPAP repo - echo "deb http://spritelink.github.io/NIPAP/repos/apt stable main extra" | sudo tee /etc/apt/sources.list.d/nipap.list + echo "deb http://spritelink.github.io/NIPAP/repos/apt testing main extra" | sudo tee /etc/apt/sources.list.d/nipap.list wget -O - https://spritelink.github.io/NIPAP/nipap.gpg.key | sudo apt-key add - sudo apt update -qq @@ -38,6 +38,7 @@ jobs: junit4 \ libldap-dev \ + libsasl2-dev \ python3-docutils \ python3-nose \ python3-requests \ @@ -114,7 +115,7 @@ sudo nipap-passwd add -u unittest -p gottatest -f /etc/nipap/local_auth.db -n unittest sudo /etc/init.d/nipapd restart # if upgrade, add some data to the database that we can verify later nosetests tests/upgrade-before.py - name:
"Build and install Debian packages" if: ${{ matrix.install == 'apt' }} From fe985a452e480b4c964a48a70af047e4c77002f0 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Fri, 11 Nov 2022 09:15:28 +0100 Subject: [PATCH 17/37] ci: Run 18.04 again --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index db67c0404..a3bec17c6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,7 +9,7 @@ on: jobs: test: - runs-on: ubuntu-20.04 + runs-on: ubuntu-18.04 strategy: matrix: install: [ pip, apt ] From 0493ad3cdf3969e68d9f8a4fd44f7b9a2ad73be4 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Fri, 11 Nov 2022 15:51:19 +0100 Subject: [PATCH 18/37] backend: Fixed log message formatting --- nipap/nipap/backend.py | 2 +- nipap/nipap/smart_parsing.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nipap/nipap/backend.py b/nipap/nipap/backend.py index c2e57e10d..cc42f303d 100644 --- a/nipap/nipap/backend.py +++ b/nipap/nipap/backend.py @@ -2820,7 +2820,7 @@ def list_prefix(self, auth, spec=None): understanding. """ - self._logger.debug("list_prefix called; spec: %", spec) + self._logger.debug("list_prefix called; spec: %s", spec) if isinstance(spec, dict): where, params = self._expand_prefix_spec(spec.copy(), 'inp.') diff --git a/nipap/nipap/smart_parsing.py b/nipap/nipap/smart_parsing.py index 7959c6553..19643507e 100644 --- a/nipap/nipap/smart_parsing.py +++ b/nipap/nipap/smart_parsing.py @@ -501,7 +501,7 @@ def _string_to_dictsql(self, part): } elif part.getName() == 'ipv6_prefix': - self._logger.debug("Query part '%' interpreted as IPv6 prefix", part.ipv6_prefix[0]) + self._logger.debug("Query part '%s' interpreted as IPv6 prefix", part.ipv6_prefix[0]) strict_prefix = str(IPy.IP(part.ipv6_prefix[0], make_net=True)) interp = { From 2b3d4cc308ba9b2ae243b8bd7263760d66a3c0f3 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Fri, 11 Nov 2022 16:03:05 +0100 Subject: [PATCH 19/37] tests: Cast ID to integer before comparison --- tests/test_rest.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_rest.py b/tests/test_rest.py index 35ceafbed..5a7991b07 100644 --- a/tests/test_rest.py +++ b/tests/test_rest.py @@ -214,7 +214,7 @@ def test_prefix_add(self): result = json.loads(text) result = dict([(str(k), str(v)) for k, v in result.items()]) attr['id'] = result['id'] - self.assertGreater(attr['id'], 0) + self.assertGreater(int(attr['id']), 0) # what we expect the above prefix to look like expected = prefix_result_template @@ -319,7 +319,7 @@ def test_prefix_edit(self): attr['type'] = 'assignment' attr['order_id'] = 'test' prefix_id = self._add_prefix(attr) - self.assertGreater(prefix_id, 0) + self.assertGreater(int(prefix_id), 0) # Edit prefix parameters = {'id': prefix_id} @@ -349,7 +349,7 @@ def test_edit_prefix_failure(self): attr['type'] = 'assignment' attr['order_id'] = 'test' prefix_id = self._add_prefix(attr) - self.assertGreater(prefix_id, 0) + self.assertGreater(int(prefix_id), 0) # Try editing without/with broken prefix specifier parameters = {'foo': prefix_id} @@ -389,7 +389,7 @@ def test_prefix_remove(self): attr['type'] = 'assignment' attr['order_id'] = 'test' prefix_id = self._add_prefix(attr) - self.assertGreater(prefix_id, 0) + self.assertGreater(int(prefix_id), 0) # delete prefix parameters = {'id': prefix_id} @@ -420,7 +420,7 @@ def test_prefix_search_case_sensitive(self): attr['order_id'] = add_orderId_value prefix_id = 
self._add_prefix(attr) attr['id'] = prefix_id - self.assertGreater(attr['id'], 0) + self.assertGreater(int(attr['id']), 0) expected = prefix_result_template expected['display_prefix'] = '1.3.5.0/24' @@ -449,7 +449,7 @@ def test_prefix_search_case_insensitive(self): attr['order_id'] = add_orderId_value prefix_id = self._add_prefix(attr) attr['id'] = prefix_id - self.assertGreater(attr['id'], 0) + self.assertGreater(int(attr['id']), 0) expected = prefix_result_template expected['display_prefix'] = '1.3.6.0/24' From c6446f1038067ec17f037d12db24aee56a7e0726 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Fri, 11 Nov 2022 23:57:14 +0100 Subject: [PATCH 20/37] backend: Updated REST module for Py3 --- nipap/nipap/rest.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/nipap/nipap/rest.py b/nipap/nipap/rest.py index d0e626dae..10d3a9fb5 100644 --- a/nipap/nipap/rest.py +++ b/nipap/nipap/rest.py @@ -75,9 +75,9 @@ def _mangle_prefix(res): """ Mangle prefix result """ # fugly cast from large numbers to string to deal with XML-RPC - res['total_addresses'] = unicode(res['total_addresses']) - res['used_addresses'] = unicode(res['used_addresses']) - res['free_addresses'] = unicode(res['free_addresses']) + res['total_addresses'] = str(res['total_addresses']) + res['used_addresses'] = str(res['used_addresses']) + res['free_addresses'] = str(res['free_addresses']) # postgres has notion of infinite while datetime hasn't, if expires # is equal to the max datetime we assume it is infinity and instead @@ -222,7 +222,7 @@ def get(self, args): if query is not None: # Create search query dict from request params query_parts = [] - for field, search_value in query.items(): + for field, search_value in list(query.items()): query_parts.append(get_query_for_field(field, search_value)) search_query = query_parts[0] for query_part in query_parts[1:]: @@ -241,10 +241,10 @@ def get(self, args): return jsonify(result['result']) except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) + self.logger.debug(str(exc)) abort(500, error={"code": exc.error_code, "message": str(exc)}) except Exception as err: - self.logger.error(unicode(err)) + self.logger.error(str(err)) abort(500, error={"code": 500, "message": "Internal error"}) @@ -260,10 +260,10 @@ def post(self, args): return jsonify(_mangle_prefix(result)) except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) + self.logger.debug(str(exc)) abort(500, error={"code": exc.error_code, "message": str(exc)}) except Exception as err: - self.logger.error(unicode(err)) + self.logger.error(str(err)) abort(500, error={"code": 500, "message": "Internal error"}) @@ -280,10 +280,10 @@ def put(self, args): return jsonify(result) except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) + self.logger.debug(str(exc)) abort(500, error={"code": exc.error_code, "message": str(exc)}) except Exception as err: - self.logger.error(unicode(err)) + self.logger.error(str(err)) abort(500, error={"code": 500, "message": "Internal error"}) @@ -296,8 +296,8 @@ def delete(self, args): self.nip.remove_prefix(args.get('auth'), args.get('prefix')) return jsonify(args.get('prefix')) except (AuthError, NipapError) as exc: - self.logger.debug(unicode(exc)) + self.logger.debug(str(exc)) abort(500, error={"code": exc.error_code, "message": str(exc)}) except Exception as err: - self.logger.error(unicode(err)) + self.logger.error(str(err)) abort(500, error={"code": 500, "message": "Internal error"}) From 
8dbb85ee923de8d4fee8830400c82c5d8738e555 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Fri, 11 Nov 2022 23:57:43 +0100 Subject: [PATCH 21/37] tests: Updated REST tests to Py3 --- tests/test_rest.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/tests/test_rest.py b/tests/test_rest.py index 5a7991b07..19ec788c0 100644 --- a/tests/test_rest.py +++ b/tests/test_rest.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # vim: et : # @@ -168,7 +168,7 @@ def _convert_list_of_unicode_to_str(self, list_of_items): """ result = [] for item in list_of_items: - item = dict([(str(k), str(v)) for k, v in item.items()]) + item = dict([(str(k), str(v)) for k, v in list(item.items())]) result.append(item) return result @@ -178,7 +178,7 @@ def _add_prefix(self, attr): request = requests.post(self.server_url, headers=self.headers, json = attr) text = request.text result = json.loads(text) - result = dict([(str(k), str(v)) for k, v in result.items()]) + result = dict([(str(k), str(v)) for k, v in list(result.items())]) return result['id'] @@ -192,17 +192,17 @@ def test_prefix_add(self): request = requests.post(self.server_url, headers=self.headers, json = attr) text = request.text - self.assertRegexpMatches(text,"'attr' must be a dict") + self.assertRegex(text,"'attr' must be a dict") attr['prefix'] = '1.3.3.0/24' request = requests.post(self.server_url, headers=self.headers, json = attr) text = request.text - self.assertRegexpMatches(text, "Either description or node must be specified.") + self.assertRegex(text, "Either description or node must be specified.") attr['description'] = 'test prefix' request = requests.post(self.server_url, headers=self.headers, json = attr) text = request.text - self.assertRegexpMatches(text, "Unknown prefix type") + self.assertRegex(text, "Unknown prefix type") attr['type'] = 'assignment' attr['order_id'] = 'test' @@ -212,7 +212,7 @@ def test_prefix_add(self): request = requests.post(self.server_url, headers=self.headers, json = attr) text = request.text result = json.loads(text) - result = dict([(str(k), str(v)) for k, v in result.items()]) + result = dict([(str(k), str(v)) for k, v in list(result.items())]) attr['id'] = result['id'] self.assertGreater(int(attr['id']), 0) @@ -220,7 +220,7 @@ def test_prefix_add(self): expected = prefix_result_template expected['id'] = int(attr['id']) expected['display_prefix'] = '1.3.3.0/24' - expected = dict([(str(k), str(v)) for k, v in expected.items()]) + expected = dict([(str(k), str(v)) for k, v in list(expected.items())]) expected.update(attr) # list of prefixes through GET request @@ -242,7 +242,7 @@ def test_prefix_add(self): request = requests.post(self.server_url, headers=self.headers, json = attr, params = parameters) text = request.text result = json.loads(text) - result = dict([(str(k), str(v)) for k, v in result.items()]) + result = dict([(str(k), str(v)) for k, v in list(result.items())]) # copy expected from 1.3.3.0/24 since we expect most things to look the # same for the new prefix (1.3.3.1/32) from 1.3.3.0/24 @@ -271,7 +271,7 @@ def test_prefix_add(self): request = requests.post(self.server_url, headers=self.headers, json = attr, params = parameters) text = request.text result = json.loads(text) - result = dict([(str(k), str(v)) for k, v in result.items()]) + result = dict([(str(k), str(v)) for k, v in list(result.items())]) # update expected list expected_host2 = expected_host.copy() expected_host2['id'] = result['id'] @@ -286,7 +286,7 @@ def 
test_prefix_add(self): request = requests.post(self.server_url, headers=self.headers, json = attr, params = parameters) text = request.text result = json.loads(text) - result = dict([(str(k), str(v)) for k, v in result.items()]) + result = dict([(str(k), str(v)) for k, v in list(result.items())]) # update expected list expected_host3 = expected_host.copy() expected_host3['id'] = result['id'] @@ -396,7 +396,7 @@ def test_prefix_remove(self): request = requests.delete(self.server_url, headers=self.headers, params=parameters) text = request.text result = json.loads(text) - result = dict([(str(k), str(v)) for k, v in result.items()]) + result = dict([(str(k), str(v)) for k, v in list(result.items())]) expected = { 'prefix': '1.3.4.0/24', @@ -424,7 +424,7 @@ def test_prefix_search_case_sensitive(self): expected = prefix_result_template expected['display_prefix'] = '1.3.5.0/24' - expected = dict([(str(k), str(v)) for k, v in expected.items()]) + expected = dict([(str(k), str(v)) for k, v in list(expected.items())]) expected.update(attr) parameters = {'order_id': search_orderId_value} @@ -453,7 +453,7 @@ def test_prefix_search_case_insensitive(self): expected = prefix_result_template expected['display_prefix'] = '1.3.6.0/24' - expected = dict([(str(k), str(v)) for k, v in expected.items()]) + expected = dict([(str(k), str(v)) for k, v in list(expected.items())]) expected.update(attr) parameters = {'order_id': search_orderId_value} From 728ba79b4b98d47733b78be2af2d7b7727f5192d Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Wed, 5 Jul 2023 15:52:39 +0200 Subject: [PATCH 22/37] www: Updated imports for Python3 --- nipap-www/nipapwww/ng.py | 2 +- nipap-www/nipapwww/prefix.py | 2 +- nipap-www/nipapwww/xhr.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nipap-www/nipapwww/ng.py b/nipap-www/nipapwww/ng.py index 3ebf6c903..9a02fbdb4 100644 --- a/nipap-www/nipapwww/ng.py +++ b/nipap-www/nipapwww/ng.py @@ -9,7 +9,7 @@ from flask import Blueprint, redirect, render_template, url_for -from auth import login_required +from .auth import login_required bp = Blueprint('ng', __name__, url_prefix='/ng') diff --git a/nipap-www/nipapwww/prefix.py b/nipap-www/nipapwww/prefix.py index 4cf3caf41..44ba7856a 100644 --- a/nipap-www/nipapwww/prefix.py +++ b/nipap-www/nipapwww/prefix.py @@ -1,6 +1,6 @@ from flask import Blueprint, g, render_template -from auth import login_required +from .auth import login_required bp = Blueprint('prefix', __name__, url_prefix='/prefix') diff --git a/nipap-www/nipapwww/xhr.py b/nipap-www/nipapwww/xhr.py index 9006f165e..21e1f7897 100644 --- a/nipap-www/nipapwww/xhr.py +++ b/nipap-www/nipapwww/xhr.py @@ -4,7 +4,7 @@ from pynipap import NipapError, Pool, Prefix, Tag, VRF -from auth import login_required +from .auth import login_required bp = Blueprint('xhr', __name__, url_prefix='/xhr') From 1cea16ca31d3577c26309c7c8cd7ec91524a3e1f Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Wed, 5 Jul 2023 16:02:35 +0200 Subject: [PATCH 23/37] nipap: Remove built-in requirements Remove requirements which are built into Python 3. 
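A quick sanity check that these removals are safe (a sketch, not part of the patch): both dropped distributions are covered by the Python 3 standard library, since sqlite3 supersedes the external pysqlite package and wsgiref ships with CPython.

    # Sketch: confirm the dropped requirements resolve from the stdlib
    import sqlite3   # replaces the external pysqlite distribution
    import wsgiref   # bundled with CPython, no pip package needed

    print(sqlite3.sqlite_version)
    print(wsgiref.__name__)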
--- nipap/requirements.txt | 2 -- 1 file changed, 2 deletions(-) diff --git a/nipap/requirements.txt b/nipap/requirements.txt index 8007e763f..ef817ae79 100644 --- a/nipap/requirements.txt +++ b/nipap/requirements.txt @@ -20,8 +20,6 @@ python-dateutil==2.8.1 # optional dependency on ldap python-ldap==3.3.1 pytz==2021.1 -pysqlite==2.8.3 pyjwt==1.5.3 tornado==6.1 -wsgiref==0.1.2 docutils==0.16 From 1f25915aa3ec366b8baba7c16c587f072a70c03f Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Wed, 5 Jul 2023 21:34:03 +0000 Subject: [PATCH 24/37] nipapd: Remove unicode param to register_hstore ... as it's not available in Python 3. --- nipap/nipap/backend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipap/nipap/backend.py b/nipap/nipap/backend.py index cc42f303d..521efd2a6 100644 --- a/nipap/nipap/backend.py +++ b/nipap/nipap/backend.py @@ -723,7 +723,7 @@ def _connect_db(self): self._con_pg = psycopg2.connect(**db_args) self._con_pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) self._curs_pg = self._con_pg.cursor(cursor_factory=psycopg2.extras.DictCursor) - psycopg2.extras.register_hstore(self._con_pg, globally=True, unicode=True) + psycopg2.extras.register_hstore(self._con_pg, globally=True) except psycopg2.Error as exc: if re.search("database.*does not exist", str(exc)): raise NipapDatabaseNonExistentError("Database '%s' does not exist" % db_args['database']) From 2d9dc8bb22775a57b45628371e122e2b4b42a2a8 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Wed, 5 Jul 2023 22:51:39 +0000 Subject: [PATCH 25/37] nipapd: Replace "" with nothing There was a special case in the deb package postinst action which set the psql database host to "" if it was configured as localhost. Not sure why this was needed, but "" caused psycopg2 to break in Python 3, which could be due to different handling in the config parser. Trying to write nothing instead of "".
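A minimal sketch of the suspected failure mode, under two assumptions: the backend builds its psycopg2 connect arguments more or less directly from nipap.conf, and the Python 3 config parser keeps the literal quote characters, so a db_host of "" reaches psycopg2 as the two-character host '""'. Leaving the value empty and dropping empty keys instead lets libpq fall back to its defaults (typically the local Unix socket). The connection values below are hypothetical:

    import psycopg2

    # connect arguments as they might be read from nipap.conf
    db_args = {
        'host': '',          # empty after the postinst change below
        'database': 'nipap',
        'user': 'nipap',
        'password': 'papin',
    }

    # drop unset/empty parameters before connecting so libpq applies its
    # defaults instead of receiving a bogus host value
    db_args = {k: v for k, v in db_args.items() if v}

    con = psycopg2.connect(**db_args)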
--- nipap/debian/nipapd.postinst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipap/debian/nipapd.postinst b/nipap/debian/nipapd.postinst index e0ede1f4a..35f5f8c16 100644 --- a/nipap/debian/nipapd.postinst +++ b/nipap/debian/nipapd.postinst @@ -50,7 +50,7 @@ case "$1" in fi # fill in db_host if [ $DB_HOST = "localhost" ]; then - sed -e 's/{{DB_HOST}}/""/' -i /etc/nipap/nipap.conf + sed -e 's/{{DB_HOST}}//' -i /etc/nipap/nipap.conf else sed -e "s/{{DB_HOST}}/$DB_HOST/" -i /etc/nipap/nipap.conf fi From 82804069bce723eebf7c4fc3424f7c21964ae201 Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Wed, 5 Jul 2023 22:54:31 +0000 Subject: [PATCH 26/37] www: Updated more deb deps to python3- --- nipap-www/debian/control | 2 +- nipap-www/debian/pydist-overrides | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nipap-www/debian/control b/nipap-www/debian/control index 5f020ffae..51c711924 100644 --- a/nipap-www/debian/control +++ b/nipap-www/debian/control @@ -2,7 +2,7 @@ Source: nipap-www Section: web Priority: optional Maintainer: Lukas Garberg -Build-Depends: debhelper (>= 5.0.38), debhelper (>= 7), python3 (>= 3.6), python-setuptools (>= 0.6b3), dh-python +Build-Depends: debhelper (>= 5.0.38), debhelper (>= 7), python3 (>= 3.6), python3-setuptools (>= 0.6b3), dh-python Standards-Version: 4.4.0 Package: nipap-www diff --git a/nipap-www/debian/pydist-overrides b/nipap-www/debian/pydist-overrides index bbef4a327..9302d319c 100644 --- a/nipap-www/debian/pydist-overrides +++ b/nipap-www/debian/pydist-overrides @@ -1,3 +1,3 @@ -Jinja2 python-jinja2 -pynipap python-pynipap +Jinja2 python3-jinja2 +pynipap python3-pynipap nipap nipap-common From 8e79c2544b4d6ffc4b85c788ff9b4ccf8c2cb24a Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Wed, 5 Jul 2023 15:54:18 +0200 Subject: [PATCH 27/37] test: Run tests on Ubuntu 20.04 --- .github/workflows/ci.yml | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a3bec17c6..fd43fc30a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,7 +9,7 @@ jobs: test: - runs-on: ubuntu-18.04 + runs-on: ubuntu-20.04 strategy: matrix: install: [ pip, apt ] @@ -42,20 +42,18 @@ jobs: python3-docutils \ python3-nose \ python3-requests \ - python-setuptools \ python3-setuptools \ - python-wheel \ python3-wheel \ python3-all \ default-jdk \ gradle \ rename \ - postgresql-10 \ - postgresql-10-ip4r + postgresql-12 \ + postgresql-12-ip4r # Drop and re-create cluster to be bound to default port for version in `pg_lsclusters | tail -n-2 | awk '{ print $1 }'`; do sudo pg_dropcluster $version main; done - sudo pg_createcluster --start 10 main + sudo pg_createcluster --start 12 main sed -e 's/username = guest/username = unittest/' -e 's/password = guest/password = gottatest/' nipap-cli/nipaprc > ~/.nipaprc chmod 0600 ~/.nipaprc From 94a45458229632905ae18da0ec04b8428d0c841a Mon Sep 17 00:00:00 2001 From: Lukas Garberg Date: Wed, 5 Jul 2023 23:19:04 +0000 Subject: [PATCH 28/37] ci: Misc changes for Ubuntu 20.04 To get the tests to pass on Ubuntu 20.04 a few workarounds were needed. Over time the workarounds should be removed, but now I need progress. * Change how nipapd is started when running the apt tests. For some reason the daemon won't start with systemd, so let's just start it manually.
  Also, it by default tries to drop privileges and then cannot read
  some postgres certificate in /root. Thus, avoid dropping privileges.
  And yes, the daemon won't start from systemd without dropping
  privileges either.

* Run the CLI tests over plain-text HTTP. With the newer versions of
  the TLS libs in 20.04, a CN is not enough; a subject alternative
  name (SAN) is required as well. It's a bit more tricky to generate a
  cert with a SAN, so I let the CLI tests run over plain-text HTTP
  instead. The REST tests pass over TLS (with warnings regarding the
  missing SAN).

* As the Python 2 version of NIPAP cannot be installed on Ubuntu
  20.04, the upgrade tests are disabled until we can upgrade from a
  Python 3-based version.

* Enabled an "Accident analysis" step in the CI pipeline. If the
  tests fail it is run and gathers some data for debugging.

More debugging

ci: Removed debugging

ci: Cleanup for starting nipapd manually
---
 .github/workflows/ci.yml | 27 ++++++++++++++++++++-------
 1 file changed, 20 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index fd43fc30a..a416ab3f9 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -6,6 +6,10 @@ on:
   pull_request:
     branches: [ master ]
 
+env:
+  ACTIONS_RUNNER_DEBUG: true
+  ACTIONS_STEP_DEBUG: true
+
 jobs:
 
   test:
@@ -13,7 +17,8 @@ jobs:
     strategy:
       matrix:
         install: [ pip, apt ]
-        upgrade: [ true, false ]
+        # Upgrade tests temporary disabled as Python 2 version cannot be installed on Ubuntu 20.04
+        upgrade: [ false ]
         exclude:
           - install: pip
             upgrade: true
@@ -54,7 +59,7 @@ jobs:
           # Drop and re-create cluster to be bound to default port
           for version in `pg_lsclusters | tail -n-2 | awk '{ print $1 }'`; do sudo pg_dropcluster $version main; done
           sudo pg_createcluster --start 12 main
-          sed -e 's/username = guest/username = unittest/' -e 's/password = guest/password = gottatest/' -e 's/port = 1337/port = 1338/' -e "s/#use_ssl = false/use_ssl = true/" nipap-cli/nipaprc > ~/.nipaprc
+          sed -e 's/username = guest/username = unittest/' -e 's/password = guest/password = gottatest/' nipap-cli/nipaprc > ~/.nipaprc
           chmod 0600 ~/.nipaprc
 
           # Set up CA and generate SSL cert
@@ -90,7 +95,7 @@ jobs:
           # install nipap-cli dependencies
           sudo -H pip3 install -r nipap-cli/requirements.txt
           # start nipap backend
-          nipap/nipapd --no-pid-file -c /etc/nipap/nipap.conf
+          nipap/nipapd --no-pid-file -c /etc/nipap/nipap.conf -df 2>&1 > /tmp/nipap.log &
 
       - name: "Install latest release from apt"
         if: ${{ matrix.install == 'apt' && matrix.upgrade == true }}
@@ -132,14 +137,15 @@ jobs:
           if [ `grep -c ssl_port /etc/nipap/nipap.conf` -eq 0 ]; then \
             # No SSL config in file - add from scratch
             sudo sed '/^port *=.*/a ssl_port = 1338\nssl_cert_file = \/tmp\/ca\/test.bundle.crt\nssl_key_file = \/tmp\/ca\/test.key' -i /etc/nipap/nipap.conf; \
-          else  \
+          else \
             sudo sed -e "s/#ssl_port.\+$/ssl_port = 1338/" -e "s/#ssl_cert_file.\+$/ssl_cert_file = \/tmp\/ca\/test.bundle.crt/" -e "s/#ssl_key_file.\+$/ssl_key_file = \/tmp\/ca\/test.key/" -i /etc/nipap/nipap.conf; \
-          fi 
+          fi
           # create local user for unittests
           sudo nipap/nipap-passwd add -u unittest -p gottatest -f /etc/nipap/local_auth.db -n "User for running unit tests"
           sudo nipap/nipap-passwd add -u readonly -p gottatest -f /etc/nipap/local_auth.db --readonly -n "Read-only user for running unit tests"
-          sudo sed -e "s/db_host *= *[^ ]\+/db_host = localhost/" -i /etc/nipap/nipap.conf
-          sudo /etc/init.d/nipapd restart
+          sudo sed -e "s/^db_host *=.*/db_host = localhost/" -e "s/{{SYSLOG}}/true/" -e "s/^debug.\+/debug = true/" -e "s/^user/#user/" -i /etc/nipap/nipap.conf
+          sudo systemctl stop nipapd.service
+          sudo nipapd --no-pid-file -c /etc/nipap/nipap.conf -df 2>&1 > /tmp/nipap.log &
 
       - name: "Verify pre-upgrade data"
         if: ${{ matrix.upgrade == true }}
@@ -155,3 +161,10 @@ jobs:
           nosetests3 tests/test_nipap_ro.py
           nosetests3 tests/test_rest.py
           make -C jnipap test
+
+      - name: "Accident analysis"
+        if: failure()
+        run: |
+          sudo cat /etc/nipap/nipap.conf
+          sudo cat /var/log/syslog
+          sudo cat /tmp/nipap.log || true

From 17e2bd1fe53012805da1d5fccc7f698a4493b534 Mon Sep 17 00:00:00 2001
From: Lukas Garberg
Date: Thu, 5 Oct 2023 10:04:00 +0200
Subject: [PATCH 29/37] www: Move imports into function

Moved the imports in __init__.py in the nipapwww module to be able to
import the module during package build without having the dependencies
installed.
---
 nipap-www/nipapwww/__init__.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/nipap-www/nipapwww/__init__.py b/nipap-www/nipapwww/__init__.py
index a87d87c55..b7535e54f 100644
--- a/nipap-www/nipapwww/__init__.py
+++ b/nipap-www/nipapwww/__init__.py
@@ -1,11 +1,3 @@
-import os
-
-from flask import Flask, redirect, url_for
-
-from nipap.nipapconfig import NipapConfig
-
-import pynipap
-
 __version__ = "0.31.2"
 __author__ = "Kristian Larsson, Lukas Garberg"
 __author_email__ = "kll@tele2.net, lukas@spritelink.net"
@@ -15,6 +7,14 @@
 
 
 def create_app(test_config=None):
+
+    # Moved imports here to be able to import this module without having the
+    # dependencies installed. Relevant during initial package build.
+    import os
+    from flask import Flask, redirect, url_for
+    from nipap.nipapconfig import NipapConfig
+    import pynipap
+
     # create and configure the app
     app = Flask(__name__, instance_relative_config=True)
     app.config.from_mapping(

From e8c6573c507a64ca20d37758f6a34462f01a6dff Mon Sep 17 00:00:00 2001
From: Lukas Garberg
Date: Thu, 5 Oct 2023 13:02:33 +0200
Subject: [PATCH 30/37] nipapd: Fix PID file handling

Fix the PID file handling. Not sure if the problems were introduced in
the transition from Python 2 to 3 or later, but changes were needed:
the file wasn't created on start, the PID wasn't visible in the error
message when starting two nipapd processes, and the PID file wasn't
truncated before a new PID was written.
---
 nipap/nipapd | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/nipap/nipapd b/nipap/nipapd
index 73a878f98..e544deda3 100755
--- a/nipap/nipapd
+++ b/nipap/nipapd
@@ -209,7 +209,8 @@ if __name__ == '__main__':
     if cfg.get('nipapd', 'pid_file') and not args.no_pid_file:
         # need a+ to be able to read PID from file
         try:
-            lf = open(cfg.get('nipapd', 'pid_file'), 'r+', 300)
+            lf = open(cfg.get('nipapd', 'pid_file'), 'a+')
+            lf.seek(0)
         except IOError as exc:
             logger.error("Unable to open PID file '" + str(exc.filename) + "': " + str(exc.strerror))
             sys.exit(1)
@@ -219,7 +220,7 @@ if __name__ == '__main__':
             logger.error('NIPAPd already running (pid: ' + lf.read().strip() + ')')
             sys.exit(1)
         logger.debug('Writing PID to file: ' + cfg.get('nipapd', 'pid_file'))
-        lf.truncate()
+        lf.truncate(0)
         lf.write('%d\n' % os.getpid())
         lf.flush()

From ce592f7625fd93c51de956651d9198e121fef008 Mon Sep 17 00:00:00 2001
From: Lukas Garberg
Date: Thu, 5 Oct 2023 14:22:10 +0200
Subject: [PATCH 31/37] nipapd: Fix handling of number of forks

Fixed how the number of forks to run is selected.
---
 nipap/nipap.conf.dist | 2 +-
 nipap/nipapd          | 8 +++++---
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/nipap/nipap.conf.dist b/nipap/nipap.conf.dist
index 648a4aa55..bb72d9c37 100644
--- a/nipap/nipap.conf.dist
+++ b/nipap/nipap.conf.dist
@@ -60,7 +60,7 @@ debug = false                           ; enable debug logging
 fork = 0                                ; number of forks
 # fork processes, mutually exclusive with foreground. 0 = automatically
 # determine number of forks (same as number of CPUs). -1 = no forking, >0 =
-# number of forks
+# number of forks. Default is to automatically determine number of forks.
 
 syslog = {{SYSLOG}}                     ; log to syslog
 
diff --git a/nipap/nipapd b/nipap/nipapd
index e544deda3..42f65ca22 100755
--- a/nipap/nipapd
+++ b/nipap/nipapd
@@ -275,9 +275,11 @@ if __name__ == '__main__':
         ssl_socket = bind_sockets(ssl_port, address)
         ssl_sockets += ssl_socket
 
-    num_forks = -1
+    num_forks = None
     try:
-        if cfg.getint('nipapd', 'forks') == 0:
+        if cfg.getint('nipapd', 'forks') == -1:
+            num_forks = False
+        elif cfg.getint('nipapd', 'forks') == 0:
             num_forks = None
         elif cfg.getint('nipapd', 'forks') > 0:
             num_forks = cfg.getint('nipapd', 'forks')
@@ -285,7 +287,7 @@ if __name__ == '__main__':
         pass
 
     # pre-fork if we are not running in foreground
-    if not cfg.getboolean('nipapd', 'foreground') and num_forks >= 0:
+    if not cfg.getboolean('nipapd', 'foreground') and num_forks is not False:
         # default is to fork as many processes as there are cores
         tornado.process.fork_processes(num_forks)

From 0bac5806f4ecd3a93f73f01aef23f7b42bf1f8b4 Mon Sep 17 00:00:00 2001
From: Lukas Garberg
Date: Thu, 5 Oct 2023 14:24:27 +0200
Subject: [PATCH 32/37] ci: Run apt nipapd from systemctl

Try running nipapd from apt again, after fixing the handling of forks
and the PID file.
---
 .github/workflows/ci.yml | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a416ab3f9..70888156e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -144,8 +144,7 @@ jobs:
           sudo nipap/nipap-passwd add -u unittest -p gottatest -f /etc/nipap/local_auth.db -n "User for running unit tests"
           sudo nipap/nipap-passwd add -u readonly -p gottatest -f /etc/nipap/local_auth.db --readonly -n "Read-only user for running unit tests"
           sudo sed -e "s/^db_host *=.*/db_host = localhost/" -e "s/{{SYSLOG}}/true/" -e "s/^debug.\+/debug = true/" -e "s/^user/#user/" -i /etc/nipap/nipap.conf
-          sudo systemctl stop nipapd.service
-          sudo nipapd --no-pid-file -c /etc/nipap/nipap.conf -df 2>&1 > /tmp/nipap.log &
+          sudo systemctl restart nipapd.service

From 9bb5a2b5ffdcbd526bb0e2e15699d5d151cb5a25 Mon Sep 17 00:00:00 2001
From: Lukas Garberg
Date: Thu, 5 Oct 2023 15:48:36 +0200
Subject: [PATCH 33/37] docs: Update WSGI setup to Python 3

Updated the WSGI setup guide to instruct the user to install the
Python 3 version of mod_wsgi.
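For context, a minimal WSGI entry point for mod_wsgi-py3 could look
like this (the file name is illustrative; see the guide for the actual
setup):

    # nipap-www.wsgi, loaded by Apache httpd via mod_wsgi-py3
    from nipapwww import create_app

    # mod_wsgi looks for a module-level callable named 'application'
    application = create_app()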
---
 docs/config-www.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/config-www.rst b/docs/config-www.rst
index 504d5df21..163d7728e 100644
--- a/docs/config-www.rst
+++ b/docs/config-www.rst
@@ -53,7 +53,7 @@ Apache httpd with mod_wsgi
 ==========================
 Begin by installing Apache httpd with mod_wsgi::
 
-    apt-get install libapache2-mod-wsgi
+    apt-get install libapache2-mod-wsgi-py3
 
 Then, add a new virtual host or configure the default one with the line::

From 501c4c97d1830d8938180c1d39f3facb50cb3fec Mon Sep 17 00:00:00 2001
From: Lukas Garberg
Date: Thu, 5 Oct 2023 15:56:42 +0200
Subject: [PATCH 34/37] www: Changes for Python 3 compatibility

Surprisingly small changes to make the WWW UI Python 3 compatible.
---
 nipap-www/nipapwww/xhr.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/nipap-www/nipapwww/xhr.py b/nipap-www/nipapwww/xhr.py
index 21e1f7897..3e67afd3f 100644
--- a/nipap-www/nipapwww/xhr.py
+++ b/nipap-www/nipapwww/xhr.py
@@ -12,7 +12,7 @@
 
 
 def validate_string(req, key):
-    if isinstance(req[key], basestring) and req[key].strip() != '':
+    if isinstance(req[key], str) and req[key].strip() != '':
         return req[key].strip()
     else:
         return None
@@ -591,7 +591,7 @@ def add_prefix():
     # Sanitize input parameters
     if 'vrf' in request.json:
         try:
-            if request.json['vrf'] is None or len(unicode(request.json['vrf'])) == 0:
+            if request.json['vrf'] is None or len(str(request.json['vrf'])) == 0:
                 p.vrf = None
             else:
                 p.vrf = VRF.get(int(request.json['vrf']))
@@ -724,7 +724,7 @@ def edit_prefix(id):
 
     if 'vrf' in request.json:
         try:
-            if request.json['vrf'] is None or len(unicode(request.json['vrf'])) == 0:
+            if request.json['vrf'] is None or len(str(request.json['vrf'])) == 0:
                 p.vrf = None
             else:
                 p.vrf = VRF.get(int(request.json['vrf']))
@@ -802,7 +802,7 @@ def del_current_vrf():
     """ Remove VRF to filter list session variable
     """
 
-    vrf_id = unicode(request.json['vrf_id'])
+    vrf_id = str(request.json['vrf_id'])
     if vrf_id in session['current_vrfs']:
         del session['current_vrfs'][vrf_id]
         session.modified = True
@@ -820,7 +820,7 @@ def get_current_vrfs():
     """
 
     # Verify that all currently selected VRFs still exists
-    cur_vrfs = session.get('current_vrfs', {}).items()
+    cur_vrfs = list(session.get('current_vrfs', {}).items())
     if len(cur_vrfs) > 0:
         q = {
             'operator': 'equals',

From ec64fe6a1743f62cd197e11df6fdf2cb2fb7e275 Mon Sep 17 00:00:00 2001
From: Lukas Garberg
Date: Thu, 5 Oct 2023 16:09:20 +0200
Subject: [PATCH 35/37] test: Run REST tests over plain-text HTTP

For some reason the REST tests suddenly started to fail over HTTPS in
the CI environment. There is another HTTPS-related problem for XML-RPC
related to SNI, but the REST tests previously passed with warnings for
this. Disabling HTTPS for the REST tests for now.
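The tests then talk to the API roughly like this (a sketch reusing the
URL and headers from the test class changed below):

    import requests

    server_url = "http://unittest:gottatest@127.0.0.1:1337/rest/v1/prefixes"
    headers = {"NIPAP-Authoritative-Source": "nipap",
               "NIPAP-Username": "unittest",
               "NIPAP-Full-Name": "unit tester"}

    # Plain-text HTTP sidesteps the SAN-less certificate entirely
    response = requests.get(server_url, headers=headers)
    print(response.status_code)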
---
 tests/test_rest.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_rest.py b/tests/test_rest.py
index 19ec788c0..eccf2b795 100644
--- a/tests/test_rest.py
+++ b/tests/test_rest.py
@@ -71,7 +71,7 @@ class NipapRestTest(unittest.TestCase):
     cfg = None
     nipap = None
 
-    server_url = "https://unittest:gottatest@127.0.0.1:1338/rest/v1/prefixes"
+    server_url = "http://unittest:gottatest@127.0.0.1:1337/rest/v1/prefixes"
     headers = {"NIPAP-Authoritative-Source": "nipap", "NIPAP-Username": "unittest", "NIPAP-Full-Name": "unit tester"}
 
     def setUp(self):

From 2ac35c80f0aea8efa04643b29dd0ad31217fc5d8 Mon Sep 17 00:00:00 2001
From: Lukas Garberg
Date: Thu, 5 Oct 2023 16:36:20 +0200
Subject: [PATCH 36/37] nipapd: Fix opening of missing config files

Revert changes in how config files are opened which were introduced in
the Python 3 conversion. The change caused an uncaught exception when
trying to open a non-existent config file.
---
 nipap/nipap/nipapconfig.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/nipap/nipap/nipapconfig.py b/nipap/nipap/nipapconfig.py
index 916ba569d..b5e237729 100644
--- a/nipap/nipap/nipapconfig.py
+++ b/nipap/nipap/nipapconfig.py
@@ -48,9 +48,9 @@ def __init__(self, cfg_path=None):
         configparser.ConfigParser.__init__(self, DEFAULT,
                                            inline_comment_prefixes=";#")
 
-        self.read_file()
+        self.read_config_file()
 
-    def read_file(self):
+    def read_config_file(self):
         """ Read the configuration file
         """
 
@@ -59,7 +59,8 @@ def read_config_file(self):
             return
 
         try:
-            self.read([self._cfg_path])
+            cfg_fp = open(self._cfg_path, 'r')
+            self.read_file(cfg_fp)
         except IOError as exc:
             raise NipapConfigError(str(exc))

From 7539743f11ab4fe2d662ff73706ca216d6bc9772 Mon Sep 17 00:00:00 2001
From: Lukas Garberg
Date: Thu, 5 Oct 2023 21:23:54 +0200
Subject: [PATCH 37/37] backend: Handle db_host = ""

The behaviour of configparser seems to have changed: the Python 2
version returned an empty string when a value in the config file was
set to "" (two double quotes), while the Python 3 version returns a
string containing the two double quotes, which the PostgreSQL client
library dislikes as a database host. As this is the default config
generated by the Debian install scripts it's probably pretty common,
so a workaround was implemented where the string "" results in the
previous behaviour.
---
 nipap/nipap/backend.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nipap/nipap/backend.py b/nipap/nipap/backend.py
index 521efd2a6..09093e418 100644
--- a/nipap/nipap/backend.py
+++ b/nipap/nipap/backend.py
@@ -711,7 +711,7 @@ def _connect_db(self):
             db_args['port'] = self._cfg.get('nipapd', 'db_port')
         # delete keys that are None, for example if we want to connect over a
         # UNIX socket, the 'host' argument should not be passed into the DSN
-        if db_args['host'] is not None and db_args['host'] == '':
+        if db_args['host'] is not None and db_args['host'] in ('', '""'):
             db_args['host'] = None
         for key in db_args.copy():
             if db_args[key] is None:
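A standalone illustration of the configparser behaviour worked around
above (a sketch; the inline config snippet is illustrative):

    import configparser

    cfg = configparser.ConfigParser()
    cfg.read_string('[nipapd]\ndb_host = ""\n')

    host = cfg.get('nipapd', 'db_host')
    print(repr(host))  # Python 3 keeps the quotes: '""'

    # Hence the workaround: treat both '' and '""' as "no host at all"
    if host in ('', '""'):
        host = None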